import gradio as gr
from transformers import BartTokenizer, BartForConditionalGeneration, BartConfig

# Load tokenizer and model
tokenizer = BartTokenizer.from_pretrained("facebook/bart-large-cnn")
config = BartConfig.from_pretrained("./models/bart-summarizer/checkpoint-8000/config.json")
model_path = "./models/bart-summarizer/checkpoint-8000/"
model = BartForConditionalGeneration.from_pretrained(pretrained_model_name_or_path=model_path, config=config)
# Define summarize function: tokenize the input (truncated to BART's 1024-token limit),
# generate a summary with beam search, and decode it back to text
def summarize(text):
    inputs = tokenizer([text], max_length=1024, return_tensors='pt', truncation=True)
    summary_ids = model.generate(inputs['input_ids'], num_beams=4, min_length=30, max_length=128, early_stopping=True)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
    return summary
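
# Optional sanity check (a minimal sketch added here, not part of the original app;
# the sample email text below is made up for illustration): run summarize() once at
# startup so a bad checkpoint path or tokenizer mismatch fails fast with a clear error.
_sample_email = (
    "Hi team, the quarterly planning meeting has been moved to Thursday at 10am. "
    "Please review the attached agenda and updated budget sheet beforehand."
)
print("Sanity-check summary:", summarize(_sample_email))
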
# Create Gradio interface
inputs = gr.Textbox(lines=10, label="Input Text")
outputs = gr.Textbox(label="Summary")
gr.Interface(summarize, inputs, outputs, title="Mail Subject Extraction", description="Get Subject from Email Content").launch()