# Sum4rize / app.py
import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# csebuetnlp/mT5_multilingual_XLSum is an mT5 checkpoint, so it is loaded with
# the Auto classes rather than the Pegasus-specific model and tokenizer.
MODEL_NAME = "csebuetnlp/mT5_multilingual_XLSum"
summarizer = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
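# Optional preprocessing: the XL-Sum model card collapses newlines and repeated
# whitespace before tokenizing. The helper below is a minimal sketch of that
# step, assuming a simple regex is sufficient; call it on `text` inside
# summarize() if inputs contain raw line breaks.
import re

def normalize_whitespace(text):
    # Collapse all whitespace runs (including newlines) into single spaces.
    return re.sub(r"\s+", " ", text.strip())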
def summarize(text):
    # Tokenize the input; truncation=True enforces the max_length cap.
    inputs = tokenizer(text, max_length=1024, truncation=True, return_tensors="pt")
    # Generate summary token ids and decode them back to text.
    summary_ids = summarizer.generate(inputs["input_ids"])
    return tokenizer.batch_decode(
        summary_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False
    )[0]
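# The model card for csebuetnlp/mT5_multilingual_XLSum generates its example
# summaries with beam search; a sketch of those generate() arguments (values
# assumed from the card's example, not tuned here):
#
#     summary_ids = summarizer.generate(
#         inputs["input_ids"],
#         max_length=84,
#         num_beams=4,
#         no_repeat_ngram_size=2,
#     )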
demo = gr.Blocks(title="⭐ Summ4rizer ⭐")
with demo:
    gr.Markdown(f'''
        <div>
            <h1 style='text-align: center'>Text Summarizer</h1>
        </div>
        <div>
            Using the summarization model <a href='https://huggingface.co/{MODEL_NAME}' target='_blank'><b>{MODEL_NAME}</b></a>.
        </div>
    ''')
    text = gr.Textbox(label="Text here !!", lines=1, interactive=True)
    summarize_btn = gr.Button("Let's Summarize")
    summarization = gr.Textbox(label="Summary")
    # summarize() returns a single string, so wire the button to one output.
    summarize_btn.click(summarize, inputs=[text], outputs=[summarization])
demo.launch()