import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

MODEL_NAME = "csebuetnlp/mT5_multilingual_XLSum"

# csebuetnlp/mT5_multilingual_XLSum is an mT5 checkpoint, so it is loaded with
# the generic Auto* classes rather than the Pegasus-specific ones.
summarizer = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)


def summarize(text):
    # Tokenize the input, truncating anything longer than 1024 tokens.
    inputs = tokenizer(text, max_length=1024, truncation=True, return_tensors="pt")
    # Generate the summary and decode it back to text.
    summary_ids = summarizer.generate(inputs["input_ids"])
    return tokenizer.batch_decode(
        summary_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False
    )[0]


demo = gr.Blocks(title="⭐ Summ4rizer ⭐")

with demo:
    gr.Markdown(f'''

# Text Summarizer

Using the summarization model {MODEL_NAME}.
''')
    text = gr.Textbox(label="Text here !!", lines=1, interactive=True)
    summarize_btn = gr.Button("Let's Summarize")
    summarization = gr.Textbox()

    # summarize() returns a single string, so the click event writes to a
    # single output component.
    summarize_btn.click(summarize, inputs=[text], outputs=[summarization])

demo.launch()
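
# The generate() call above relies on the model's default decoding settings.
# A minimal sketch of tuned parameters (the specific values below are
# illustrative assumptions, not taken from the original app):
#
#     summary_ids = summarizer.generate(
#         inputs["input_ids"],
#         max_length=128,          # cap the summary length
#         num_beams=4,             # beam search instead of greedy decoding
#         no_repeat_ngram_size=2,  # reduce repeated phrases
#     )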