import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# mT5 checkpoint fine-tuned for multilingual summarization (XL-Sum)
MODEL_NAME = "csebuetnlp/mT5_multilingual_XLSum"

summarizer = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
def summarize(text):
    # Tokenize the input, truncating anything longer than the model can handle
    inputs = tokenizer(text, max_length=1024, truncation=True, return_tensors="pt")
    # Generate the summary; beam search with a short length cap is a common
    # choice for this checkpoint
    summary_ids = summarizer.generate(
        inputs["input_ids"], num_beams=4, max_length=84, no_repeat_ngram_size=2
    )
    return tokenizer.batch_decode(summary_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
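# Quick sanity check of summarize() outside the UI (hypothetical sample text;
# uncomment once the model weights have downloaded):
# print(summarize("Paste a paragraph of news text here to see a short summary."))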
demo = gr.Blocks(title="⭐ Summ4rizer ⭐")

with demo:
    gr.Markdown(f'''
        <div>
            <h1 style='text-align: center'>Text Summarizer</h1>
        </div>
        <div>
            Using the summarization model from <a href='https://huggingface.co/{MODEL_NAME}' target='_blank'><b>{MODEL_NAME}</b></a>.
        </div>
    ''')
    text = gr.Textbox(label="Text to summarize", lines=1, interactive=True)
    summarize_btn = gr.Button("Let's Summarize")
    summarization = gr.Textbox(label="Summary")

    # Wire the button to the summarize function: one text input, one text output
    summarize_btn.click(summarize, inputs=[text], outputs=[summarization])

demo.launch()