from newspaper import Article
import gradio as gr
from transformers import pipeline
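# Answer-generation pipeline (UnifiedQA fine-tuned on the Reddit "Saved you a click" data);
# the two lines below raise the tokenizer's input window and the model's maximum generated length.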
ta_pipeline = pipeline(model="marksverdhei/unifiedqa-large-reddit-syac")
ta_pipeline.tokenizer.model_max_length = 2048
ta_pipeline.model.config.max_length = 300
description = """
Enter the URL of a clickbait article, or paste in its title and content.
We will provide the answer to the clickbait title.
Disclaimer: the model can generate wrong information. Read more about the model [here](https://huggingface.co/marksverdhei/unifiedqa-large-reddit-syac).
"""
def fetch_article_content(url):
    article = Article(url)
    article.download()
    article.parse()
    if not (article.title and article.text):
        raise Exception("Unable to fetch article. Try copy-pasting in the text fields instead.")
    return article.title, article.text
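# Build the model input as "title \n body": UnifiedQA's question/context format
# uses a literal "\n" separator, hence the raw string below.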
def predict(title, body):
    title = title.lower()
    body = body.lower()
    input_text = title + r" \n " + body
    output = ta_pipeline(input_text, truncation=True)
    output_text = output[0]["generated_text"]
    return output_text
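# A supplied URL takes precedence; otherwise the pasted title and body are used.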
def predict_from_inputs(url, title, body):
    if url:
        title, body = fetch_article_content(url)
    if title and body:
        return title, predict(title, body)
    else:
        raise Exception("You must supply either url or title and body")
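# The three plain text inputs correspond to (url, title, body); the outputs show
# the article title alongside the generated answer.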
gr.Interface(
    fn=predict_from_inputs,
    inputs=["text", "text", "text"],
    outputs=[
        gr.Textbox(label="title"),
        gr.Textbox(label="answer")
    ],
    title="Saved you a click!",
    description=description,
).launch()