abdulmatinomotoso committed on
Commit
e79d3d5
1 Parent(s): e96727a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -12
app.py CHANGED
@@ -35,24 +35,20 @@ from transformers import (
35
  )
36
 
37
  #initializing the tokenizer and the model
38
- model_type_2 ="valurank/pegasus-multi_news-headline"
39
- tokenizer_2 = AutoTokenizer.from_pretrained(model_type_2)
40
- model_2 = AutoModelForSeq2SeqLM.from_pretrained(model_type_2)
41
 
42
  #Defining a function to generate the headlines
43
  def headline_generator_2(file):
44
  input_text = clean_text(file)
45
  input_text = sent_tokenize(input_text)
46
  text = ''.join(input_text[:6])
47
-
48
- with tokenizer_2.as_target_tokenizer():
49
- batch = tokenizer_2(
50
- text, truncation=True, return_tensors="pt"
51
- )
52
-
53
- translated = model_2.generate(**batch)
54
- summary_2 = tokenizer_2.batch_decode(translated, skip_special_tokens=True, max_length=20)
55
- return summary_2[0]
56
 
57
  #creating an interface for the headline generator using gradio
58
  demo = gr.Interface(headline_generator_2, inputs=[gr.inputs.Textbox(label="Drop your .txt file here", optional=False)],
 
35
  )
36
 
37
  #initializing the tokenizer and the model
38
+ tokenizer = AutoTokenizer.from_pretrained("abdulmatinomotoso/pegasus-samsum")
39
+ model = AutoModelForSeq2SeqLM.from_pretrained("abdulmatinomotoso/pegasus-samsum")
 
40
 
41
  #Defining a function to generate the headlines
42
  def headline_generator_2(file):
43
  input_text = clean_text(file)
44
  input_text = sent_tokenize(input_text)
45
  text = ''.join(input_text[:6])
46
+
47
+ inputs = tokenizer(text,truncation=True, return_tensors="pt")
48
+ summary_ids = model.generate(inputs["input_ids"])
49
+ summary = tokenizer.batch_decode(summary_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
50
+
51
+ return summary
 
 
 
52
 
53
  #creating an interface for the headline generator using gradio
54
  demo = gr.Interface(headline_generator_2, inputs=[gr.inputs.Textbox(label="Drop your .txt file here", optional=False)],