import streamlit as st
from transformers import pipeline

# Model to load: replace with the path to your fine-tuned model or a Hugging Face model ID
model_name = "YasirAbdali/bart-summarization"

# Load summarization pipeline
try:
    summarizer = pipeline("summarization", model=model_name)
    st.write("Summarization pipeline loaded successfully.")
except Exception as e:
    st.error(f"Error loading summarization pipeline: {e}")
    st.stop()
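
# Note: wrapping the pipeline construction in a function decorated with
# @st.cache_resource would avoid reloading the model on every Streamlit rerun
# (optional optimization; behavior is otherwise unchanged).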

# Streamlit app
st.title("Summary Generator")

# User input
input_text = st.text_area("Enter text to summarize:")
max_length = st.slider("Maximum summary length (tokens):", min_value=100, max_value=500, value=200, step=50)

if input_text:
    # Generate summary
    try:
        summary = summarizer(input_text, max_length=max_length, min_length=50, do_sample=False)
        generated_summary = summary[0]['summary_text']
        st.write("Summary generated successfully.")
    except Exception as e:
        st.error(f"Error generating summary: {e}")
        st.stop()

    # Display generated summary
    try:
        st.subheader("Generated Summary:")
        st.markdown(generated_summary)
    except Exception as e:
        st.error(f"Error displaying generated summary: {e}")

    # Option to download the summary
    st.download_button(
        label="Download Summary",
        data=generated_summary,
        file_name="generated_summary.txt",
        mime="text/plain"
    )
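
# A minimal sketch of how this app would typically be launched locally,
# assuming the file is saved as app.py and streamlit, transformers, and a
# torch backend are installed:
#   pip install streamlit transformers torch
#   streamlit run app.py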