import torch
from transformers import pipeline
import streamlit as st

# Model name
model_name = "YasirAbdali/bart-summarization"  # Replace with the path to your fine-tuned model or Hugging Face model ID

# Load summarization pipeline
try:
    summarizer = pipeline("summarization", model=model_name)
    st.write("Summarization pipeline loaded successfully.")
except Exception as e:
    st.error(f"Error loading summarization pipeline: {e}")
    st.stop()

# Streamlit app
st.title("Summary Generator")

# User input
topic = st.text_area("Enter text:")
max_length = st.slider("Maximum length of generated text:", min_value=100, max_value=500, value=200, step=50)

if topic:
    # Generate summary
    try:
        summary = summarizer(topic, max_length=max_length, min_length=50, do_sample=False)
        generated_summary = summary[0]['summary_text']
        st.write("Summary generated successfully.")
    except Exception as e:
        st.error(f"Error generating summary: {e}")
        st.stop()

    # Display generated summary
    try:
        st.subheader("Generated Summary:")
        st.markdown(generated_summary)
    except Exception as e:
        st.error(f"Error displaying generated summary: {e}")

    # Option to download the summary
    st.download_button(
        label="Download Summary",
        data=generated_summary,
        file_name="generated_summary.txt",
        mime="text/plain"
    )
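If the script is saved as, say, app.py (the filename is only illustrative), it can be launched locally with streamlit run app.py, which starts the web UI where you can paste text, adjust the maximum summary length, and download the generated summary.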