import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import streamlit as st
# Model name
model_name = "YasirAbdali/bart-summarization" # Replace with the path to your fine-tuned model or Hugging Face model ID
# Load tokenizer and model
try:
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
    st.write("Model and tokenizer loaded successfully.")
except Exception as e:
    st.error(f"Error loading model or tokenizer: {e}")
    st.stop()
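# Note: Streamlit reruns this whole script on every interaction, so the model
# is reloaded each time. A minimal caching sketch (assumes Streamlit >= 1.18,
# which provides st.cache_resource; not part of the original app):
#
#     @st.cache_resource
#     def load_model(name):
#         return AutoTokenizer.from_pretrained(name), AutoModelForSeq2SeqLM.from_pretrained(name)
#
#     tokenizer, model = load_model(model_name)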
# Streamlit app
st.title("Summary Generator")
# User input
topic = st.text_area("Enter text:")
max_length = st.slider("Maximum length of generated summary (tokens):", min_value=100, max_value=500, value=200, step=50)
if topic:
    # Tokenize input, truncating to the model's maximum input length
    # (BART checkpoints typically accept at most 1024 tokens)
    try:
        input_ids = tokenizer.encode(topic, return_tensors="pt", truncation=True)
        st.write("Input text tokenized successfully.")
    except Exception as e:
        st.error(f"Error tokenizing input text: {e}")
        st.stop()
    # Generate summary
    try:
        with torch.no_grad():
            output = model.generate(
                input_ids,
                max_length=max_length,
                num_return_sequences=1,
                no_repeat_ngram_size=2,  # block repeated bigrams in the output
                do_sample=True,  # required for top_k/top_p/temperature to take effect
                top_k=50,
                top_p=0.95,
                temperature=0.7,
            )
        st.write("Summary generated successfully.")
    except Exception as e:
        st.error(f"Error generating summary: {e}")
        st.stop()
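    # Sampling (do_sample=True) gives varied, non-deterministic summaries. A
    # hedged alternative using standard generate() options (not part of the
    # original app) is beam search, which is deterministic and often preferred
    # for summarization:
    #
    #     output = model.generate(
    #         input_ids,
    #         max_length=max_length,
    #         num_beams=4,
    #         no_repeat_ngram_size=2,
    #         early_stopping=True,
    #     )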
    # Decode and display generated summary
    try:
        generated_summary = tokenizer.decode(output[0], skip_special_tokens=True)
        st.subheader("Generated Summary:")
        st.markdown(generated_summary)
    except Exception as e:
        st.error(f"Error decoding generated summary: {e}")
        st.stop()  # stop here so the download button below never sees an undefined summary
    # Option to download the summary as a plain-text file
    st.download_button(
        label="Download Summary",
        data=generated_summary,
        file_name="generated_summary.txt",
        mime="text/plain",
    )
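# To run locally (assuming this file is saved as app.py):
#     streamlit run app.py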