This model is based on the T5-base model, fine-tuned on the bbc-news-summary dataset.

Example usage:
from transformers import pipeline, T5ForConditionalGeneration, T5Tokenizer
import torch
model_name = "Andrew0488/t5-summarizer"
model = T5ForConditionalGeneration.from_pretrained(model_name).cuda()
tokenizer = T5Tokenizer.from_pretrained(model_name)
def t5_summary(text: str):
    # Tokenize the input with the "summarize:" task prefix expected by T5
    inputs = tokenizer.encode(
        "summarize: " + text,
        return_tensors='pt',
        max_length=2000,
        truncation=True,
        padding='max_length'
    ).to(torch.device("cuda"))
    # Generate the summary with beam search
    summary_ids = model.generate(
        inputs,
        max_length=250,
        num_beams=5
    )
    # Decode the generated token IDs back into text
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
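A minimal usage sketch; the article string below is only an illustrative placeholder, not text from the dataset:

# Summarize a news article (placeholder text shown here)
article = "Your news article text goes here..."
print(t5_summary(article))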