|
from transformers import T5Tokenizer, T5ForConditionalGeneration |
|
import torch |
|
import colorama |
|
from colorama import Fore, Back, Style |
|
|
|
# Enable ANSI color handling (needed for colored output on Windows terminals).
colorama.init()




# Load the fine-tuned T5 model weights from the local "Ruttoni_AI" directory.
# NOTE(review): assumes this script is run from the directory containing
# ./Ruttoni_AI — confirm the working directory before deploying.
model = T5ForConditionalGeneration.from_pretrained("./Ruttoni_AI")

# Tokenizer comes from the base checkpoint the model was fine-tuned from,
# so vocabulary/ids match the local weights.
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
|
|
|
|
|
def generate_summary(input_text, max_new_tokens=None):
    """Generate a model response for *input_text* using the loaded T5 model.

    Args:
        input_text: The prompt/question to feed the model.
        max_new_tokens: Optional cap on generated tokens. When None, the
            model's default generation length is used (backward compatible
            with the original behavior, which truncates at ~20 tokens).

    Returns:
        The decoded model output with special tokens stripped.
    """
    input_ids = tokenizer.encode(input_text, return_tensors='pt')
    gen_kwargs = {} if max_new_tokens is None else {"max_new_tokens": max_new_tokens}
    # Inference only: disable autograd to avoid building a gradient graph.
    with torch.no_grad():
        outputs = model.generate(input_ids, **gen_kwargs)
    summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return summary
|
|
|
|
|
# Demo query: ask the fine-tuned model a question and print the answer.
input_text = "Who is pesce beddo?"

summary = generate_summary(input_text)

# Style.RESET_ALL restores terminal colors; without it the green background
# bleeds into every subsequent line of terminal output.
print(Back.GREEN + "Answer: " + summary + Style.RESET_ALL)