from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the tokenizer and the model checkpoint stored in the given subfolder
tokenizer = AutoTokenizer.from_pretrained("microsoft/lts-gpt2-sm")
model = AutoModelForCausalLM.from_pretrained("microsoft/lts-gpt2-sm", subfolder="gpt2_0e1b5a3c867d6473da270799061f3089a1df5afd")

# Encode the prompt, generate up to 128 tokens, and decode the result
text = "# Halo Infinite Review"
input_ids = tokenizer(text, return_tensors="pt").input_ids
generated_ids = model.generate(input_ids, max_length=128)
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))