from transformers import GPTNeoForCausalLM, AutoTokenizer

# Load model and tokenizer
model = GPTNeoForCausalLM.from_pretrained("harshagnihotri14/SOP_Generator")
tokenizer = AutoTokenizer.from_pretrained("harshagnihotri14/SOP_Generator")

# Prepare input
input_text = "Generate an SOP for handling customer complaints:"
input_ids = tokenizer.encode(input_text, return_tensors="pt")

# Generate text
output = model.generate(input_ids, max_length=200, num_return_sequences=1, no_repeat_ngram_size=2)

# Decode and print the result
generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
print(generated_text)
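
# Optional sketch: with the arguments above, generate() uses greedy decoding.
# For more varied SOP drafts you can switch to sampling; the temperature/top_p
# values below are illustrative assumptions, not tuned settings for this model.
# pad_token_id is set to the EOS token because GPT-Neo tokenizers define no pad token.
sampled_output = model.generate(
    input_ids,
    max_length=200,
    do_sample=True,
    temperature=0.8,
    top_p=0.95,
    no_repeat_ngram_size=2,
    pad_token_id=tokenizer.eos_token_id,
)
print(tokenizer.decode(sampled_output[0], skip_special_tokens=True))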