from transformers import pipeline
# Initialize the pipeline with your fine-tuned model
pipe = pipeline("text-generation", model="crystal99/my-fine-tuned-model")
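# Optional sketch, assuming a CUDA GPU is available (otherwise generation runs on CPU):
# pipe = pipeline("text-generation", model="crystal99/my-fine-tuned-model", device=0)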
# Define input prompts
input_texts = [
    "Solve this: 23 + 19?",
    "Simplify: 48 / 6.",
    "What is the square root of 64?",
]
# Generate an output for a chat-formatted prompt
# (the special tokens assume the model was fine-tuned with this <|user|>/<|bot|> template)
outputs = pipe("<|startoftext|> <|user|> what comes after 7? <|bot|>", max_length=150, do_sample=True, temperature=0.7, top_k=50, top_p=0.9)
print(outputs)

# Generate and display a result for each prompt defined above
batch_outputs = pipe(input_texts, max_length=150, do_sample=True, temperature=0.7, top_k=50, top_p=0.9)
for i, out in enumerate(batch_outputs):
    print(f"Input {i+1}: {input_texts[i]}")
    print(f"Output {i+1}: {out[0]['generated_text']}\n")