from transformers import pipeline

# Initialize the pipeline with your fine-tuned model
pipe = pipeline("text-generation", model="crystal99/my-fine-tuned-model")
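
# Note: by default the pipeline runs on CPU. A GPU can be requested explicitly
# (sketch; device index 0 is an assumption about the local machine, not part of the original script):
# pipe = pipeline("text-generation", model="crystal99/my-fine-tuned-model", device=0)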

# Define input prompts
input_texts = [
    "Solve this: 23 + 19?",
    "Simplify: 48 / 6.",
    "What is the square root of 64?",
]

# Generate outputs (do_sample=True is required for temperature/top_k/top_p to take effect)
outputs = pipe(input_texts, max_length=50, do_sample=True, temperature=0.7, top_k=50, top_p=0.9)

# Display results: for a list of prompts, the pipeline returns a list of
# candidate-sequence lists, so index the first candidate per prompt
for i, out in enumerate(outputs):
    print(f"Input {i+1}: {input_texts[i]}")
    print(f"Output {i+1}: {out[0]['generated_text']}\n")