from fastapi import FastAPI
import onnxruntime as ort
from huggingface_hub import hf_hub_download

app = FastAPI()

# Download and load the ONNX model once at startup
MODEL_PATH = hf_hub_download(repo_id="immich-app/antelopev2", filename="model.onnx")
ort_session = ort.InferenceSession(MODEL_PATH)
@app.get("/")
def read_root():
return {"message": "Antelope v2 Model is running!"}
@app.post("/predict")
def predict(data: dict):
# استبدل هذا بالمنطق الفعلي لتشغيل النموذج
return {"prediction": "dummy result"}