# Configuration for Cog ⚙️
# Reference: https://cog.run/yaml
build:
  gpu: true
  cuda: "12.1"
  python_version: "3.11"
  python_packages:
    - "accelerate==0.30.1"
    - "deepspeed==0.14.4"
    - "einops==0.8.0"
    - "transformers==4.43.3"
    - "huggingface-hub==0.24.5"
- "einops==0.8.0"
- "pandas==2.2.2"
- "opencv-python==4.10.0.84"
- "pillow==10.4.0"
- "optimum-quanto==0.2.4"
- "sentencepiece==0.2.0"
run:
- curl -o /usr/local/bin/pget -L "https://github.com/replicate/pget/releases/download/v0.8.2/pget_linux_x86_64" && chmod +x /usr/local/bin/pget
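    # pget is Replicate's parallel downloader, installed above so the model
    # can fetch large weight files quickly at runtime. A typical invocation
    # (an assumption, not taken from this repo; the URL is a placeholder):
    #   pget "https://example.com/weights.tar" /src/weights.tar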
# predict.py defines how predictions are run on your model
predict: "predict.py:Predictor"