fastapi>=0.68.0
uvicorn>=0.15.0
codecarbon>=2.3.1
datasets>=2.14.0
scikit-learn>=1.0.2
xformers
sentence-transformers==3.3.1
pydantic>=1.10.0
python-dotenv>=1.0.0
gradio>=4.0.0
requests>=2.31.0
librosa==0.10.2.post1
skops
transformers
torch
transformers[torch]