jbilcke-hf (HF Staff) committed
Commit c550ede
1 Parent(s): 873d3e4
Files changed (2):
  1. Dockerfile +2 -6
  2. requirements.txt +7 -12
Dockerfile CHANGED
@@ -32,13 +32,9 @@ RUN apt-get update && apt-get install -y \
  # Create app directory
  WORKDIR /app

- # Install Python dependencies first for better caching
+ # Install Python dependencies
  COPY requirements.txt .
- # Install most requirements first
- RUN grep -v torchcodec requirements.txt > requirements_without_torchcodec.txt && \
-     pip3 install --no-cache-dir -r requirements_without_torchcodec.txt
- # Install torchcodec with CUDA support (for CUDA 12.x which matches our base image)
- RUN pip3 install --no-cache-dir torchcodec --index-url=https://download.pytorch.org/whl/cu124
+ RUN pip3 install --no-cache-dir -r requirements.txt

  # Verify FFmpeg installation and libraries
  RUN ffmpeg -version && \
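
With torchcodec no longer installed from a separate index, this change relies on pip resolving CUDA-enabled wheels for everything in a single pass. As a hedged sanity check (not part of this commit), something like the following could be run inside the built image; the exact CUDA suffix depends on which wheel pip resolves:

# Sanity-check sketch, assuming the image built from this Dockerfile:
import torch
import torchcodec  # video decoding; relies on the FFmpeg libraries installed earlier in the Dockerfile

print(torch.__version__)          # e.g. "2.8.0+cuXXX"; the suffix depends on the resolved wheel
print(torch.cuda.is_available())  # True only where a CUDA driver is present
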
requirements.txt CHANGED
@@ -1,15 +1,12 @@
- # we don't have a flash attention binary yet for Pytorch 2.7
- # so we use pytorch 2.6
- torch==2.6.0
- torchvision==0.21.0
- torchdata==0.10.1
- torchao==0.9.0
-
- # for video decoding
- # Note: We need to install torchcodec with CUDA support for A100
- # This will be handled in the Dockerfile with the correct index-url
+ --find-links https://download.pytorch.org/whl/torch_stable.html
+ torch==2.8.0
+ torchvision==0.23.0
+ torchdata==0.11.0
+ torchao==0.12.0
  torchcodec

+ flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.3/flash_attn-2.8.3+cu12torch2.8cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
+
  # something broke in Transformers > 4.55.4
  transformers==4.55.4

@@ -28,8 +25,6 @@ diffusers @ git+https://github.com/huggingface/diffusers.git@main
  imageio
  imageio-ffmpeg

- flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
-
  # for youtube video download
  pytube
  pytubefix
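
The flash-attn pin moves from the torch-2.6 wheel (v2.7.4.post1) to the v2.8.3 wheel built against torch 2.8, which is why the torch family is bumped in the same commit. The wheel filename encodes its constraints (CUDA 12, torch 2.8, CPython 3.10, linux x86_64); a hedged sketch of a runtime check for those constraints, not part of the commit:

# Hypothetical compatibility check for the pinned flash-attn wheel
# (cu12 / torch 2.8 / cp310 / linux_x86_64); not part of the commit.
import sys
import torch

assert sys.version_info[:2] == (3, 10), "wheel is cp310-only"
assert torch.__version__.startswith("2.8."), "wheel was built against torch 2.8"
assert torch.version.cuda is not None and torch.version.cuda.startswith("12."), "wheel targets CUDA 12.x"

import flash_attn
print(flash_attn.__version__)  # expected: 2.8.3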