jbilcke-hf (HF Staff) committed
Commit 829df0b · 1 Parent(s): 9d0a236

use older version of torchcodec

Files changed (1): requirements.txt (+5 -1)
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 --find-links https://download.pytorch.org/whl/torch_stable.html
+
 # we seem to have an issue with Torch 2.8
 # I believe it works but it is incompatible with older weights formats?
 # it looks like they changed the checkpoint format or something
@@ -28,7 +29,10 @@ torch==2.6.0
 torchvision==0.21.0
 torchdata==0.10.1
 torchao==0.9.0
-torchcodec==0.4.0
+
+# for torch 2.6, we must use torchcodec 0.2
+--index-url=https://download.pytorch.org/whl/cu128
+torchcodec==0.2.1
 flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 
 # something broke in Transformers > 4.55.4
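
The in-file comment captures why this downgrade is needed: torchcodec releases are built against one specific torch release, so keeping torch==2.6.0 forces torchcodec back from 0.4 to the 0.2 line. Below is a minimal sanity-check sketch, not part of the commit, for verifying after pip install -r requirements.txt that pip resolved the pinned pairing; the expected versions are copied from the diff above, and the script assumes the packages are installed in the current environment.

# sanity-check the torch / torchcodec pairing pinned in requirements.txt
from importlib.metadata import version

EXPECTED = {
    "torch": "2.6.0",
    "torchvision": "0.21.0",
    "torchcodec": "0.2.1",
}

for pkg, want in EXPECTED.items():
    got = version(pkg)
    # startswith() tolerates local version tags such as "2.6.0+cu124"
    assert got.startswith(want), f"{pkg}: expected {want}, got {got}"
print("all pins resolved as expected")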