blanchon committed on
Commit
a0e6fbe
·
1 Parent(s): 9af0e64

update deps

Browse files
Files changed (1) hide show
  1. requirements.txt +2 -2
requirements.txt CHANGED
@@ -1,10 +1,10 @@
1
- torch==2.6.0
2
  torchvision>=0.20.1
3
  diffusers>=0.32.1
4
  transformers>=4.47.1
5
  accelerate>=1.6.0
6
  xformers
7
- https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
8
  einops>=0.7.0
9
  gradio>=5.23.3
10
  spaces>=0.34.1
 
1
+ torch==2.4.0
2
  torchvision>=0.20.1
3
  diffusers>=0.32.1
4
  transformers>=4.47.1
5
  accelerate>=1.6.0
6
  xformers
7
+ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.4cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
8
  einops>=0.7.0
9
  gradio>=5.23.3
10
  spaces>=0.34.1