File size: 322 Bytes
e21e3cf
28cc4d5
1470394
 
 
cfe77c3
 
8bc9cc7
01838d2
78c0d01
 
6fe4c5d
9d038eb
 
d79df6b
ddaa3ca
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
spaces
gradio
torch  # NOTE(review): flash-attn wheel below is built for torch 2.5 / cp310 / cu12 — consider pinning torch==2.5.* to avoid an ABI mismatch; confirm against the Space's runtime image
torchvision
torchaudio
diffusers>=0.32.1
transformers>=4.47.1
accelerate>=1.2.1
sentencepiece
einops
xformers
packaging
ninja
psutil
setuptools
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
hdi1