torch>=2.5.1
torchvision>=0.20.1
diffusers>=0.32.1
transformers>=4.47.1
accelerate>=1.6.0
xformers
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.4cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
einops>=0.7.0
gradio>=5.23.3
spaces>=0.34.1
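# Note: the flash-attn wheel above is prebuilt for CUDA 12 / torch 2.4 / cxx11abi=TRUE / CPython 3.10 on Linux x86_64
# (as encoded in its filename). If your torch, CUDA, or Python version differs, you may need to pick a matching wheel
# from the flash-attention v2.7.4.post1 release assets instead.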