torch==2.4.0
torchvision
transformers==4.39.2
sentencepiece==0.1.99
scikit-learn==1.2.2
timm==1.0.8
deepspeed==0.12.6
numpy==1.26.4
open_clip_torch==2.32.0
typed-argument-parser
spaces
gradio==4.44.1
gradio_client==1.3.0
pillow
accelerate==0.27.2
pydantic==2.10.6
bitsandbytes
tqdm
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.0.post2/flash_attn-2.7.0.post2+cu12torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
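# Note: the flash-attn wheel above is prebuilt for CUDA 12.x, torch 2.4,
# and CPython 3.10 on x86_64 Linux (per the wheel filename). On other
# platforms or Python versions, substitute a matching wheel from the
# flash-attention releases page, or build from source (typically
# `pip install flash-attn --no-build-isolation`).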