# Pip requirements for an Unsloth fine-tuning environment (CUDA 12.1, torch 2.2.0).
# Install with:  pip install -r requirements.txt
# NOTE: the requirements-file format is line-oriented — one requirement or one
# global option (e.g. --find-links) per line. A single-line form does not parse.

pip==24.1.2

# Extra index for CUDA 12.1 torch wheels.
--find-links https://download.pytorch.org/whl/cu121

torch==2.2.0
triton

# PEP 508 direct reference; the older '#egg=unsloth[...]' fragment is deprecated.
unsloth[cu121-ampere-torch220] @ git+https://github.com/unslothai/unsloth.git

transformers
accelerate
bitsandbytes
einops
xformers

# Prebuilt flash-attention wheel pinned as a direct URL (the original entry was a
# bare wheel filename missing '.whl', which pip cannot resolve; a --find-links to
# a GitHub releases/download/ path also does not serve an index page).
# NOTE(review): wheel is tagged cu122/cxx11abiTRUE and cp312 while torch above is
# pulled from the cu121 index — confirm this ABI/CUDA/Python combination is the
# one actually intended for the target machine.
flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9/flash_attn-2.5.9.post1+cu122torch2.2cxx11abiTRUE-cp312-cp312-linux_x86_64.whl

numpy
packaging