torch==2.4.0
torchvision==0.19.0
transformers==4.50.0
opencv-python-headless<4.10
peft<0.14.0
timm==1.0.9
einops==0.8.0
#flash_attn
#https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
sentencepiece==0.2.0
mmengine<1
accelerate
numpy<2
spaces
html2image
scipy
moviepy==1.0.3
imageio
pydantic==2.10.6
imgkit
wkhtmltopdf
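
# Note: the commented-out flash_attn wheel above appears (from the tags in its
# filename) to target CUDA 12.3, torch 2.4, CPython 3.10, and linux_x86_64;
# uncomment it only on a matching environment. imgkit additionally relies on a
# system wkhtmltoimage/wkhtmltopdf binary being available on PATH.
# Install with, e.g.:
#   pip install -r requirements.txt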