```text
# requirements.txt — CUDA 12.1 / torch 2.2.0 / Ampere stack
pip==24.1.2

# PyTorch wheels built for CUDA 12.1
--find-links https://download.pytorch.org/whl/cu121
torch==2.2.0
triton
# Prebuilt flash-attn wheels. The release tag must match the pinned version
# (v2.5.9.post1, not v2.5.9), and the wheel's local tag must match torch 2.2,
# CUDA 12.x, and the pre-C++11 ABI of PyTorch's pip wheels (cxx11abiFALSE,
# not cxx11abiTRUE).
--find-links https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/
flash-attn==2.5.9.post1+cu122torch2.2cxx11abiFALSE

transformers
accelerate
bitsandbytes
einops
xformers
numpy<2  # torch 2.2.0 wheels are built against NumPy 1.x
packaging
# PEP 508 direct reference: current pip rejects extras inside an
# `#egg=` fragment, so the extras go before the `@`.
unsloth[cu121-ampere-torch220] @ git+https://github.com/unslothai/unsloth.git
```
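
After `pip install -r requirements.txt`, a quick import check catches version and ABI mismatches before any training run. This is a minimal sketch, not part of the original setup: the filename `check_env.py` and the exact assertions are illustrative, while `FastLanguageModel` is unsloth's documented entry point.

```python
# check_env.py — hedged sanity check for the pinned stack above.
# Assumption: run inside the environment created from requirements.txt.
import torch

# torch reports e.g. "2.2.0+cu121" when installed from the cu121 index.
assert torch.__version__.startswith("2.2.0"), torch.__version__
assert torch.cuda.is_available(), "CUDA device not visible to torch"
print(f"torch {torch.__version__} | CUDA {torch.version.cuda} | "
      f"device {torch.cuda.get_device_name(0)}")

# A flash-attn wheel whose cxx11abi / CUDA / torch tag does not match the
# installed torch build typically fails here with an undefined-symbol error.
import flash_attn
print(f"flash-attn {flash_attn.__version__}")

# unsloth's main entry point; importing it also confirms that transformers,
# accelerate, and bitsandbytes resolve correctly.
from unsloth import FastLanguageModel  # noqa: F401
print("unsloth import OK")
```

If the `flash_attn` import fails with an `undefined symbol` error, the wheel's `cxx11abi` tag does not match the installed torch build; pick the other ABI variant from the same release page.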