Test flash-attention v0.2.1 on docker CI

This commit is contained in:
jnwei 2023-10-24 13:50:32 -04:00
parent 5f5c8f2a5b
commit a90da39554
1 changed file with 5 additions and 4 deletions

View File

@ -21,13 +21,14 @@ dependencies:
- wandb==0.12.21
- modelcif==0.7
- awscli
- ml-collections
- bioconda::aria2
- bioconda::hmmer==3.3.2
- bioconda::hhsuite==3.3.0
- bioconda::kalign2==2.04
- pip:
- deepspeed==0.5.10 # can this be updated?
- dm-tree==0.1.6 # 0.1.6 yanked from conda-forge - update?
- ml-collections==0.1.0 # 0.1.1 is oldest available on conda-forge - update?
- deepspeed==0.5.10
- dm-tree==0.1.6
- git+https://github.com/NVIDIA/dllogger.git
- git+https://github.com/Dao-AILab/flash-attention.git@5b838a8
- git+https://github.com/Dao-AILab/flash-attention.git@v0.2.1
# - git+https://github.com/Dao-AILab/flash-attention.git@5b838a8