lzyhha committed
Commit 7bf1b5d · Parent(s): 7ecea30
Files changed (2)
  1. app.py +0 -2
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,5 +1,3 @@
-import subprocess
-subprocess.run('pip install flash-attn --no-build-isolation', shell=True)
 import argparse
 import spaces
 from visualcloze import VisualClozeModel
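
The removed lines shelled out to pip at import time, which rebuilds flash-attn's CUDA kernels on every cold start of the Space; the commit replaces that with a prebuilt wheel pinned in requirements.txt below. A minimal sketch, not part of this commit, of how an app could degrade gracefully if the wheel were ever missing or incompatible (the HAS_FLASH_ATTN flag is a hypothetical name):

# Hypothetical guard, not in this commit: fall back to standard attention
# if the prebuilt flash-attn wheel fails to import.
try:
    import flash_attn  # provided by the wheel pinned in requirements.txt
    HAS_FLASH_ATTN = True
except ImportError:
    HAS_FLASH_ATTN = False  # model code can select a non-flash attention path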
requirements.txt CHANGED
@@ -18,4 +18,5 @@ scipy
 tqdm
 einops
 sentencepiece
-hf_xet
+hf_xet
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu12torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
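
The pinned wheel's filename encodes its build targets: flash-attn 2.7.2.post1 built for CUDA 12, torch 2.1, CPython 3.10 (cp310), Linux x86_64, with the pre-C++11 ABI (cxx11abiFALSE). A minimal sketch, not part of this commit, that checks the runtime against those tags before relying on the wheel:

# Hypothetical sanity check, not in this commit: the assertions mirror the
# tags in the wheel filename (cu12, torch2.1, cp310).
import sys
import torch

assert sys.version_info[:2] == (3, 10), "wheel is built for CPython 3.10"
assert torch.__version__.startswith("2.1"), "wheel is built against torch 2.1"
assert torch.version.cuda is not None and torch.version.cuda.startswith("12"), \
    "wheel targets CUDA 12"

import flash_attn  # should import without triggering a source build
print(flash_attn.__version__)  # expect 2.7.2.post1

If any tag drifts (for example, a torch upgrade in requirements.txt), the pinned URL must be swapped for a matching wheel from the same flash-attention releases page, since pip will not re-resolve a direct URL against the new environment.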