Commit
·
67ea722
1
Parent(s):
2364c9f
install flash-attn with cuda
Browse files
- app.py +9 -7
- requirements.txt +1 -1
app.py
CHANGED
|
@@ -11,15 +11,15 @@ CHECKPOINTS_PATH = os.path.join(PWD, "checkpoints")
|
|
| 11 |
# import subprocess
|
| 12 |
|
| 13 |
# copy cudnn files
|
| 14 |
-
|
| 15 |
-
|
| 16 |
|
| 17 |
os.system("apt-get update && apt-get install -qqy libmagickwand-dev")
|
| 18 |
|
| 19 |
# install packages
|
| 20 |
-
os.system('export FLASH_ATTENTION_SKIP_CUDA_BUILD=FALSE && pip install --no-build-isolation "flash-attn<=2.7.4.post1"')
|
| 21 |
os.system(
|
| 22 |
-
"pip install https://download.pytorch.org/whl/cu128/flashinfer/flashinfer_python-0.2.5%2Bcu128torch2.7-cp38-abi3-linux_x86_64.whl"
|
| 23 |
)
|
| 24 |
os.system('export VLLM_ATTENTION_BACKEND=FLASHINFER && pip install "vllm==0.9.0"')
|
| 25 |
os.system('pip install "decord==0.6.0"')
|
|
@@ -31,10 +31,12 @@ os.system(
|
|
| 31 |
"export CONDA_PREFIX=/usr/local/cuda && ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/python3.10"
|
| 32 |
)
|
| 33 |
|
| 34 |
-
os.system('pip install --no-build-isolation "transformer-engine[pytorch]"')
|
| 35 |
-
os.system('pip install "decord==0.6.0"')
|
| 36 |
|
| 37 |
-
os.system(
|
    'pip install "git+https://github.com/nvidia-cosmos/cosmos-transfer1@e4055e39ee9c53165e85275bdab84ed20909714a"'
|
)
|
| 38 |
|
| 39 |
# setup env
|
| 40 |
os.environ["CUDA_HOME"] = "/usr/local/cuda"
|
|
|
|
| 11 |
# import subprocess
|
| 12 |
|
| 13 |
# copy cudnn files
|
| 14 |
+
os.system("cp /root/.pyenv/versions/3.10.18/lib/python3.10/site-packages/nvidia/cudnn/include/*.h /usr/local/cuda/include")
|
| 15 |
+
os.system("cp /root/.pyenv/versions/3.10.18/lib/python3.10/site-packages/nvidia/cudnn/lib/*.so* /usr/local/cuda/lib64")
|
| 16 |
|
| 17 |
os.system("apt-get update && apt-get install -qqy libmagickwand-dev")
|
| 18 |
|
| 19 |
# install packages
|
| 20 |
+
# os.system('export FLASH_ATTENTION_SKIP_CUDA_BUILD=FALSE && pip install --timeout=1000000000 --no-build-isolation "flash-attn<=2.7.4.post1"')
|
| 21 |
os.system(
|
| 22 |
+
"pip install --timeout=1000000000 https://download.pytorch.org/whl/cu128/flashinfer/flashinfer_python-0.2.5%2Bcu128torch2.7-cp38-abi3-linux_x86_64.whl"
|
| 23 |
)
|
| 24 |
os.system('export VLLM_ATTENTION_BACKEND=FLASHINFER && pip install "vllm==0.9.0"')
|
| 25 |
os.system('pip install "decord==0.6.0"')
|
|
|
|
| 31 |
"export CONDA_PREFIX=/usr/local/cuda && ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/python3.10"
|
| 32 |
)
|
| 33 |
|
| 34 |
+
os.system('pip install --timeout=1000000000 --no-build-isolation "transformer-engine[pytorch]"')
|
| 35 |
+
os.system('pip install --timeout=1000000000 "decord==0.6.0"')
|
| 36 |
|
| 37 |
+
os.system(
|
| 38 |
+
'pip install --timeout=1000000000 "git+https://github.com/nvidia-cosmos/cosmos-transfer1@e4055e39ee9c53165e85275bdab84ed20909714a"'
|
| 39 |
+
)
|
| 40 |
|
| 41 |
# setup env
|
| 42 |
os.environ["CUDA_HOME"] = "/usr/local/cuda"
|
requirements.txt
CHANGED
|
@@ -23,7 +23,7 @@ iopath==0.1.10
|
|
| 23 |
loguru==0.7.3
|
| 24 |
matplotlib==3.10.3
|
| 25 |
mediapy==1.2.4
|
| 26 |
-
megatron_core
|
| 27 |
natsort==8.4.0
|
| 28 |
nltk==3.9.1
|
| 29 |
numpy==2.2.6
|
|
|
|
| 23 |
loguru==0.7.3
|
| 24 |
matplotlib==3.10.3
|
| 25 |
mediapy==1.2.4
|
| 26 |
+
megatron_core==0.10.0
|
| 27 |
natsort==8.4.0
|
| 28 |
nltk==3.9.1
|
| 29 |
numpy==2.2.6
|