add flash atten properly
- app.py +7 -0
- requirements.txt +1 -1
app.py
CHANGED
@@ -1,4 +1,11 @@
 import spaces
+import subprocess
+# Install flash attention, skipping CUDA build if necessary
+subprocess.run(
+    "pip install flash-attn --no-build-isolation",
+    env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
+    shell=True,
+)
 import time
 import logging
 import gradio as gr
requirements.txt
CHANGED
@@ -6,4 +6,4 @@ termcolor
 transformers
 torch
 num2words
-flash-attn
+# flash-attn