Update start.py
Browse files
start.py
CHANGED
|
@@ -1,9 +1,9 @@
|
|
| 1 |
import subprocess
|
| 2 |
-
|
| 3 |
# commented because the existing llama-cpp-python package was removed from requirements.txt
|
| 4 |
-
subprocess.run("pip uninstall -y llama-cpp-python", shell=True)
|
| 5 |
|
| 6 |
-
install_command = "CMAKE_ARGS='-DGGML_CUDA=on -DCUDA_PATH=/usr/local/cuda-12.2 -DCUDAToolkit_ROOT=/usr/local/cuda-12.2 -DCUDAToolkit_INCLUDE_DIR=/usr/local/cuda-12.2/include -DCUDAToolkit_LIBRARY_DIR=/usr/local/cuda-12.2/lib64' FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir"
|
| 7 |
-
subprocess.run(install_command, shell=True)
|
| 8 |
|
| 9 |
-
subprocess.run("
|
|
|
|
| 1 |
"""Launcher script: (previously) rebuilt llama-cpp-python, now just starts the Flask app."""
import subprocess
import sys

import flask  # NOTE(review): unused at runtime — presumably kept to fail fast if Flask is missing; confirm

# Commented out because the llama-cpp-python package was removed from requirements.txt.
#subprocess.run("pip uninstall -y llama-cpp-python", shell=True)

#install_command = "CMAKE_ARGS='-DGGML_CUDA=on -DCUDA_PATH=/usr/local/cuda-12.2 -DCUDAToolkit_ROOT=/usr/local/cuda-12.2 -DCUDAToolkit_INCLUDE_DIR=/usr/local/cuda-12.2/include -DCUDAToolkit_LIBRARY_DIR=/usr/local/cuda-12.2/lib64' FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir"
#subprocess.run(install_command, shell=True)

# Bug fix: the original line was
#     subprocess.run("app.run(debug=True)", shell=True)
# which hands the Python expression `app.run(debug=True)` to the shell — the
# shell cannot execute Python source text, so the app never started.
# Launch the app script with the current interpreter instead. An argument
# list (shell=False) avoids shell quoting/injection issues, and check=True
# surfaces a non-zero exit instead of failing silently.
# NOTE(review): assumes the Flask app lives in app.py next to this script — confirm.
subprocess.run([sys.executable, "app.py"], check=True)
|