Spaces: Runtime error

Cascade Bot committed · 6cc7431
Parent: 865ec2f

Added llama-cpp-python and system dependencies

Files changed:
- requirements.txt +5 -0
- space.yml +4 -2
- startup.sh +12 -0
requirements.txt CHANGED

@@ -22,6 +22,11 @@ networkx>=3.2.1  # Added for graph operations
 # Model integration
 huggingface-hub>=0.19.4
 groq>=0.4.1
+llama-cpp-python>=0.2.23  # Added for local LLM support
+
+# Build dependencies
+cmake>=3.25.0  # For building llama-cpp-python
+ninja>=1.11.1  # For faster builds
 
 # Utilities
 typing-extensions>=4.0.0
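For context, a minimal sketch of how the new llama-cpp-python dependency is typically used from Python. This is illustrative only and not taken from this Space's code: the model path and generation parameters are placeholders.

    # Minimal llama-cpp-python usage sketch (illustrative; model path is a placeholder).
    from llama_cpp import Llama

    # n_gpu_layers=-1 offloads all layers to the GPU, which only helps if the wheel
    # was built with CUDA support (see the CMAKE_ARGS export added to startup.sh below).
    llm = Llama(
        model_path="models/example-7b.Q4_K_M.gguf",  # placeholder GGUF file
        n_ctx=2048,
        n_gpu_layers=-1,
    )

    result = llm("Q: What does llama-cpp-python provide? A:", max_tokens=64)
    print(result["choices"][0]["text"])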
space.yml CHANGED

@@ -58,11 +58,13 @@ dependencies:
   - "pandas>=2.1.0"
   - "scikit-learn>=1.3.2"
   - "plotly>=5.18.0"
-  - "networkx>=3.2.1"
+  - "networkx>=3.2.1"
+  - "llama-cpp-python>=0.2.23"  # Added for local LLM support
 system:
   - git-lfs
   - cmake
-  - build
+  - ninja-build  # For faster builds
+  - build-essential  # Required for compilation
   - cuda-toolkit-11-8
   - nvidia-cuda-toolkit
   - libcudnn8
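As a quick sanity check that the system packages declared above are actually available inside the Space container, something like the following could be run at startup. It is an illustrative diagnostic, not part of this commit; the list of tool names is an assumption based on the packages added here.

    # Illustrative check that the build tools requested in space.yml are on PATH.
    import shutil

    for tool in ("cmake", "ninja", "gcc", "git-lfs", "nvcc"):
        path = shutil.which(tool)
        print(f"{tool}: {path or 'NOT FOUND'}")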
startup.sh CHANGED

@@ -11,6 +11,14 @@ if [ -z "$GROQ_API_KEY" ]; then
     exit 1
 fi
 
+# Install system dependencies
+echo "Installing system dependencies..."
+apt-get update && apt-get install -y \
+    cmake \
+    ninja-build \
+    build-essential \
+    git-lfs
+
 # Update pip and install dependencies
 echo "Updating pip and installing dependencies..."
 python -m pip install --upgrade pip

@@ -37,6 +45,10 @@ if [ -z "$HUGGINGFACE_TOKEN" ]; then
     echo "Warning: HUGGINGFACE_TOKEN not set. Some features may be limited."
 fi
 
+# Set environment variables for llama-cpp-python
+export CMAKE_ARGS="-DLLAMA_CUBLAS=on"
+export FORCE_CMAKE=1
+
 # Start the application
 echo "Starting Advanced Agentic System..."
 export PYTHONPATH="${PYTHONPATH}:${PWD}"
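For reference, CMAKE_ARGS="-DLLAMA_CUBLAS=on" and FORCE_CMAKE=1 only influence llama-cpp-python when pip builds the package from source with those variables present in the environment. A small illustrative way to confirm what the installed build actually supports is sketched below; it is not part of this commit, and the llama_supports_gpu_offload binding is guarded because older releases may not expose it.

    # Illustrative check: report the installed llama-cpp-python version and,
    # where the binding is available, whether the build supports GPU offload.
    import llama_cpp

    print("llama-cpp-python version:", llama_cpp.__version__)

    if hasattr(llama_cpp, "llama_supports_gpu_offload"):
        print("GPU offload supported:", bool(llama_cpp.llama_supports_gpu_offload()))
    else:
        print("GPU offload check not available in this version.")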