RezinWiz committed
Commit ac5551c · verified · 1 Parent(s): 97b3ec6

Upload 15 files

Files changed (15)
  1. .env +27 -0
  2. .gitattributes +2 -35
  3. .gitignore +181 -0
  4. 2.0 +1 -0
  5. 3.0.0 +1 -0
  6. Dockerfile +31 -0
  7. README.md +18 -7
  8. api.py +744 -0
  9. api_diagnostics.py +321 -0
  10. download_model.py +67 -0
  11. easi_calculator.py +198 -0
  12. image_processing.py +125 -0
  13. model_loader.py +216 -0
  14. render.yaml +17 -0
  15. requirements.txt +11 -0
.env ADDED
@@ -0,0 +1,27 @@
+ # =========================
+ # API Configuration
+ # =========================
+ API_HOST=0.0.0.0
+ API_PORT=8000
+ DEBUG=True
+
+ # =========================
+ # Model Paths
+ # =========================
+ # Local model directories (these will be auto-created/downloaded if missing)
+ DERM_MODEL_PATH=./derm_foundation
+ EASI_MODEL_PATH=./trained_model/easi_severity_model_derm_foundation_individual.pkl
+
+ # =========================
+ # Image Processing Settings
+ # =========================
+ MAX_IMAGE_SIZE_MB=10
+ PROCESSED_IMAGE_SIZE=448
+
+ # =========================
+ # API Security (optional for production)
+ # =========================
+ # API_KEY=your-secret-key-here
+
+
+
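For reference, a minimal sketch of how these settings might be read at startup, using only `os.environ` (loading the `.env` file into the environment, e.g. via python-dotenv, is an assumption — requirements.txt is not shown in this diff):

```python
import os

# Hypothetical config reader for the .env values above; the variable names
# match the file, but how .env gets into the environment is assumed.
API_HOST = os.environ.get("API_HOST", "0.0.0.0")
API_PORT = int(os.environ.get("API_PORT", "8000"))
DEBUG = os.environ.get("DEBUG", "False").lower() == "true"
DERM_MODEL_PATH = os.environ.get("DERM_MODEL_PATH", "./derm_foundation")
EASI_MODEL_PATH = os.environ.get(
    "EASI_MODEL_PATH",
    "./trained_model/easi_severity_model_derm_foundation_individual.pkl",
)
MAX_IMAGE_SIZE_MB = int(os.environ.get("MAX_IMAGE_SIZE_MB", "10"))
PROCESSED_IMAGE_SIZE = int(os.environ.get("PROCESSED_IMAGE_SIZE", "448"))
```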
.gitattributes CHANGED
@@ -1,35 +1,2 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ # Auto detect text files and perform LF normalization
+ * text=auto
.gitignore ADDED
@@ -0,0 +1,181 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ # For a library or package, you might want to ignore these files since the code is
+ # intended to run in multiple environments; otherwise, check them in:
+ # .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # UV
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ #uv.lock
+
+ # poetry
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+ #poetry.lock
+
+ # pdm
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+ #pdm.lock
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+ # in version control.
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+ .pdm.toml
+ .pdm-python
+ .pdm-build/
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
+ #.idea/
+
+ # Ruff stuff:
+ .ruff_cache/
+
+ # PyPI configuration file
+ .pypirc
+
+ # Cursor
+ # Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
+ # exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
+ # refer to https://docs.cursor.com/context/ignore-files
+ .cursorignore
+ .cursorindexingignore
2.0 ADDED
@@ -0,0 +1 @@
+ Requirement already satisfied: numpy in c:\users\ralph\downloads\eczemanage_api\venv\lib\site-packages (2.2.6)
3.0.0 ADDED
@@ -0,0 +1 @@
+ Requirement already satisfied: keras in c:\users\ralph\downloads\eczemanage_api\venv\lib\site-packages (2.15.0)
Dockerfile ADDED
@@ -0,0 +1,31 @@
+ # Dockerfile for Hugging Face Spaces (same as before, but optimized for HF)
+
+ FROM python:3.9-slim
+
+ WORKDIR /app
+
+ # Install system dependencies
+ RUN apt-get update && apt-get install -y --no-install-recommends \
+     gcc \
+     curl \
+     && rm -rf /var/lib/apt/lists/*
+
+ # Copy requirements and install
+ COPY requirements.txt .
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Copy application files
+ COPY . .
+
+ # Download model during build (HF has plenty of RAM for this)
+ RUN python download_model.py
+
+ # Expose port (HF Spaces uses 8000 by default)
+ EXPOSE 8000
+
+ # Health check
+ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+     CMD curl -f http://localhost:8000/health || exit 1
+
+ # Start server
+ CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000"]
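The HEALTHCHECK above probes `GET /health` with curl. A sketch of the same liveness probe in Python, useful when waiting for the container to finish loading models (the local base URL is an assumption; the `status` field comes from api.py's `/health` response):

```python
import time

import requests  # assumes the requests package is installed

BASE_URL = "http://localhost:8000"  # assumed address of a locally running container


def wait_until_healthy(timeout_s: float = 120.0, interval_s: float = 5.0) -> bool:
    """Poll GET /health until the API reports 'ok', or give up after timeout_s."""
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        try:
            body = requests.get(f"{BASE_URL}/health", timeout=10).json()
            # api.py returns "ok" only once both models are loaded, else "degraded"
            if body.get("status") == "ok":
                return True
        except requests.RequestException:
            pass  # server not up yet; keep polling
        time.sleep(interval_s)
    return False


if __name__ == "__main__":
    print("healthy" if wait_until_healthy() else "timed out")
```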
README.md CHANGED
@@ -1,12 +1,23 @@
  ---
- title: Eczemanage
- emoji: 🐢
- colorFrom: green
- colorTo: gray
+ title: EASI Severity Prediction API
+ emoji: 🔬
+ colorFrom: blue
+ colorTo: green
  sdk: docker
  pinned: false
- license: mit
- short_description: Hosting for Eczemanage App
+ app_port: 8000
  ---

- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # EASI Severity Prediction API
+
+ FastAPI-based REST API for predicting EASI scores from dermatological images.
+
+ ## Endpoints
+ - `POST /predict` - Upload image and get EASI predictions
+ - `GET /health` - Health check
+ - `GET /conditions` - List available conditions
+
+ ## Usage
+ ```bash
+ curl -X POST "https://YOUR-USERNAME-easi-api.hf.space/predict" \
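The curl example above is cut off in the diff view. As a sketch, the same request from Python: the Space URL is the same placeholder, and the multipart field name `file` and the response fields match the `/predict` endpoint in api.py below.

```python
import requests  # assumes the requests package is installed

# Placeholder Space URL, as in the truncated curl example above
API_URL = "https://YOUR-USERNAME-easi-api.hf.space"

# POST /predict expects a multipart upload under the field name "file"
with open("skin_image.jpg", "rb") as f:
    resp = requests.post(
        f"{API_URL}/predict",
        files={"file": ("skin_image.jpg", f, "image/jpeg")},
        timeout=120,
    )
resp.raise_for_status()
result = resp.json()
print(result["total_easi_score"], result["severity_interpretation"])
```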
api.py ADDED
@@ -0,0 +1,744 @@
+ """
+ EASI Severity Prediction REST API
+ ==================================
+
+ FastAPI-based REST API for predicting EASI scores from dermatological images.
+ Designed for integration with Flutter mobile applications.
+
+ Endpoints:
+ - POST /predict - Upload image and get EASI predictions
+ - GET /health - Health check endpoint
+ - GET /conditions - Get list of available conditions
+
+ Installation:
+     pip install fastapi uvicorn python-multipart pillow tensorflow numpy pandas huggingface-hub requests
+
+ Run:
+     uvicorn api:app --host 0.0.0.0 --port 8000 --reload
+ """
+
+ import os
+ import warnings
+ import logging
+ from typing import List, Dict, Any, Optional
+ from io import BytesIO
+ from pathlib import Path
+
+ # Suppress warnings
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
+ os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
+ os.environ['MLIR_CRASH_REPRODUCER_DIRECTORY'] = ''
+ warnings.filterwarnings('ignore')
+ logging.getLogger('absl').setLevel(logging.ERROR)
+
+ import tensorflow as tf
+ tf.get_logger().setLevel('ERROR')
+ tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
+
+ from fastapi import FastAPI, File, UploadFile, HTTPException, status
+ from fastapi.middleware.cors import CORSMiddleware
+ from fastapi.responses import JSONResponse
+ from pydantic import BaseModel, Field
+ import numpy as np
+ from PIL import Image
+ import pickle
+ import pandas as pd
+ import requests
+ from huggingface_hub import hf_hub_download, login
+
+ # Initialize FastAPI app
+ app = FastAPI(
+     title="EASI Severity Prediction API",
+     description="REST API for predicting EASI scores from skin images",
+     version="1.0.0"
+ )
+
+ # CORS middleware for Flutter web/mobile
+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=["*"],  # In production, specify your Flutter app domain
+     allow_credentials=True,
+     allow_methods=["*"],
+     allow_headers=["*"],
+ )
+
+ # Configuration
+ HF_REPO_ID = "google/derm-foundation"
+ DERM_FOUNDATION_PATH = "./derm_foundation/"
+ R2_BASE_URL = os.environ.get("R2_BASE_URL", "https://r2-worker.eczemanage.workers.dev")
+
+ # Get Hugging Face token from environment variable
+ HF_TOKEN = os.environ.get("HUGGINGFACE_TOKEN")
+
+ # Response Models
+ class ConditionPrediction(BaseModel):
+     condition: str
+     probability: float = Field(..., ge=0, le=1)
+     confidence: float = Field(..., ge=0)
+     weight: float = Field(..., ge=0)
+     easi_category: Optional[str] = None
+     easi_contribution: int = Field(..., ge=0, le=3)
+
+ class EASIComponent(BaseModel):
+     name: str
+     score: int = Field(..., ge=0, le=3)
+     contributing_conditions: List[Dict[str, Any]]
+
+ class PredictionResponse(BaseModel):
+     success: bool
+     total_easi_score: int = Field(..., ge=0, le=12)
+     severity_interpretation: str
+     easi_components: Dict[str, EASIComponent]
+     predicted_conditions: List[ConditionPrediction]
+     summary_statistics: Dict[str, float]
+     image_info: Dict[str, Any]
+
+ class HealthResponse(BaseModel):
+     status: str
+     models_loaded: Dict[str, bool]
+     available_conditions: int
+     hf_token_configured: bool
+     model_source: str
+
+ class ErrorResponse(BaseModel):
+     success: bool = False
+     error: str
+     detail: Optional[str] = None
+
+
+ # Model wrapper class
+ class DermFoundationNeuralNetwork:
+     def __init__(self):
+         self.model = None
+         self.mlb = None
+         self.embedding_scaler = None
+         self.confidence_scaler = None
+         self.weighted_scaler = None
+
+     def load_model(self, filepath):
+         try:
+             with open(filepath, 'rb') as f:
+                 model_data = pickle.load(f)
+
+             self.mlb = model_data['mlb']
+             self.embedding_scaler = model_data['embedding_scaler']
+             self.confidence_scaler = model_data['confidence_scaler']
+             self.weighted_scaler = model_data['weighted_scaler']
+
+             keras_model_path = model_data['keras_model_path']
+             if os.path.exists(keras_model_path):
+                 self.model = tf.keras.models.load_model(keras_model_path)
+                 return True
+             else:
+                 return False
+         except Exception as e:
+             print(f"Error loading model: {e}")
+             return False
+
+     def predict(self, embedding):
+         if self.model is None:
+             return None
+
+         if len(embedding.shape) == 1:
+             embedding = embedding.reshape(1, -1)
+
+         embedding_scaled = self.embedding_scaler.transform(embedding)
+         predictions = self.model.predict(embedding_scaled, verbose=0)
+
+         condition_probs = predictions['conditions'][0]
+         individual_confidences = predictions['individual_confidences'][0]
+         individual_weights = predictions['individual_weights'][0]
+
+         condition_threshold = 0.3
+         predicted_condition_indices = np.where(condition_probs > condition_threshold)[0]
+
+         predicted_conditions = []
+         predicted_confidences = []
+         predicted_weights_dict = {}
+
+         for idx in predicted_condition_indices:
+             condition_name = self.mlb.classes_[idx]
+             condition_prob = float(condition_probs[idx])
+
+             if individual_confidences[idx] > 0:
+                 confidence_orig = self.confidence_scaler.inverse_transform([[individual_confidences[idx]]])[0, 0]
+             else:
+                 confidence_orig = 0.0
+
+             if individual_weights[idx] > 0:
+                 weight_orig = self.weighted_scaler.inverse_transform([[individual_weights[idx]]])[0, 0]
+             else:
+                 weight_orig = 0.0
+
+             predicted_conditions.append(condition_name)
+             predicted_confidences.append(max(0, confidence_orig))
+             predicted_weights_dict[condition_name] = max(0, weight_orig)
+
+         all_condition_probs = {}
+         all_confidences = {}
+         all_weights = {}
+
+         for i, class_name in enumerate(self.mlb.classes_):
+             all_condition_probs[class_name] = float(condition_probs[i])
+
+             if individual_confidences[i] > 0:
+                 conf_orig = self.confidence_scaler.inverse_transform([[individual_confidences[i]]])[0, 0]
+                 all_confidences[class_name] = max(0, conf_orig)
+             else:
+                 all_confidences[class_name] = 0.0
+
+             if individual_weights[i] > 0:
+                 weight_orig = self.weighted_scaler.inverse_transform([[individual_weights[i]]])[0, 0]
+                 all_weights[class_name] = max(0, weight_orig)
+             else:
+                 all_weights[class_name] = 0.0
+
+         return {
+             'dermatologist_skin_condition_on_label_name': predicted_conditions,
+             'dermatologist_skin_condition_confidence': predicted_confidences,
+             'weighted_skin_condition_label': predicted_weights_dict,
+             'all_condition_probabilities': all_condition_probs,
+             'all_individual_confidences': all_confidences,
+             'all_individual_weights': all_weights,
+             'condition_threshold': condition_threshold
+         }
+
+
+ # Helper function to download from Cloudflare R2 with chunked streaming
+ def download_derm_foundation_from_r2(output_dir):
+     """Download Derm Foundation model from Cloudflare R2 using memory-efficient streaming"""
+     try:
+         print(f"Downloading Derm Foundation model from R2 ({R2_BASE_URL})...")
+         os.makedirs(output_dir, exist_ok=True)
+
+         # Files to download
+         files_to_download = [
+             "saved_model.pb",
+             "variables/variables.index",
+             "variables/variables.data-00000-of-00001"
+         ]
+
+         for file_path in files_to_download:
+             print(f"Downloading {file_path}...")
+             url = f"{R2_BASE_URL}/{file_path}"
+             local_path = os.path.join(output_dir, file_path)
+
+             # Create subdirectories if needed
+             os.makedirs(os.path.dirname(local_path), exist_ok=True)
+
+             # Download file with streaming (ULTRA MEMORY EFFICIENT)
+             # Use tiny chunk size and aggressive garbage collection
+             import gc
+
+             with requests.get(url, stream=True, timeout=900) as response:
+                 response.raise_for_status()
+
+                 total_size = int(response.headers.get('content-length', 0))
+                 downloaded = 0
+                 chunk_count = 0
+
+                 # Write directly to disk in tiny chunks (256KB to minimize memory)
+                 with open(local_path, 'wb') as f:
+                     for chunk in response.iter_content(chunk_size=256*1024):  # 256KB chunks
+                         if chunk:
+                             f.write(chunk)
+                             f.flush()  # Force write to disk
+                             downloaded += len(chunk)
+                             chunk_count += 1
+
+                             # Aggressive garbage collection every 10 chunks (~2.5MB)
+                             if chunk_count % 10 == 0:
+                                 gc.collect()
+
+                             # Less frequent progress updates to reduce print overhead
+                             if total_size > 0 and chunk_count % 20 == 0:
+                                 progress = (downloaded / total_size) * 100
+                                 mb_downloaded = downloaded / (1024*1024)
+                                 mb_total = total_size / (1024*1024)
+                                 print(f"  Progress: {progress:.1f}% ({mb_downloaded:.1f}/{mb_total:.1f} MB)")
+
+             print()  # New line after progress
+             gc.collect()  # Final cleanup
+
+             print(f"✓ Downloaded: {file_path}")
+
+         print(f"✓ Derm Foundation model downloaded successfully from R2")
+         return True
+     except Exception as e:
+         print(f"✗ Error downloading from R2: {e}")
+         import traceback
+         traceback.print_exc()
+         return False
+
+
+ # Helper function to download from Hugging Face (fallback) with memory-efficient streaming
+ def download_derm_foundation_from_hf(output_dir):
+     """Download Derm Foundation model from Hugging Face using memory-efficient streaming"""
+     try:
+         # Login to Hugging Face if token is available
+         if HF_TOKEN:
+             print("Authenticating with Hugging Face...")
+             login(token=HF_TOKEN)
+         else:
+             print("WARNING: No HF token found. Attempting download without authentication...")
+
+         print(f"Downloading Derm Foundation model from Hugging Face...")
+         os.makedirs(output_dir, exist_ok=True)
+
+         # Files to download
+         files_to_download = [
+             "saved_model.pb",
+             "variables/variables.data-00000-of-00001",
+             "variables/variables.index"
+         ]
+
+         for file_path in files_to_download:
+             print(f"Downloading {file_path}...")
+             local_path = os.path.join(output_dir, file_path)
+
+             # Create subdirectories if needed
+             os.makedirs(os.path.dirname(local_path), exist_ok=True)
+
+             # Download file with token if available
+             # hf_hub_download handles streaming internally
+             downloaded_path = hf_hub_download(
+                 repo_id=HF_REPO_ID,
+                 filename=file_path,
+                 token=HF_TOKEN,
+                 cache_dir=None,
+                 local_dir=output_dir,
+                 local_dir_use_symlinks=False,
+                 resume_download=True  # Resume if interrupted
+             )
+             print(f"✓ Downloaded: {file_path}")
+
+         print(f"✓ Derm Foundation model downloaded successfully from HuggingFace")
+         return True
+     except Exception as e:
+         print(f"✗ Error downloading from Hugging Face: {e}")
+         print(f"Make sure HUGGINGFACE_TOKEN is set in Render environment variables")
+         import traceback
+         traceback.print_exc()
+         return False
+
+
+ # EASI calculation functions
+ def calculate_easi_scores(predictions):
+     easi_categories = {
+         'erythema': {
+             'name': 'Erythema (Redness)',
+             'conditions': [
+                 'Post-Inflammatory hyperpigmentation', 'Erythema ab igne', 'Erythema annulare centrifugum',
+                 'Erythema elevatum diutinum', 'Erythema gyratum repens', 'Erythema multiforme',
+                 'Erythema nodosum', 'Flagellate erythema', 'Annular erythema', 'Drug Rash',
+                 'Allergic Contact Dermatitis', 'Irritant Contact Dermatitis', 'Contact dermatitis',
+                 'Acute dermatitis', 'Chronic dermatitis', 'Acute and chronic dermatitis',
+                 'Sunburn', 'Photodermatitis', 'Phytophotodermatitis', 'Rosacea',
+                 'Seborrheic Dermatitis', 'Stasis Dermatitis', 'Perioral Dermatitis',
+                 'Burn erythema of abdominal wall', 'Burn erythema of back of hand',
+                 'Burn erythema of lower leg', 'Cellulitis', 'Infection of skin',
+                 'Viral Exanthem', 'Infected eczema', 'Crusted eczematous dermatitis',
+                 'Inflammatory dermatosis', 'Vasculitis of the skin', 'Leukocytoclastic Vasculitis',
+                 'Cutaneous lupus', 'CD - Contact dermatitis', 'Acute dermatitis, NOS',
+                 'Herpes Simplex', 'Hypersensitivity', 'Impetigo', 'Pigmented purpuric eruption',
+                 'Pityriasis rosea', 'Tinea', 'Tinea Versicolor'
+             ]
+         },
+         'induration': {
+             'name': 'Induration/Papulation (Swelling/Bumps)',
+             'conditions': [
+                 'Prurigo nodularis', 'Urticaria', 'Granuloma annulare', 'Morphea',
+                 'Scleroderma', 'Lichen Simplex Chronicus', 'Lichen planus', 'lichenoid eruption',
+                 'Lichen nitidus', 'Lichen spinulosus', 'Lichen striatus', 'Keratosis pilaris',
+                 'Molluscum Contagiosum', 'Verruca vulgaris', 'Folliculitis', 'Acne',
+                 'Hidradenitis', 'Nodular vasculitis', 'Sweet syndrome', 'Necrobiosis lipoidica',
+                 'Basal Cell Carcinoma', 'SCC', 'SCCIS', 'SK', 'ISK',
+                 'Cutaneous T Cell Lymphoma', 'Skin cancer', 'Adnexal neoplasm',
+                 'Insect Bite', 'Milia', 'Miliaria', 'Xanthoma', 'Psoriasis',
+                 'Lichen planus/lichenoid eruption'
+             ]
+         },
+         'excoriation': {
+             'name': 'Excoriation (Scratching Damage)',
+             'conditions': [
+                 'Inflicted skin lesions', 'Scabies', 'Abrasion', 'Abrasion of wrist',
+                 'Superficial wound of body region', 'Scrape', 'Animal bite - wound',
+                 'Pruritic dermatitis', 'Prurigo', 'Atopic dermatitis', 'Scab'
+             ]
+         },
+         'lichenification': {
+             'name': 'Lichenification (Skin Thickening)',
+             'conditions': [
+                 'Lichenified eczematous dermatitis', 'Acanthosis nigricans',
+                 'Hyperkeratosis of skin', 'HK - Hyperkeratosis', 'Keratoderma',
+                 'Ichthyosis', 'Ichthyosiform dermatosis', 'Chronic eczema',
+                 'Psoriasis', 'Xerosis'
+             ]
+         }
+     }
+
+     def probability_to_score(prob):
+         if prob < 0.171:
+             return 0
+         elif prob < 0.238:
+             return 1
+         elif prob < 0.421:
+             return 2
+         elif prob < 0.614:
+             return 3
+         else:
+             return 3
+
+     easi_results = {}
+     all_condition_probs = predictions['all_condition_probabilities']
+
+     for component, category_info in easi_categories.items():
+         category_conditions = []
+
+         for condition_name, probability in all_condition_probs.items():
+             if condition_name.lower() == 'eczema':
+                 continue
+
+             if condition_name in category_info['conditions']:
+                 category_conditions.append({
+                     'condition': condition_name,
+                     'probability': probability,
+                     'individual_score': probability_to_score(probability)
+                 })
+
+         category_conditions = [c for c in category_conditions if c['individual_score'] > 0]
+         category_conditions.sort(key=lambda x: x['probability'], reverse=True)
+
+         component_score = sum(c['individual_score'] for c in category_conditions)
+         component_score = min(component_score, 3)
+
+         easi_results[component] = {
+             'name': category_info['name'],
+             'score': component_score,
+             'contributing_conditions': category_conditions
+         }
+
+     total_easi = sum(result['score'] for result in easi_results.values())
+
+     return easi_results, total_easi
+
+
+ def get_severity_interpretation(total_easi):
+     if total_easi == 0:
+         return "No significant EASI features detected"
+     elif total_easi <= 3:
+         return "Mild EASI severity"
+     elif total_easi <= 6:
+         return "Moderate EASI severity"
+     elif total_easi <= 9:
+         return "Severe EASI severity"
+     else:
+         return "Very Severe EASI severity"
+
+
+ # Image processing functions
+ def smart_crop_to_square(image):
+     width, height = image.size
+     if width == height:
+         return image
+
+     size = min(width, height)
+     left = (width - size) // 2
+     top = (height - size) // 2
+     right = left + size
+     bottom = top + size
+
+     return image.crop((left, top, right, bottom))
+
+
+ def generate_derm_foundation_embedding(model, image):
+     try:
+         if image.mode != 'RGB':
+             image = image.convert('RGB')
+
+         buf = BytesIO()
+         image.save(buf, format='JPEG')
+         image_bytes = buf.getvalue()
+
+         input_tensor = tf.train.Example(features=tf.train.Features(
+             feature={'image/encoded': tf.train.Feature(
+                 bytes_list=tf.train.BytesList(value=[image_bytes]))
+             })).SerializeToString()
+
+         infer = model.signatures["serving_default"]
+         output = infer(inputs=tf.constant([input_tensor]))
+
+         if 'embedding' in output:
+             embedding_vector = output['embedding'].numpy().flatten()
+         else:
+             key = list(output.keys())[0]
+             embedding_vector = output[key].numpy().flatten()
+
+         return embedding_vector
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=f"Error generating embedding: {str(e)}")
+
+
+ # Global model instances
+ derm_model = None
+ easi_model = None
+ model_source = "not_loaded"
+
+
+ @app.on_event("startup")
+ async def load_models():
+     """Load models on startup"""
+     global derm_model, easi_model, model_source
+
+     # Force garbage collection before starting
+     import gc
+     gc.collect()
+
+     # Check if model exists (should be pre-downloaded in Docker or already cached)
+     if not os.path.exists(DERM_FOUNDATION_PATH) or not os.path.exists(os.path.join(DERM_FOUNDATION_PATH, "saved_model.pb")):
+         print("=" * 60)
+         print("Derm Foundation model not found locally.")
+         print("=" * 60)
+
+         # Try R2 first (fast)
+         print("\n[1/2] Attempting download from Cloudflare R2...")
+         success = download_derm_foundation_from_r2(DERM_FOUNDATION_PATH)
+
+         if success:
+             model_source = "cloudflare_r2"
+         else:
+             # Fallback to HuggingFace
+             print("\n[2/2] R2 failed, trying HuggingFace as fallback...")
+
+             if not HF_TOKEN:
+                 print("=" * 60)
+                 print("WARNING: HUGGINGFACE_TOKEN environment variable not set!")
+                 print("Set it in Render Dashboard > Environment > Environment Variables")
+                 print("Variable name: HUGGINGFACE_TOKEN")
+                 print("Variable value: <your-hf-token>")
+                 print("=" * 60)
+
+             success = download_derm_foundation_from_hf(DERM_FOUNDATION_PATH)
+             if success:
+                 model_source = "huggingface"
+             else:
+                 print("=" * 60)
+                 print("ERROR: Failed to download model from both R2 and HuggingFace!")
+                 print("=" * 60)
+                 model_source = "failed"
+     else:
+         print("✓ Derm Foundation model found locally (pre-downloaded or cached)")
+         model_source = "local_cache"
+
+     # Load Derm Foundation model
+     if os.path.exists(os.path.join(DERM_FOUNDATION_PATH, "saved_model.pb")):
+         try:
+             print(f"Loading Derm-Foundation model from: {DERM_FOUNDATION_PATH}")
+             # Force garbage collection before loading large model
+             gc.collect()
+
+             derm_model = tf.saved_model.load(DERM_FOUNDATION_PATH)
+             print(f"✓ Derm-Foundation model loaded successfully (source: {model_source})")
+
+             # Cleanup after loading
+             gc.collect()
+         except Exception as e:
+             print(f"✗ Failed to load Derm Foundation model: {str(e)}")
+
+     # Load EASI model (keep this local in your repo)
+     model_path = './trained_model/easi_severity_model_derm_foundation_individual.pkl'
+     if os.path.exists(model_path):
+         easi_model = DermFoundationNeuralNetwork()
+         success = easi_model.load_model(model_path)
+         if success:
+             print(f"✓ EASI model loaded from: {model_path}")
+         else:
+             print(f"✗ Failed to load EASI model")
+             easi_model = None
+     else:
+         print(f"✗ EASI model not found at: {model_path}")
+
+     if derm_model is None or easi_model is None:
+         print("=" * 60)
+         print("WARNING: Some models failed to load!")
+         print(f"Derm Foundation: {'✓' if derm_model else '✗'}")
+         print(f"EASI Model: {'✓' if easi_model else '✗'}")
+         print("=" * 60)
+     else:
+         print("=" * 60)
+         print("✓ All models loaded successfully!")
+         print(f"Model source: {model_source}")
+         print("=" * 60)
+
+
+ # API Endpoints
+
+ @app.get("/")
+ async def root():
+     """Root endpoint"""
+     return {
+         "message": "EASI Severity Prediction API",
+         "version": "1.0.0",
+         "model_source": model_source,
+         "docs": "/docs",
+         "health": "/health",
+         "predict": "/predict",
+         "conditions": "/conditions"
+     }
+
+
+ @app.get("/health", response_model=HealthResponse)
+ async def health_check():
+     """Health check endpoint"""
+     return {
+         "status": "ok" if (derm_model is not None and easi_model is not None) else "degraded",
+         "models_loaded": {
+             "derm_foundation": derm_model is not None,
+             "easi_model": easi_model is not None
+         },
+         "available_conditions": len(easi_model.mlb.classes_) if easi_model else 0,
+         "hf_token_configured": HF_TOKEN is not None,
+         "model_source": model_source
+     }
+
+
+ @app.get("/conditions", response_model=Dict[str, List[str]])
+ async def get_conditions():
+     """Get list of available conditions"""
+     if easi_model is None:
+         raise HTTPException(status_code=503, detail="EASI model not loaded")
+
+     return {
+         "conditions": easi_model.mlb.classes_.tolist()
+     }
+
+
+ @app.post("/predict", response_model=PredictionResponse)
+ async def predict_easi(
+     file: UploadFile = File(..., description="Skin image file (JPG, JPEG, PNG)")
+ ):
+     """
+     Predict EASI scores from uploaded skin image.
+
+     - **file**: Image file (JPG, JPEG, PNG)
+     - Returns: EASI scores, component breakdown, and condition predictions
+     """
+
+     # Validate models loaded
+     if derm_model is None or easi_model is None:
+         raise HTTPException(
+             status_code=503,
+             detail="Models not loaded. Check server logs."
+         )
+
+     # Validate file type
+     if not file.content_type.startswith('image/'):
+         raise HTTPException(
+             status_code=400,
+             detail="File must be an image (JPG, JPEG, PNG)"
+         )
+
+     try:
+         # Read and process image
+         image_bytes = await file.read()
+         original_image = Image.open(BytesIO(image_bytes)).convert('RGB')
+         original_size = original_image.size
+
+         # Process to 448x448
+         cropped_img = smart_crop_to_square(original_image)
+         processed_img = cropped_img.resize((448, 448), Image.Resampling.LANCZOS)
+
+         # Generate embedding
+         embedding = generate_derm_foundation_embedding(derm_model, processed_img)
+
+         # Make prediction
+         predictions = easi_model.predict(embedding)
+
+         if predictions is None:
+             raise HTTPException(status_code=500, detail="Prediction failed")
+
+         # Calculate EASI scores
+         easi_results, total_easi = calculate_easi_scores(predictions)
+         severity = get_severity_interpretation(total_easi)
+
+         # Format predicted conditions
+         predicted_conditions = []
+         for i, condition in enumerate(predictions['dermatologist_skin_condition_on_label_name']):
+             prob = predictions['all_condition_probabilities'][condition]
+             conf = predictions['dermatologist_skin_condition_confidence'][i]
+             weight = predictions['weighted_skin_condition_label'][condition]
+
+             # Find EASI category
+             easi_category = None
+             easi_contribution = 0
+             for cat_key, cat_info in easi_results.items():
+                 for contrib in cat_info['contributing_conditions']:
+                     if contrib['condition'] == condition:
+                         easi_category = cat_info['name']
+                         easi_contribution = contrib['individual_score']
+                         break
+
+             predicted_conditions.append(ConditionPrediction(
+                 condition=condition,
+                 probability=float(prob),
+                 confidence=float(conf),
+                 weight=float(weight),
+                 easi_category=easi_category,
+                 easi_contribution=easi_contribution
+             ))
+
+         # Summary statistics
+         summary_stats = {
+             "total_conditions": len(predicted_conditions),
+             "average_confidence": float(np.mean(predictions['dermatologist_skin_condition_confidence'])) if predicted_conditions else 0.0,
+             "average_weight": float(np.mean(list(predictions['weighted_skin_condition_label'].values()))) if predicted_conditions else 0.0,
+             "total_weight": float(sum(predictions['weighted_skin_condition_label'].values()))
+         }
+
+         # Format EASI components
+         easi_components_formatted = {
+             component: EASIComponent(
+                 name=result['name'],
+                 score=result['score'],
+                 contributing_conditions=result['contributing_conditions']
+             )
+             for component, result in easi_results.items()
+         }
+
+         return PredictionResponse(
+             success=True,
+             total_easi_score=total_easi,
+             severity_interpretation=severity,
+             easi_components=easi_components_formatted,
+             predicted_conditions=predicted_conditions,
+             summary_statistics=summary_stats,
+             image_info={
+                 "original_size": f"{original_size[0]}x{original_size[1]}",
+                 "processed_size": "448x448",
+                 "filename": file.filename
+             }
+         )
+
+     except HTTPException:
+         raise
+     except Exception as e:
+         raise HTTPException(
+             status_code=500,
+             detail=f"Error processing image: {str(e)}"
+         )
+
+
+ @app.exception_handler(HTTPException)
+ async def http_exception_handler(request, exc):
+     return JSONResponse(
+         status_code=exc.status_code,
+         content=ErrorResponse(
+             error=exc.detail,
+             detail=str(exc)
+         ).dict()
+     )
+
+
+ if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run(app, host="0.0.0.0", port=8000)
api_diagnostics.py ADDED
@@ -0,0 +1,321 @@
+ """
+ API Diagnostic Script
+ =====================
+ Comprehensive diagnostics for EASI Severity Prediction API
+ Checks dependencies, models, file paths, and numpy compatibility issues
+ """
+
+ import sys
+ import os
+ from pathlib import Path
+ import importlib.util
+
+ def print_section(title):
+     """Print a formatted section header"""
+     print("\n" + "=" * 70)
+     print(f" {title}")
+     print("=" * 70)
+
+
+ def check_python_environment():
+     """Check Python version and environment"""
+     print_section("Python Environment")
+     print(f"Python Version: {sys.version}")
+     print(f"Python Executable: {sys.executable}")
+     print(f"Platform: {sys.platform}")
+     print(f"Current Working Directory: {os.getcwd()}")
+
+
+ def check_package_versions():
+     """Check installed package versions"""
+     print_section("Package Versions")
+
+     packages = [
+         'numpy',
+         'tensorflow',
+         'fastapi',
+         'uvicorn',
+         'pillow',
+         'pandas',
+         'sklearn',
+         'pydantic',
+     ]
+
+     for package in packages:
+         try:
+             if package == 'pillow':
+                 import PIL
+                 print(f"✓ PIL (Pillow): {PIL.__version__}")
+             elif package == 'sklearn':
+                 import sklearn
+                 print(f"✓ scikit-learn: {sklearn.__version__}")
+             else:
+                 module = __import__(package)
+                 version = getattr(module, '__version__', 'Unknown')
+                 print(f"✓ {package}: {version}")
+         except ImportError as e:
+             print(f"✗ {package}: NOT INSTALLED - {e}")
+         except Exception as e:
+             print(f"✗ {package}: ERROR - {e}")
+
+
+ def check_numpy_detailed():
+     """Detailed numpy diagnostics"""
+     print_section("NumPy Detailed Diagnostics")
+
+     try:
+         import numpy as np
+         print(f"✓ NumPy Version: {np.__version__}")
+         print(f"✓ NumPy Location: {np.__file__}")
+
+         # Check for numpy._core
+         try:
+             import numpy._core
+             print(f"✓ numpy._core exists: {numpy._core.__file__}")
+         except ImportError:
+             print("✗ numpy._core NOT FOUND (NumPy < 2.0)")
+             print("  This is the main issue! NumPy 2.0+ required for numpy._core")
+
+         # Check for numpy.core (old path)
+         try:
+             import numpy.core
+             print(f"✓ numpy.core exists: {numpy.core.__file__}")
+         except ImportError:
+             print("✗ numpy.core NOT FOUND")
+
+         # Check numpy configuration
+         print(f"\nNumPy Configuration:")
+         try:
+             np.show_config()
+         except Exception:
+             print("  Could not show numpy config")
+
+     except ImportError as e:
+         print(f"✗ NumPy NOT INSTALLED: {e}")
+     except Exception as e:
+         print(f"✗ NumPy ERROR: {e}")
+
+
+ def check_tensorflow():
+     """Check TensorFlow installation and GPU support"""
+     print_section("TensorFlow Diagnostics")
+
+     try:
+         import tensorflow as tf
+         print(f"✓ TensorFlow Version: {tf.__version__}")
+         print(f"✓ TensorFlow Location: {tf.__file__}")
+         print(f"✓ Built with CUDA: {tf.test.is_built_with_cuda()}")
+         print(f"✓ GPU Available: {len(tf.config.list_physical_devices('GPU')) > 0}")
+
+         # List devices
+         devices = tf.config.list_physical_devices()
+         print(f"\nAvailable Devices:")
+         for device in devices:
+             print(f"  - {device}")
+
+     except ImportError as e:
+         print(f"✗ TensorFlow NOT INSTALLED: {e}")
+     except Exception as e:
+         print(f"✗ TensorFlow ERROR: {e}")
+
+
+ def check_model_files():
+     """Check for required model files"""
+     print_section("Model Files Check")
+
+     # Check Derm Foundation model paths
+     print("\n1. Derm Foundation Model:")
+     derm_paths = [
+         "./derm_foundation/",
+         "./",
+         "./saved_model/",
+         "./model/",
+         "./derm-foundation/"
+     ]
+
+     found_derm = False
+     for path in derm_paths:
+         saved_model_pb = os.path.join(path, "saved_model.pb")
+         if os.path.exists(saved_model_pb):
+             print(f"  ✓ Found: {saved_model_pb}")
+             print(f"    Size: {os.path.getsize(saved_model_pb)} bytes")
+             found_derm = True
+
+             # Check for variables folder
+             variables_path = os.path.join(path, "variables")
+             if os.path.exists(variables_path):
+                 print(f"    Variables folder: {variables_path}")
+                 var_files = os.listdir(variables_path)
+                 print(f"    Variable files: {len(var_files)}")
+         else:
+             print(f"  ✗ Not found: {saved_model_pb}")
+
+     if not found_derm:
+         print("\n  ⚠ WARNING: No Derm Foundation model found!")
+
+     # Check EASI model
+     print("\n2. EASI Model:")
+     easi_path = './trained_model/easi_severity_model_derm_foundation_individual.pkl'
+     if os.path.exists(easi_path):
+         print(f"  ✓ Found: {easi_path}")
+         print(f"    Size: {os.path.getsize(easi_path)} bytes")
+
+         # Try to peek at pickle contents
+         try:
+             import pickle
+             with open(easi_path, 'rb') as f:
+                 try:
+                     model_data = pickle.load(f)
+                     print(f"    Keys in model: {list(model_data.keys())}")
+                     if 'keras_model_path' in model_data:
+                         keras_path = model_data['keras_model_path']
+                         print(f"    Keras model path: {keras_path}")
+                         if os.path.exists(keras_path):
+                             print(f"    ✓ Keras model exists: {keras_path}")
+                         else:
+                             print(f"    ✗ Keras model NOT FOUND: {keras_path}")
+                 except Exception as e:
+                     print(f"    ✗ Error loading pickle: {e}")
+         except ImportError:
+             print("    ✗ pickle module not available")
+     else:
+         print(f"  ✗ Not found: {easi_path}")
+         print(f"    Current directory: {os.getcwd()}")
+
+         # Check if trained_model directory exists
+         if os.path.exists('./trained_model/'):
+             print(f"    trained_model/ exists. Contents:")
+             for item in os.listdir('./trained_model/'):
+                 print(f"      - {item}")
+
+
+ def check_directory_structure():
+     """Check directory structure"""
+     print_section("Directory Structure")
+
+     current_dir = os.getcwd()
+     print(f"Current Directory: {current_dir}\n")
+
+     # List all items in current directory
+     items = os.listdir('.')
+     print("Contents:")
+     for item in sorted(items):
+         path = os.path.join('.', item)
+         if os.path.isdir(path):
+             print(f"  📁 {item}/")
+         else:
+             size = os.path.getsize(path)
+             print(f"  📄 {item} ({size} bytes)")
+
+
+ def test_pickle_load():
+     """Test if pickle can load with current numpy"""
+     print_section("Pickle Load Test")
+
+     easi_path = './trained_model/easi_severity_model_derm_foundation_individual.pkl'
+
+     if not os.path.exists(easi_path):
+         print(f"✗ Model file not found: {easi_path}")
+         return
+
+     try:
+         import pickle
+         import numpy as np
+
+         print(f"Attempting to load: {easi_path}")
+         print(f"NumPy version: {np.__version__}")
+
+         with open(easi_path, 'rb') as f:
+             model_data = pickle.load(f)
+
+         print("✓ Successfully loaded pickle file!")
+         print(f"Model data keys: {list(model_data.keys())}")
+
+     except ModuleNotFoundError as e:
+         print(f"✗ Module not found: {e}")
+         print("\n  DIAGNOSIS: The pickle file was saved with a newer NumPy version.")
+         print("  SOLUTION: Upgrade numpy to version 2.0 or higher")
+         print('  Command: pip install --upgrade "numpy>=2.0"')
+
+     except Exception as e:
+         print(f"✗ Error loading pickle: {e}")
+         print(f"  Error type: {type(e).__name__}")
+
+
+ def check_sklearn():
+     """Check scikit-learn and its compatibility"""
+     print_section("Scikit-learn Diagnostics")
+
+     try:
+         import sklearn
+         print(f"✓ scikit-learn Version: {sklearn.__version__}")
+
+         # Check for common sklearn modules used in the model
+         try:
+             from sklearn.preprocessing import MultiLabelBinarizer, StandardScaler
+             print("✓ MultiLabelBinarizer available")
+             print("✓ StandardScaler available")
+         except ImportError as e:
+             print(f"✗ Import error: {e}")
+
+     except ImportError:
+         print("✗ scikit-learn NOT INSTALLED")
+
+
+ def provide_solutions():
+     """Provide solutions based on diagnostics"""
+     print_section("Recommended Solutions")
+
+     print("""
+     Based on the error "No module named 'numpy._core'", here are the solutions:
+
+     1. UPGRADE NUMPY (Recommended):
+        pip install --upgrade "numpy>=2.0.0"
+
+        This is the cleanest solution, as newer packages expect NumPy 2.0+.
+
+     2. If NumPy 2.0 causes compatibility issues, RECREATE THE PICKLE:
+        - Load the original model with the old NumPy version
+        - Save it again with protocol 4 for better compatibility
+        - Or rebuild the model from scratch
+
+     3. CHECK ALL DEPENDENCIES:
+        pip install --upgrade tensorflow numpy pandas scikit-learn pillow fastapi uvicorn
+
+     4. CREATE A FRESH VIRTUAL ENVIRONMENT:
+        python -m venv fresh_env
+        source fresh_env/bin/activate  # On Windows: fresh_env\\Scripts\\activate
+        pip install -r requirements.txt
+
+     5. VERIFY PACKAGE COMPATIBILITY:
+        pip list --outdated
+        pip check
+
+     After upgrading, restart your API server.
+     """)
+
+
+ def main():
+     """Run all diagnostics"""
+     print("=" * 70)
+     print(" EASI API DIAGNOSTIC TOOL")
+     print(" Analyzing system configuration and dependencies...")
+     print("=" * 70)
+
+     check_python_environment()
+     check_package_versions()
+     check_numpy_detailed()
+     check_tensorflow()
+     check_sklearn()
+     check_directory_structure()
+     check_model_files()
+     test_pickle_load()
+     provide_solutions()
+
+     print("\n" + "=" * 70)
+     print(" Diagnostics Complete!")
+     print("=" * 70)
+
+
+ if __name__ == "__main__":
+     main()
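To complement solution 2 in `provide_solutions` above ("save it again with protocol 4"), a minimal re-save sketch — run it in an environment where `pickle.load` already succeeds; the source path mirrors the one used throughout this repo, and the destination filename is hypothetical:

```python
import pickle

# Path as used elsewhere in this repo; DST is an illustrative name
SRC = "./trained_model/easi_severity_model_derm_foundation_individual.pkl"
DST = "./trained_model/easi_severity_model_protocol4.pkl"

with open(SRC, "rb") as f:
    model_data = pickle.load(f)  # requires a NumPy version compatible with the pickle

with open(DST, "wb") as f:
    # Protocol 4 is readable by Python 3.4+, which improves portability
    pickle.dump(model_data, f, protocol=4)

print(f"Re-saved keys {list(model_data.keys())} to {DST}")
```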
download_model.py ADDED
@@ -0,0 +1,67 @@
+ """
+ Download model during Docker build on Hugging Face Spaces
+ """
+ import os
+ import requests
+ import gc
+
+ R2_BASE_URL = "https://r2-worker.eczemanage.workers.dev"
+ OUTPUT_DIR = "./derm_foundation/"
+
+ def download_model():
+     print("=" * 70)
+     print("DOWNLOADING DERM FOUNDATION MODEL (Hugging Face Spaces Build)")
+     print("=" * 70)
+
+     os.makedirs(OUTPUT_DIR, exist_ok=True)
+
+     files = [
+         "saved_model.pb",
+         "variables/variables.index",
+         "variables/variables.data-00000-of-00001"
+     ]
+
+     for file_path in files:
+         print(f"\n📥 Downloading {file_path}...")
+         url = f"{R2_BASE_URL}/{file_path}"
+         local_path = os.path.join(OUTPUT_DIR, file_path)
+         os.makedirs(os.path.dirname(local_path), exist_ok=True)
+
+         try:
+             with requests.get(url, stream=True, timeout=1800) as r:
+                 r.raise_for_status()
+                 total_size = int(r.headers.get('content-length', 0))
+                 downloaded = 0
+                 chunk_count = 0
+
+                 with open(local_path, 'wb') as f:
+                     for chunk in r.iter_content(chunk_size=2*1024*1024):  # 2MB chunks (HF has RAM)
+                         if chunk:
+                             f.write(chunk)
+                             f.flush()
+                             downloaded += len(chunk)
+                             chunk_count += 1
+
+                             if chunk_count % 5 == 0:
+                                 gc.collect()
+
+                             if total_size > 0 and chunk_count % 10 == 0:
+                                 progress = (downloaded / total_size) * 100
+                                 mb_downloaded = downloaded / (1024*1024)
+                                 mb_total = total_size / (1024*1024)
+                                 print(f"  Progress: {progress:.1f}% ({mb_downloaded:.1f}/{mb_total:.1f} MB)")
+
+             gc.collect()
+
+             print(f"✅ Successfully downloaded: {file_path}")
+
+         except Exception as e:
+             print(f"❌ Error downloading {file_path}: {e}")
+             raise
+
+     print("\n" + "=" * 70)
+     print("✅ MODEL DOWNLOAD COMPLETE! Ready to serve predictions.")
+     print("=" * 70)
+
+ if __name__ == "__main__":
+     download_model()
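A post-download check could verify that the three files actually landed on disk before the image is considered built — a sketch only; the file list and output directory come straight from download_model.py above:

```python
import os

OUTPUT_DIR = "./derm_foundation/"
EXPECTED_FILES = [
    "saved_model.pb",
    "variables/variables.index",
    "variables/variables.data-00000-of-00001",
]


def verify_download() -> bool:
    """Return True only if every expected model file exists and is non-empty."""
    ok = True
    for rel_path in EXPECTED_FILES:
        path = os.path.join(OUTPUT_DIR, rel_path)
        if os.path.isfile(path) and os.path.getsize(path) > 0:
            print(f"OK       {rel_path} ({os.path.getsize(path)} bytes)")
        else:
            print(f"MISSING  {rel_path}")
            ok = False
    return ok


if __name__ == "__main__":
    raise SystemExit(0 if verify_download() else 1)
```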
easi_calculator.py ADDED
@@ -0,0 +1,198 @@
1
+ import numpy as np
2
+ from typing import Dict, Tuple, List
3
+
4
+
5
+ # EASI component categorization
6
+ EASI_CATEGORIES = {
7
+ 'erythema': {
8
+ 'name': 'Erythema (Redness)',
9
+ 'conditions': [
10
+ 'Post-Inflammatory hyperpigmentation',
11
+ 'Erythema ab igne', 'Erythema annulare centrifugum',
12
+ 'Erythema elevatum diutinum', 'Erythema gyratum repens',
13
+ 'Erythema multiforme', 'Erythema nodosum',
14
+ 'Flagellate erythema', 'Annular erythema',
15
+ 'Drug Rash', 'Allergic Contact Dermatitis',
16
+ 'Irritant Contact Dermatitis', 'Contact dermatitis',
17
+ 'Acute dermatitis', 'Chronic dermatitis',
18
+ 'Acute and chronic dermatitis',
19
+ 'Sunburn', 'Photodermatitis', 'Phytophotodermatitis',
20
+ 'Rosacea', 'Seborrheic Dermatitis', 'Stasis Dermatitis',
21
+ 'Perioral Dermatitis',
22
+ 'Burn erythema of abdominal wall',
23
+ 'Burn erythema of back of hand',
24
+ 'Burn erythema of lower leg',
25
+ 'Cellulitis', 'Infection of skin', 'Viral Exanthem',
26
+ 'Infected eczema', 'Crusted eczematous dermatitis',
27
+ 'Inflammatory dermatosis',
28
+ 'Vasculitis of the skin', 'Leukocytoclastic Vasculitis',
29
+ 'Cutaneous lupus',
30
+ 'CD - Contact dermatitis',
31
+ 'Acute dermatitis, NOS',
32
+ 'Herpes Simplex',
33
+ 'Hypersensitivity',
34
+ 'Impetigo',
35
+ 'Pigmented purpuric eruption',
36
+ 'Pityriasis rosea',
37
+ 'Tinea',
38
+ 'Tinea Versicolor'
39
+ ]
40
+ },
41
+ 'induration': {
42
+ 'name': 'Induration/Papulation (Swelling/Bumps)',
43
+ 'conditions': [
44
+ 'Prurigo nodularis', 'Urticaria', 'Granuloma annulare', 'Morphea',
45
+ 'Scleroderma', 'Lichen Simplex Chronicus',
46
+ 'Lichen planus', 'lichenoid eruption',
47
+ 'Lichen nitidus', 'Lichen spinulosus', 'Lichen striatus',
48
+ 'Keratosis pilaris', 'Molluscum Contagiosum',
49
+ 'Verruca vulgaris', 'Folliculitis', 'Acne',
50
+ 'Hidradenitis', 'Nodular vasculitis', 'Sweet syndrome',
51
+ 'Necrobiosis lipoidica', 'Basal Cell Carcinoma',
52
+ 'SCC', 'SCCIS', 'SK', 'ISK',
53
+ 'Cutaneous T Cell Lymphoma', 'Skin cancer',
54
+ 'Adnexal neoplasm', 'Insect Bite', 'Milia',
55
+ 'Miliaria', 'Xanthoma', 'Psoriasis',
56
+ 'Lichen planus/lichenoid eruption'
57
+ ]
58
+ },
59
+ 'excoriation': {
60
+ 'name': 'Excoriation (Scratching Damage)',
61
+ 'conditions': [
62
+ 'Inflicted skin lesions',
63
+ 'Scabies', 'Abrasion', 'Abrasion of wrist',
64
+ 'Superficial wound of body region', 'Scrape',
65
+ 'Animal bite - wound', 'Pruritic dermatitis',
66
+ 'Prurigo', 'Atopic dermatitis',
67
+ 'Scab'
68
+ ]
69
+ },
70
+ 'lichenification': {
71
+ 'name': 'Lichenification (Skin Thickening)',
72
+ 'conditions': [
73
+ 'Lichenified eczematous dermatitis',
74
+ 'Acanthosis nigricans', 'Hyperkeratosis of skin',
75
+ 'HK - Hyperkeratosis', 'Keratoderma',
76
+ 'Ichthyosis', 'Ichthyosiform dermatosis',
77
+ 'Chronic eczema', 'Psoriasis',
78
+ 'Xerosis'
79
+ ]
80
+ }
81
+ }
82
+
83
+
84
+ def probability_to_score(prob: float) -> int:
85
+ """Convert probability to EASI score (0-3)"""
86
+ if prob < 0.171:
87
+ return 0
88
+ elif prob < 0.238:
89
+ return 1
90
+ elif prob < 0.421:
91
+ return 2
92
+ elif prob < 0.614:
93
+ return 3
94
+ else:
95
+ return 3
96
+
97
+
98
+ def calculate_easi_scores(predictions: Dict) -> Tuple[Dict, int]:
99
+ """
100
+ Calculate EASI component scores based on condition probabilities
101
+
102
+ Args:
103
+ predictions: Dictionary containing prediction results
104
+
105
+ Returns:
106
+ Tuple of (easi_results dict, total_easi_score int)
107
+ """
108
+ easi_results = {}
109
+ all_condition_probs = predictions['all_condition_probabilities']
110
+
111
+ for component, category_info in EASI_CATEGORIES.items():
112
+ # Find all conditions in this category
113
+ category_conditions = []
114
+
115
+ for condition_name, probability in all_condition_probs.items():
116
+ # Skip "Eczema" as it should not be included
117
+ if condition_name.lower() == 'eczema':
118
+ continue
119
+
120
+ # Check if condition is in category
121
+ if condition_name in category_info['conditions']:
122
+ individual_score = probability_to_score(probability)
123
+ if individual_score > 0:
124
+ category_conditions.append({
125
+ 'condition': condition_name,
126
+ 'probability': probability,
127
+ 'individual_score': individual_score
128
+ })
129
+
130
+ # Sort by probability
131
+ category_conditions.sort(key=lambda x: x['probability'], reverse=True)
132
+
133
+ # Calculate component score (sum, capped at 3)
134
+ component_score = sum(c['individual_score'] for c in category_conditions)
135
+ component_score = min(component_score, 3)
136
+
137
+ easi_results[component] = {
138
+ 'name': category_info['name'],
139
+ 'score': component_score,
140
+ 'contributing_conditions': category_conditions
141
+ }
142
+
143
+ # Calculate total EASI score
144
+ total_easi = sum(result['score'] for result in easi_results.values())
145
+
146
+ return easi_results, total_easi
147
+
148
+
149
+ def format_easi_response(easi_results: Dict, total_easi: int) -> Dict:
+     """
+     Format EASI results for API response
+
+     Args:
+         easi_results: EASI calculation results
+         total_easi: Total EASI score
+
+     Returns:
+         Formatted dictionary for JSON response
+     """
+     return {
+         'total_score': total_easi,
+         'components': {
+             'erythema': easi_results['erythema']['score'],
+             'induration': easi_results['induration']['score'],
+             'excoriation': easi_results['excoriation']['score'],
+             'lichenification': easi_results['lichenification']['score']
+         },
+         'severity': get_severity_level(total_easi),
+         'component_details': {
+             component: {
+                 'name': data['name'],
+                 'score': data['score'],
+                 'contributing_conditions': [
+                     {
+                         'condition': c['condition'],
+                         'probability': round(c['probability'], 4),
+                         'contribution': c['individual_score']
+                     }
+                     for c in data['contributing_conditions']
+                 ]
+             }
+             for component, data in easi_results.items()
+         }
+     }
+
+
+ def get_severity_level(total_easi: int) -> str:
+     """Get severity level description from the total EASI score (0-12)"""
+     if total_easi == 0:
+         return "No significant EASI features detected"
+     elif total_easi <= 3:
+         return "Mild EASI severity"
+     elif total_easi <= 6:
+         return "Moderate EASI severity"
+     elif total_easi <= 9:
+         return "Severe EASI severity"
+     else:
+         return "Very Severe EASI severity"
image_processing.py ADDED
@@ -0,0 +1,125 @@
+ from PIL import Image
+ import tensorflow as tf
+ import numpy as np
+ from io import BytesIO
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+
+ def smart_crop_to_square(image: Image.Image) -> Image.Image:
+     """Crop image to square focusing on center"""
+     width, height = image.size
+
+     if width == height:
+         return image
+
+     # Crop to square using center
+     size = min(width, height)
+     left = (width - size) // 2
+     top = (height - size) // 2
+     right = left + size
+     bottom = top + size
+
+     return image.crop((left, top, right, bottom))
+
+
+ def preprocess_image(image: Image.Image, target_size: int = 448) -> Image.Image:
+     """
+     Preprocess image for model input
+
+     Args:
+         image: PIL Image
+         target_size: Target size for square image (default 448)
+
+     Returns:
+         Preprocessed PIL Image
+     """
+     try:
+         # Ensure RGB
+         if image.mode != 'RGB':
+             image = image.convert('RGB')
+
+         # Smart crop to square
+         image = smart_crop_to_square(image)
+
+         # Resize to target size
+         image = image.resize((target_size, target_size), Image.Resampling.LANCZOS)
+
+         logger.info(f"Image preprocessed to {target_size}x{target_size}")
+         return image
+
+     except Exception as e:
+         logger.error(f"Error preprocessing image: {e}")
+         raise
+
+
+ def generate_embedding(model, image: Image.Image) -> np.ndarray:
+     """
+     Generate embedding from Derm Foundation model
+
+     Args:
+         model: Loaded Derm Foundation model
+         image: Preprocessed PIL Image (448x448)
+
+     Returns:
+         Embedding vector as numpy array
+     """
+     try:
+         # Save image to bytes
+         buf = BytesIO()
+         image.save(buf, format='JPEG')
+         image_bytes = buf.getvalue()
+
+         # Format input as TensorFlow Example
+         input_tensor = tf.train.Example(features=tf.train.Features(
+             feature={'image/encoded': tf.train.Feature(
+                 bytes_list=tf.train.BytesList(value=[image_bytes]))
+             })).SerializeToString()
+
+         # Call inference
+         infer = model.get_inference_function()
+         output = infer(inputs=tf.constant([input_tensor]))
+
+         # Extract embedding
+         if 'embedding' in output:
+             embedding_vector = output['embedding'].numpy().flatten()
+         else:
+             # Use first available output
+             key = list(output.keys())[0]
+             embedding_vector = output[key].numpy().flatten()
+
+         logger.info(f"Generated embedding of shape {embedding_vector.shape}")
+         return embedding_vector
+
+     except Exception as e:
+         logger.error(f"Error generating embedding: {e}")
+         raise
+
+
+ def validate_image(file_content: bytes, max_size_mb: int = 10) -> bool:
+     """
+     Validate uploaded image
+
+     Args:
+         file_content: Raw image bytes
+         max_size_mb: Maximum allowed file size in MB
+
+     Returns:
+         True if valid, False otherwise
+     """
+     # Check file size
+     size_mb = len(file_content) / (1024 * 1024)
+     if size_mb > max_size_mb:
+         logger.warning(f"Image too large: {size_mb:.2f}MB > {max_size_mb}MB")
+         return False
+
+     # Try to open as image
+     try:
+         image = Image.open(BytesIO(file_content))
+         # Check if it's a valid image format
+         image.verify()
+         return True
+     except Exception as e:
+         logger.warning(f"Invalid image: {e}")
+         return False
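
Taken together, these helpers form the image-side half of the pipeline. A short end-to-end sketch (the input file name and model directory are placeholders; DermFoundationModel comes from model_loader.py below):

from io import BytesIO
from PIL import Image
from image_processing import validate_image, preprocess_image, generate_embedding
from model_loader import DermFoundationModel

with open("sample_skin_photo.jpg", "rb") as f:   # placeholder input image
    raw = f.read()

if validate_image(raw, max_size_mb=10):
    image = preprocess_image(Image.open(BytesIO(raw)), target_size=448)
    derm = DermFoundationModel("./derm_foundation")   # illustrative model directory
    if derm.load():
        embedding = generate_embedding(derm, image)
        print(embedding.shape)   # flattened 1-D embedding vector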
model_loader.py ADDED
@@ -0,0 +1,216 @@
+ import os
+ import pickle
+ import numpy as np
+ import tensorflow as tf
+ import logging
+
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+
+ class DermFoundationModel:
+     """Wrapper for Google's Derm Foundation model"""
+
+     def __init__(self, model_path: str):
+         self.model_path = model_path
+         self.model = None
+
+     def load(self) -> bool:
+         """Load the Derm Foundation SavedModel"""
+         try:
+             saved_model_pb = os.path.join(self.model_path, "saved_model.pb")
+
+             if not os.path.exists(saved_model_pb):
+                 logger.error(f"Model file not found at {saved_model_pb}")
+                 return False
+
+             self.model = tf.saved_model.load(self.model_path)
+             logger.info(f"Derm Foundation model loaded from {self.model_path}")
+             return True
+
+         except Exception as e:
+             logger.error(f"Error loading Derm Foundation model: {e}")
+             return False
+
+     def get_inference_function(self):
+         """Get the model's inference signature"""
+         if self.model is None:
+             raise RuntimeError("Model not loaded")
+         return self.model.signatures["serving_default"]
+
+
+ class EASIModel:
+     """Wrapper for EASI severity prediction model"""
+
+     def __init__(self, model_path: str):
+         self.model_path = model_path
+         self.model = None
+         self.mlb = None
+         self.embedding_scaler = None
+         self.confidence_scaler = None
+         self.weighted_scaler = None
+
+     def load(self) -> bool:
+         """Load the EASI model and preprocessors"""
+         try:
+             if not os.path.exists(self.model_path):
+                 logger.error(f"EASI model not found at {self.model_path}")
+                 return False
+
+             logger.info(f"Loading pickle from {self.model_path}")
+             with open(self.model_path, 'rb') as f:
+                 model_data = pickle.load(f)
+
+             logger.info("Pickle loaded successfully")
+
+             # Load preprocessing components
+             self.mlb = model_data['mlb']
+             self.embedding_scaler = model_data['embedding_scaler']
+             self.confidence_scaler = model_data['confidence_scaler']
+             self.weighted_scaler = model_data['weighted_scaler']
+
+             logger.info("Preprocessors loaded")
+
+             # Load Keras model
+             keras_model_path = model_data['keras_model_path']
+             logger.info(f"Keras model path: {keras_model_path}")
+
+             if not os.path.exists(keras_model_path):
+                 logger.error(f"Keras model not found at {keras_model_path}")
+                 logger.error(f"Current working directory: {os.getcwd()}")
+                 # List the expected parent directory only if it exists, so the
+                 # diagnostic itself cannot raise
+                 parent_dir = os.path.dirname(keras_model_path) or '.'
+                 if os.path.isdir(parent_dir):
+                     logger.error(f"Files in {parent_dir}: {os.listdir(parent_dir)}")
+                 return False
+
+             logger.info(f"Loading Keras model from {keras_model_path}")
+             self.model = tf.keras.models.load_model(keras_model_path)
+
+             logger.info(f"EASI model loaded successfully from {self.model_path}")
+             return True
+
+         except Exception as e:
+             logger.error(f"Error loading EASI model: {e}", exc_info=True)
+             return False
+
+     def predict(self, embedding):
+         """Make predictions on a single embedding"""
+         if self.model is None:
+             raise RuntimeError("Model not loaded")
+
+         if len(embedding.shape) == 1:
+             embedding = embedding.reshape(1, -1)
+
+         # Scale embedding
+         embedding_scaled = self.embedding_scaler.transform(embedding)
+
+         # Make predictions
+         predictions = self.model.predict(embedding_scaled, verbose=0)
+
+         # Process outputs
+         condition_probs = predictions['conditions'][0]
+         individual_confidences = predictions['individual_confidences'][0]
+         individual_weights = predictions['individual_weights'][0]
+
+         # Threshold for predictions
+         condition_threshold = 0.3
+         predicted_indices = np.where(condition_probs > condition_threshold)[0]
+
+         # Build results
+         predicted_conditions = []
+         predicted_confidences = []
+         predicted_weights_dict = {}
+
+         for idx in predicted_indices:
+             condition_name = self.mlb.classes_[idx]
+
+             # Inverse transform individual outputs back to their original scales
+             if individual_confidences[idx] > 0:
+                 confidence_orig = self.confidence_scaler.inverse_transform(
+                     [[individual_confidences[idx]]]
+                 )[0, 0]
+             else:
+                 confidence_orig = 0.0
+
+             if individual_weights[idx] > 0:
+                 weight_orig = self.weighted_scaler.inverse_transform(
+                     [[individual_weights[idx]]]
+                 )[0, 0]
+             else:
+                 weight_orig = 0.0
+
+             predicted_conditions.append(condition_name)
+             predicted_confidences.append(max(0, confidence_orig))
+             predicted_weights_dict[condition_name] = max(0, weight_orig)
+
+         # All condition probabilities
+         all_condition_probs = {}
+         all_confidences = {}
+         all_weights = {}
+
+         for i, class_name in enumerate(self.mlb.classes_):
+             all_condition_probs[class_name] = float(condition_probs[i])
+
+             if individual_confidences[i] > 0:
+                 conf_orig = self.confidence_scaler.inverse_transform(
+                     [[individual_confidences[i]]]
+                 )[0, 0]
+                 all_confidences[class_name] = max(0, conf_orig)
+             else:
+                 all_confidences[class_name] = 0.0
+
+             if individual_weights[i] > 0:
+                 weight_orig = self.weighted_scaler.inverse_transform(
+                     [[individual_weights[i]]]
+                 )[0, 0]
+                 all_weights[class_name] = max(0, weight_orig)
+             else:
+                 all_weights[class_name] = 0.0
+
+         return {
+             'dermatologist_skin_condition_on_label_name': predicted_conditions,
+             'dermatologist_skin_condition_confidence': predicted_confidences,
+             'weighted_skin_condition_label': predicted_weights_dict,
+             'all_condition_probabilities': all_condition_probs,
+             'all_individual_confidences': all_confidences,
+             'all_individual_weights': all_weights,
+             'condition_threshold': condition_threshold
+         }
+
+
+ class ModelManager:
+     """Singleton manager for all models"""
+
+     _instance = None
+
+     def __new__(cls):
+         if cls._instance is None:
+             cls._instance = super(ModelManager, cls).__new__(cls)
+             cls._instance.derm_model = None
+             cls._instance.easi_model = None
+         return cls._instance
+
+     def load_models(self, derm_path: str, easi_path: str) -> bool:
+         """Load both models"""
+         logger.info("Loading models...")
+
+         # Load Derm Foundation model
+         self.derm_model = DermFoundationModel(derm_path)
+         if not self.derm_model.load():
+             return False
+
+         # Load EASI model
+         self.easi_model = EASIModel(easi_path)
+         if not self.easi_model.load():
+             return False
+
+         logger.info("All models loaded successfully")
+         return True
+
+     def is_ready(self) -> bool:
+         """Check if both models are loaded"""
+         return (self.derm_model is not None and
+                 self.derm_model.model is not None and
+                 self.easi_model is not None and
+                 self.easi_model.model is not None)
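
At API startup the singleton ModelManager is the natural entry point; a minimal wiring sketch, with illustrative paths (in the real flow the embedding comes from image_processing.generate_embedding, and the 6144-dim zero vector below is only a placeholder matching the published Derm Foundation embedding size):

import numpy as np
from model_loader import ModelManager

manager = ModelManager()   # __new__ always hands back the same instance
loaded = manager.load_models(
    derm_path="./derm_foundation",   # illustrative paths
    easi_path="./trained_model/easi_severity_model_derm_foundation_individual.pkl",
)
if loaded and manager.is_ready():
    embedding = np.zeros(6144, dtype=np.float32)   # placeholder embedding
    predictions = manager.easi_model.predict(embedding)
    print(predictions['all_condition_probabilities'])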
render.yaml ADDED
@@ -0,0 +1,17 @@
+ services:
+   - type: web
+     name: easi-prediction-api
+     env: python
+     region: oregon
+     plan: free
+     branch: main
+     buildCommand: pip install -r requirements.txt
+     startCommand: uvicorn api:app --host 0.0.0.0 --port $PORT
+     healthCheckPath: /health
+     envVars:
+       - key: PYTHON_VERSION
+         value: 3.11.0
+       - key: TF_CPP_MIN_LOG_LEVEL
+         value: 2
+       - key: TF_ENABLE_ONEDNN_OPTS
+         value: 0
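
Once deployed, the healthCheckPath above doubles as a manual smoke test. A quick sketch (the onrender.com URL is an assumption derived from the service name, since Render assigns <name>.onrender.com by default):

import requests

resp = requests.get("https://easi-prediction-api.onrender.com/health", timeout=30)
print(resp.status_code, resp.json())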
requirements.txt ADDED
@@ -0,0 +1,11 @@
+ fastapi==0.104.1
+ uvicorn[standard]==0.24.0
+ python-multipart==0.0.6
+ pillow==10.1.0
+ tensorflow>=2.13.0
+ numpy==1.26.4
+ scikit-learn==1.3.2
+ pandas==2.1.3
+ python-dotenv==1.0.0
+ huggingface-hub==0.35.3
+ requests==2.31.0
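
After pip install -r requirements.txt, a local client call against the running service might look like the sketch below; the /predict route and the "file" form field are assumptions, since api.py is not shown in this part of the diff:

import requests

with open("lesion.jpg", "rb") as f:   # placeholder image
    resp = requests.post(
        "http://localhost:8000/predict",
        files={"file": ("lesion.jpg", f, "image/jpeg")},
        timeout=120,
    )
print(resp.json())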