Upload app.py
app.py CHANGED
@@ -223,9 +223,24 @@ def load_model():
     except Exception as e:
         logger.error(f"Error loading model: {e}")
         return f"Error loading model: {e}"
-
-
-
+
+def models_ready() -> bool:
+    ready = all(x is not None for x in [
+        gemma_model, gemma_tokenizer, classifier_model, classifier_tokenizer
+    ])
+    if not ready:
+        logger.warning(
+            "models_ready=False gemma_model=%s gemma_tok=%s phi_model=%s phi_tok=%s",
+            type(gemma_model).__name__ if gemma_model is not None else None,
+            type(gemma_tokenizer).__name__ if gemma_tokenizer is not None else None,
+            type(classifier_model).__name__ if classifier_model is not None else None,
+            type(classifier_tokenizer).__name__ if classifier_tokenizer is not None else None,
+        )
+    return ready
+
+# Load model on startup
+msg = load_model()
+logger.info("load_model(): %s", msg)
 
 
 # ===================================================================
@@ -375,8 +390,7 @@ def classify_solution(question: str, solution: str):
 
 
 
-
-load_model()
+
 
 # Create Gradio interface
 with gr.Blocks(title="Math Solution Classifier", theme=gr.themes.Soft()) as app:
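Read as one change, the two hunks add a module-level models_ready() helper just below load_model(), log which of the four model/tokenizer globals is still missing, and move the startup call up next to the helper (capturing and logging its return message) instead of leaving a bare load_model() just above the Gradio block. The commit does not show where models_ready() is consumed; the sketch below is one hypothetical way classify_solution() could use it as a guard. The globals, the fallback message, and the classify_solution body are illustrative assumptions, not code from app.py.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Globals that load_model() would populate; they stay None until loading succeeds.
gemma_model = gemma_tokenizer = None
classifier_model = classifier_tokenizer = None

def models_ready() -> bool:
    # Same check as the committed helper: every object must be loaded.
    return all(x is not None for x in [
        gemma_model, gemma_tokenizer, classifier_model, classifier_tokenizer
    ])

def classify_solution(question: str, solution: str) -> str:
    # Hypothetical guard: return a readable message instead of hitting an
    # AttributeError on a None model if the Space starts serving requests
    # before loading finishes.
    if not models_ready():
        logger.warning("classify_solution called before models finished loading")
        return "Models are not loaded yet; check the Space logs for load_model() errors."
    # ... run Gemma generation and the classifier here ...
    return "correct"

Logging the type() of each global at warning level, as the committed helper does, makes it easy to tell from the Space logs which of the four objects failed to load.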