update
- Dockerfile +1 -2
- app.py +4 -3
Dockerfile
CHANGED
@@ -24,5 +24,4 @@ COPY --chown=user ./out /app/out
 COPY --chown=user . /app
 
 RUN ls -R /app/out
-CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
-
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
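The CMD keeps the server on 0.0.0.0:7860, the port a Docker-based Space is expected to listen on by default. For reference only, a minimal sketch of the equivalent programmatic launch, assuming app.py exposes the FastAPI instance as `app` (the Space itself uses the CMD form above):

# sketch: what the Dockerfile CMD asks uvicorn to do, expressed in Python
import uvicorn

if __name__ == "__main__":
    # "app:app" -> module app.py, attribute `app`; bind to 0.0.0.0:7860 so the
    # Space proxy can reach the server inside the container
    uvicorn.run("app:app", host="0.0.0.0", port=7860)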
app.py
CHANGED
@@ -22,8 +22,6 @@ class PredictionInput(BaseModel):
     text: str = Field(..., title="Text to classify", description="The text to classify for malicious content")
     context: Optional[str] = Field(None, title="Context for classification", description="Optional context to provide additional information for classification")
 
-app.mount("/", StaticFiles(directory="/app/out", html=True), name="static")
-
 @app.post("/predict")
 def predict(input: PredictionInput):
     try:
@@ -35,9 +33,12 @@ def predict(input: PredictionInput):
             raise HTTPException(status_code=400, detail="Context input exceeds maximum length of 512 characters")
         if not input.context:
             result = model.predict(input.text, device="cpu")
+            print(result)
             return {"text": input.text, "prediction": result[0]["prediction"], "probabilities": result[0]["probabilities"]}
         else:
             result = model.predict([[input.text,input.context]], device="cpu")
             return {"text": input.text, "context": input.context, "prediction": result[0]["prediction"], "probabilities": result[0]["probabilities"]}
     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        raise HTTPException(status_code=500, detail=str(e))
+
+app.mount("/", StaticFiles(directory="out", html=True), name="static")
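The functional change here appears to be the mount order: a StaticFiles mount at "/" matches every path, and Starlette resolves routes in registration order, so registering it before @app.post("/predict") would shadow the API route. Moving app.mount(...) below the route definitions (and pointing it at the relative "out" directory) keeps both the static frontend and the API reachable. A quick client-side check, assuming the app is listening on http://localhost:7860; the URL and example payload are illustrative:

import requests

BASE = "http://localhost:7860"  # adjust to the deployed Space URL

# static frontend served from ./out at the root
print(requests.get(f"{BASE}/").status_code)

# /predict still resolves because the route is registered before the mount
resp = requests.post(f"{BASE}/predict", json={"text": "example input to classify"})
print(resp.json())  # expected keys: text, prediction, probabilities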