edgellm / backend /models.py
wu981526092's picture
add
6a50e97
raw
history blame
990 Bytes
"""
Pydantic models for API requests and responses
"""
from typing import List, Optional

from pydantic import BaseModel, Field
class ChatMessage(BaseModel):
    """A single message in a chat conversation."""
    role: str  # one of 'user', 'assistant', 'system'
    content: str  # the message text
class PromptRequest(BaseModel):
    """Request payload for a text-generation call.

    Carries the latest prompt plus optional conversation history,
    model selection, and sampling parameters.
    """
    prompt: str  # latest user prompt
    # Full conversation history. default_factory gives each request its
    # own fresh list instead of declaring a shared mutable [] literal.
    messages: Optional[List[ChatMessage]] = Field(default_factory=list)
    system_prompt: Optional[str] = None  # optional system instruction
    # None presumably means "use the currently loaded model" — confirm in handler.
    model_name: Optional[str] = None
    temperature: Optional[float] = 0.7  # sampling temperature
    max_new_tokens: Optional[int] = 1024  # cap on generated tokens
class PromptResponse(BaseModel):
    """Response payload for a text-generation call."""
    thinking_content: str  # reasoning/"thinking" text, if the model emits any
    content: str  # the generated answer text
    model_used: str  # name of the model that produced this response
    supports_thinking: bool  # whether that model can emit thinking content
class ModelInfo(BaseModel):
    """Metadata describing one available model."""
    # NOTE(review): in Pydantic v2, field names starting with "model_" clash
    # with the protected namespace and emit a warning — verify or configure
    # protected_namespaces=() if upgrading.
    model_name: str  # identifier used to load/select the model
    name: str  # human-readable display name
    supports_thinking: bool  # whether the model emits thinking content
    description: str  # short description for UI display
    size_gb: str  # approximate size (kept as a string, e.g. "4.2")
    is_loaded: bool  # whether the model is currently loaded in memory
    type: str  # model category; semantics defined by the backend
class ModelsResponse(BaseModel):
    """Response listing all available models and the active one."""
    models: List[ModelInfo]  # every model the backend knows about
    current_model: str  # name of the currently selected model
class ModelLoadRequest(BaseModel):
    """Request to load the named model into memory."""
    model_name: str  # identifier of the model to load
class ModelUnloadRequest(BaseModel):
    """Request to unload the named model from memory."""
    model_name: str  # identifier of the model to unload