SmartMate/app/services/openai_service.py
"""
OpenAI service for handling chat completions
"""
import os
import logging
from typing import Dict, Any, Optional
from openai import OpenAI
logger = logging.getLogger(__name__)
class OpenAIService:
"""Service class for OpenAI API interactions"""
def __init__(self, api_key: str):
"""Initialize OpenAI client"""
self.client = OpenAI(api_key=api_key)
self.default_model = "gpt-3.5-turbo"
self.default_temperature = 0.7
self.max_tokens = 1000

    def chat_completion(
        self,
        message: str,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        system_message: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Generate chat completion using the OpenAI API

        Args:
            message: User message
            model: OpenAI model to use
            temperature: Response randomness (0-2)
            max_tokens: Maximum tokens in the response
            system_message: Optional system message

        Returns:
            Dict containing the reply and usage metadata

        Raises:
            Exception: If the API call fails
        """
        try:
            # Prepare messages, prepending the system prompt if one was given
            messages = []
            if system_message:
                messages.append({"role": "system", "content": system_message})
            messages.append({"role": "user", "content": message})

            # Use provided parameters or fall back to the service defaults
            model = model or self.default_model
            temperature = temperature if temperature is not None else self.default_temperature
            max_tokens = max_tokens or self.max_tokens

            logger.info(f"Making OpenAI API call with model: {model}")

            # Make API call with the new (>=1.0) OpenAI client interface
            response = self.client.chat.completions.create(
                model=model,
                messages=messages,
                temperature=temperature,
                max_tokens=max_tokens
            )

            # Extract the assistant reply and token usage from the response
            reply = response.choices[0].message.content
            result = {
                "reply": reply,
                "model": model,
                "usage": {
                    "prompt_tokens": response.usage.prompt_tokens,
                    "completion_tokens": response.usage.completion_tokens,
                    "total_tokens": response.usage.total_tokens
                }
            }

            logger.info(f"OpenAI API call successful. Tokens used: {result['usage']['total_tokens']}")
            return result
        except Exception as e:
            logger.error(f"OpenAI API call failed: {str(e)}")
            raise Exception(f"Failed to generate response: {str(e)}") from e

    def validate_message(self, message: str) -> tuple[bool, str]:
        """
        Validate user message

        Args:
            message: User input message

        Returns:
            Tuple of (is_valid, error_message)
        """
        if not message or not message.strip():
            return False, "Message cannot be empty"

        if len(message) > 4000:  # Reasonable limit
            return False, "Message too long (max 4000 characters)"

        return True, ""
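

# The block below is not part of the original file: it is a minimal usage sketch,
# assuming the `openai` package is installed and that the OPENAI_API_KEY
# environment variable holds a valid key. It shows the intended call sequence:
# validate the user input first, then request a completion and read the reply
# and token usage from the returned dict.
if __name__ == "__main__":
    service = OpenAIService(api_key=os.environ["OPENAI_API_KEY"])

    user_message = "Hello, SmartMate!"
    is_valid, error = service.validate_message(user_message)
    if not is_valid:
        print(f"Invalid message: {error}")
    else:
        result = service.chat_completion(
            user_message,
            system_message="You are a helpful assistant.",
        )
        print(result["reply"])
        print(f"Tokens used: {result['usage']['total_tokens']}")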