import { useState, useEffect } from 'react'
import { useNavigate } from 'react-router-dom'
import { Card, CardHeader, CardTitle, CardContent } from '@/components/ui/card'
import { Button } from '@/components/ui/button'
import { Badge } from '@/components/ui/badge'
import {
  BookOpen,
  Brain,
  Zap,
  Download,
  Trash2,
  Loader2,
  Info,
  CheckCircle,
  Cloud,
  HardDrive,
} from 'lucide-react'

interface ModelInfo {
  model_name: string
  name: string
  supports_thinking: boolean
  ram_required_gb: string
  size_gb: string
  is_loaded: boolean
  type: 'local' | 'api'
}

interface ModelsResponse {
  models: ModelInfo[]
  current_model: string
}

export function Models() {
  const navigate = useNavigate()
  const [models, setModels] = useState<ModelInfo[]>([])
  const [loading, setLoading] = useState(true)
  const [modelLoading, setModelLoading] = useState<string | null>(null)

  useEffect(() => {
    fetchModels()
  }, [])

  // Fetch the model catalog from the backend
  const fetchModels = async () => {
    try {
      const baseUrl = `${window.location.protocol}//${window.location.host}`
      const res = await fetch(`${baseUrl}/models`)
      if (!res.ok) {
        throw new Error(`Failed to fetch models: ${res.status}`)
      }
      const data: ModelsResponse = await res.json()
      setModels(data.models)
    } catch (error) {
      console.error('Error fetching models:', error)
    } finally {
      setLoading(false)
    }
  }

  // Ask the backend to load a model, then refresh the list
  const loadModel = async (modelName: string) => {
    setModelLoading(modelName)
    try {
      const baseUrl = `${window.location.protocol}//${window.location.host}`
      const res = await fetch(`${baseUrl}/load-model`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ model_name: modelName }),
      })
      if (!res.ok) {
        throw new Error(`Failed to load model: ${res.status}`)
      }
      // Refresh models list
      fetchModels()
    } catch (error) {
      console.error('Error loading model:', error)
    } finally {
      setModelLoading(null)
    }
  }

  // Ask the backend to unload a model, then refresh the list
  const unloadModel = async (modelName: string) => {
    setModelLoading(modelName)
    try {
      const baseUrl = `${window.location.protocol}//${window.location.host}`
      const res = await fetch(`${baseUrl}/unload-model`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ model_name: modelName }),
      })
      if (!res.ok) {
        throw new Error(`Failed to unload model: ${res.status}`)
      }
      // Refresh models list
      fetchModels()
    } catch (error) {
      console.error('Error unloading model:', error)
    } finally {
      setModelLoading(null)
    }
  }

  if (loading) {
    return (
      <div className="flex h-full items-center justify-center p-8">
        <Loader2 className="h-8 w-8 animate-spin text-muted-foreground" />
      </div>
    )
  }

  return (
    <div className="space-y-6 p-6">
      {/* Header */}
      <div>
        <h1 className="flex items-center gap-2 text-2xl font-bold">
          <BookOpen className="h-6 w-6" />
          Model Catalog
        </h1>
        <p className="text-muted-foreground">
          Browse and manage AI models for your conversations
        </p>
      </div>

      {/* Info Card */}
      <Card>
        <CardHeader>
          <CardTitle className="flex items-center gap-2">
            <Info className="h-5 w-5" />
            Model Management
          </CardTitle>
        </CardHeader>
        <CardContent className="text-sm text-muted-foreground">
          Load models to use them in the playground. Models are cached locally for
          faster access. Each model requires significant storage space and initial
          download time.
        </CardContent>
      </Card>

      {/* API Models Section */}
      <section>
        <h2 className="mb-4 flex items-center gap-2 text-xl font-semibold">
          <Cloud className="h-5 w-5" />
          API Models
          <Badge variant="secondary" className="gap-1">
            <Zap className="h-3 w-3" />
            Cloud-Powered
          </Badge>
        </h2>
        <div className="grid gap-4 md:grid-cols-2 lg:grid-cols-3">
          {models.filter(m => m.type === 'api').map((model) => (
            <ModelCard
              key={model.model_name}
              model={model}
              modelLoading={modelLoading}
              onLoad={loadModel}
              onUnload={unloadModel}
            />
          ))}
        </div>
      </section>

      {/* Local Models Section */}
      <section>
        <h2 className="mb-4 flex items-center gap-2 text-xl font-semibold">
          <HardDrive className="h-5 w-5" />
          Local Models
          <Badge variant="secondary">Self-Hosted</Badge>
        </h2>
        <div className="grid gap-4 md:grid-cols-2 lg:grid-cols-3">
          {models.filter(m => m.type === 'local').map((model) => (
            <ModelCard
              key={model.model_name}
              model={model}
              modelLoading={modelLoading}
              onLoad={loadModel}
              onUnload={unloadModel}
            />
          ))}
        </div>
      </section>
    </div>
  )
}

// ModelCard component for reusability
interface ModelCardProps {
  model: ModelInfo
  modelLoading: string | null
  onLoad: (modelName: string) => void
  onUnload: (modelName: string) => void
}

function ModelCard({ model, modelLoading, onLoad, onUnload }: ModelCardProps) {
  const isApiModel = model.type === 'api'
  const isLoading = modelLoading === model.model_name
  const isLoaded = model.is_loaded

  return (
    <Card>
      <CardHeader>
        <CardTitle className="flex items-center gap-2 text-base">
          {isApiModel ? <Cloud className="h-5 w-5" /> : <HardDrive className="h-5 w-5" />}
          {model.name}
        </CardTitle>
        <p className="text-xs text-muted-foreground">{model.model_name}</p>
        {isLoaded && (
          <Badge className="w-fit gap-1">
            <CheckCircle className="h-3 w-3" />
            Loaded
          </Badge>
        )}
      </CardHeader>
      <CardContent className="space-y-4">
        {/* Model Info */}
        <div className="space-y-1 text-sm">
          {!isApiModel && (
            <>
              <div className="flex justify-between">
                <span className="text-muted-foreground">Size:</span>
                <span>{model.size_gb}</span>
              </div>
              <div className="flex justify-between">
                <span className="text-muted-foreground">RAM Required:</span>
                <span>{model.ram_required_gb}</span>
              </div>
            </>
          )}
          <div className="flex justify-between">
            <span className="text-muted-foreground">Type:</span>
            <span>{isApiModel ? 'API' : 'Local'}</span>
          </div>
          {model.supports_thinking && (
            <div className="flex items-center justify-between">
              <span className="text-muted-foreground">Features:</span>
              <Badge variant="outline" className="gap-1">
                <Brain className="h-3 w-3" />
                Thinking
              </Badge>
            </div>
          )}
        </div>

        {/* Action Button */}
        {isLoaded ? (
          <Button
            variant="destructive"
            className="w-full"
            disabled={isLoading}
            onClick={() => onUnload(model.model_name)}
          >
            {isLoading ? (
              <Loader2 className="mr-2 h-4 w-4 animate-spin" />
            ) : (
              <Trash2 className="mr-2 h-4 w-4" />
            )}
            Unload
          </Button>
        ) : (
          <Button
            className="w-full"
            disabled={isLoading}
            onClick={() => onLoad(model.model_name)}
          >
            {isLoading ? (
              <Loader2 className="mr-2 h-4 w-4 animate-spin" />
            ) : (
              <Download className="mr-2 h-4 w-4" />
            )}
            Load
          </Button>
        )}
      </CardContent>
    </Card>
  )
}
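
// Minimal sketch of the payload shape this page expects from GET /models, kept as a
// typed example for reference. The structure follows the ModelInfo/ModelsResponse
// interfaces above; the model names and sizes below are made-up placeholders, not
// entries from the real catalog.
export const exampleModelsResponse: ModelsResponse = {
  models: [
    {
      model_name: 'example/local-7b',        // placeholder identifier
      name: 'Example Local 7B',
      supports_thinking: true,
      ram_required_gb: '8 GB',
      size_gb: '4.2 GB',
      is_loaded: false,
      type: 'local',
    },
    {
      model_name: 'example/api-model',       // placeholder identifier
      name: 'Example API Model',
      supports_thinking: false,
      ram_required_gb: 'N/A',                // API models report no local footprint
      size_gb: 'N/A',
      is_loaded: true,
      type: 'api',
    },
  ],
  current_model: 'example/api-model',
}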