diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..c4ccd7e77305c6c174a57cbca2173632f788aa82 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,55 @@ +# Environment files with sensitive data +.env +.env.* +!.env.example + +# Git +.git +.gitignore + +# Documentation +README.md +*.md + +# Docker files +Dockerfile* +docker-compose*.yml +.dockerignore + +# Development files +.vscode/ +.idea/ + +# OS generated files +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + +# Logs +*.log +logs/ + +# Runtime data that shouldn't be in image +server/data/ +web/.next/ + +# Dependencies that are installed in container +node_modules/ +__pycache__/ +*.pyc +*.pyo +*.pyd +.Python + +# Testing +coverage/ +.pytest_cache/ +.coverage + +# Temporary files +tmp/ +temp/ \ No newline at end of file diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..8f16141cf7bced354a9a409fc7403cc7e9eb6647 --- /dev/null +++ b/.env.example @@ -0,0 +1,29 @@ +# API Configuration +API_BASE_URL=https://api.friendli.ai/dedicated/v1 +API_KEY=your_api_key_here + +# Composio Configuration +COMPOSIO_API_KEY=your_composio_api_key_here + +# Model Configuration +INTERACTION_AGENT_MODEL=your_interaction_model_here +EXECUTION_AGENT_MODEL=your_execution_model_here +EXECUTION_SEARCH_AGENT_MODEL=your_search_model_here +SUMMARIZER_MODEL=your_summarizer_model_here +EMAIL_CLASSIFIER_MODEL=your_classifier_model_here + +# Application Configuration +OPENPOKE_HOST=0.0.0.0 +OPENPOKE_PORT=8001 +OPENPOKE_CORS_ALLOW_ORIGINS=* +OPENPOKE_ENABLE_DOCS=1 + +# Web Application Configuration +NEXT_PUBLIC_API_URL=http://localhost:8001 + +# Instructions: +# 1. Copy this file to .env: cp .env.example .env +# 2. Replace all placeholder values with your actual credentials +# 3. Never commit the .env file to version control +# 4. Add .env to your .gitignore file +# 5. Use docker-compose --env-file .env up for production deployments \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..1de20e1cd2e0b78cdb813a9c9b9e7d4165fd2065 --- /dev/null +++ b/.gitignore @@ -0,0 +1,46 @@ +# Node / Next.js +node_modules/ +.next/ +web/node_modules/ +web/.next/ + +# Logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +*.log +.server.log +.server.pid + +# Envs +.env +.env.local +.env.*.local + +# OS +.DS_Store + +# Python +.venv/ +__pycache__/ +*.pyc + +# Database files +*.db +*.db-shm +*.db-wal + +# Data folder +server/data/ +data/ + +# Python virtual environment +server/venv/ + +# Build metadata +*.tsbuildinfo +/package-lock.json + +# Generated documentation / analysis artifacts +server/repomix-output.txt +server/plans/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..5ceadf3c3273bbeed90243d0d92196377284fce1 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,41 @@ +# Use Python 3.11 slim image for smaller size +FROM python:3.11-slim + +# Create a non-root user for security +RUN groupadd -r appuser && useradd -r -g appuser appuser + +# Set working directory +WORKDIR /app + +# Install system dependencies with security considerations +RUN apt-get update && apt-get install -y \ + gcc \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + +# Copy requirements first for better Docker layer caching +COPY server/requirements.txt . 
+
+# Install Python dependencies
+RUN pip install --no-cache-dir --upgrade pip && \
+    pip install --no-cache-dir -r requirements.txt
+
+# Copy server code
+COPY server/ ./server/
+
+# Create necessary directories and set permissions
+RUN mkdir -p /app/logs && \
+    chown -R appuser:appuser /app
+
+# Switch to non-root user
+USER appuser
+
+# Expose port
+EXPOSE 8001
+
+# Add health check (use the Python stdlib; curl is not installed in the slim image)
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8001/health')" || exit 1
+
+# Start the server with proper configuration for graceful shutdown
+CMD ["uvicorn", "server.server:app", "--host", "0.0.0.0", "--port", "8001", "--access-log"]
\ No newline at end of file
diff --git a/Dockerfile.web b/Dockerfile.web
new file mode 100644
index 0000000000000000000000000000000000000000..1ab3901418bc50c2c993c2b1cf4018964915b06a
--- /dev/null
+++ b/Dockerfile.web
@@ -0,0 +1,38 @@
+# Use Node.js 18 LTS Alpine for smaller size
+FROM node:18-alpine
+
+# Create a non-root user for security
+RUN addgroup -g 1001 -S nodejs && \
+    adduser -S nextjs -u 1001
+
+# Set working directory
+WORKDIR /app
+
+# Copy package files first for better layer caching
+COPY web/package*.json ./
+
+# Install dependencies (devDependencies included; `next build` needs them)
+RUN npm ci && npm cache clean --force
+
+# Copy web code
+COPY web/ .
+
+# Build the application
+RUN npm run build
+
+# Create necessary directories and set permissions
+RUN mkdir -p /app/.next/cache && \
+    chown -R nextjs:nodejs /app
+
+# Switch to non-root user
+USER nextjs
+
+# Expose port
+EXPOSE 3000
+
+# Add health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD wget --no-verbose --tries=1 --spider http://localhost:3000 || exit 1
+
+# Start the application with proper configuration
+CMD ["npm", "start"]
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..217e3dd2382df7ce2590b596cb9ea03ac8114581
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 OpenPoke Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
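The server Dockerfile's HEALTHCHECK above probes `GET /health` on port 8001, but the route itself is not part of this diff. As a minimal sketch of the shape that check assumes (illustrative only; the real application is whatever `server.server:app` exposes):

```python
from fastapi import FastAPI

app = FastAPI()

# Liveness endpoint of the shape the Dockerfile HEALTHCHECK expects:
# any 2xx response marks the container healthy.
@app.get("/health")
def health() -> dict:
    return {"status": "ok"}
```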
diff --git a/README.md b/README.md
index d17cb86865dec3fd7c93edf637ba0eba357ff45a..c4b7f6ade08466dc857ce80ac5810b1593f48feb 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,121 @@
----
-title: Guilherme34 Openpokespace
-emoji: 📈
-colorFrom: gray
-colorTo: pink
-sdk: docker
-pinned: false
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+# OpenPoke 🌴
+
+OpenPoke is a simplified, open-source take on [Interaction Company's](https://interaction.co/about) [Poke](https://poke.com/) assistant, built to show how a multi-agent orchestration stack can feel genuinely useful. It keeps the handful of things Poke is great at (email triage, reminders, and persistent agents) while staying easy to spin up locally.
+
+- Multi-agent FastAPI backend that mirrors Poke's interaction/execution split, powered by OpenAI-compatible APIs.
+- Gmail tooling via [Composio](https://composio.dev/) for drafting/replying/forwarding without leaving chat.
+- Trigger scheduler and background watchers for reminders and "important email" alerts.
+- Next.js web UI that proxies everything through the shared `.env`, so plugging in API keys is the only setup.
+
+## Requirements
+- Python 3.10+
+- Node.js 18+
+- npm 9+
+
+## Quickstart
+1. **Clone and enter the repo.**
+   ```bash
+   git clone https://github.com/shlokkhemani/OpenPoke
+   cd OpenPoke
+   ```
+2. **Create a shared env file.** Copy the template and open it in your editor:
+   ```bash
+   cp .env.example .env
+   ```
+3. **Get your API keys and add them to `.env`:**
+
+   **API Configuration (Required)**
+   - Configure your OpenAI-compatible API endpoint and API key in `.env`
+   - Set `API_BASE_URL` to your API endpoint (e.g., `https://api.friendli.ai/dedicated/v1`)
+   - Set `API_KEY` to your API key
+   - All agent models can be configured via environment variables
+
+   **Composio (Required for Gmail)**
+   - Sign in at [composio.dev](https://composio.dev/)
+   - Create an API key
+   - Set up Gmail integration and get your auth config ID
+   - Replace `your_composio_api_key_here` and `your_gmail_auth_config_id_here` in `.env`
+
+## 🚀 Quick Start (Docker - Recommended)
+
+If you have Docker and docker-compose installed, you can get started immediately:
+
+```bash
+# Deploy with one command (includes security setup)
+./deploy.sh
+
+# Or manually with environment variables
+docker-compose --env-file .env up --build -d
+```
+
+This will start both the API server (port 8001) and web UI (port 3000).
+
+### Production Deployment
+
+For production deployments:
+
+```bash
+# Use production environment file
+docker-compose --env-file .env.production up --build -d
+
+# Or use specific environment file
+docker-compose --env-file .env.staging up --build -d
+```
+
+### Docker Features
+
+- **Security**: Non-root containers with proper user isolation
+- **Health Checks**: Built-in monitoring for service availability
+- **Resource Limits**: CPU and memory constraints for stable performance
+- **Logging**: Structured JSON logging with rotation
+- **Networks**: Isolated network for service communication
+- **Volumes**: Persistent storage for logs and runtime data
+
+## 🛠️ Manual Setup (Alternative)
+
+4. 
**(Required) Create and activate a Python 3.10+ virtualenv:**
+   ```bash
+   # Ensure you're using Python 3.10+
+   python3.10 -m venv .venv
+   source .venv/bin/activate
+
+   # Verify Python version (should show 3.10+)
+   python --version
+   ```
+   On Windows (PowerShell):
+   ```powershell
+   # Use Python 3.10+ (adjust path as needed)
+   python3.10 -m venv .venv
+   .\.venv\Scripts\Activate.ps1
+
+   # Verify Python version
+   python --version
+   ```
+
+5. **Install backend dependencies:**
+   ```bash
+   pip install -r server/requirements.txt
+   ```
+6. **Install frontend dependencies:**
+   ```bash
+   npm install --prefix web
+   ```
+7. **Start the FastAPI server:**
+   ```bash
+   python -m server.server --reload
+   ```
+8. **Start the Next.js app (new terminal):**
+   ```bash
+   npm run dev --prefix web
+   ```
+9. **Connect Gmail for email workflows.** With both services running, open [http://localhost:3000](http://localhost:3000), head to *Settings → Gmail*, and complete the Composio OAuth flow. This step is required for email drafting, replies, and the important-email monitor.
+
+The web app proxies API calls to the Python server using the values in `.env`, so keeping both processes running is required for end-to-end flows.
+
+## Project Layout
+- `server/` – FastAPI application and agents
+- `web/` – Next.js app
+- `server/data/` – runtime data (ignored by git)
+
+## License
+MIT - see [LICENSE](LICENSE).
diff --git a/deploy.sh b/deploy.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4a7e99bc0c74309a53501b4c0445063b41c2cc7d
--- /dev/null
+++ b/deploy.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+# OpenPoke Deployment Script
+echo "🚀 Deploying OpenPoke with FriendliAI integration..."
+
+# Check if .env file exists
+if [ ! -f ".env" ]; then
+    echo "⚠️ Warning: .env file not found!"
+    echo "📝 Please copy .env.example to .env and configure your API keys:"
+    echo "   cp .env.example .env"
+    echo "   # Then edit .env with your actual credentials"
+    echo ""
+    echo "🔄 Continuing with default configuration (you may need to configure API keys manually)..."
+fi
+
+# Check if Docker is installed
+if ! command -v docker &> /dev/null; then
+    echo "❌ Docker is not installed. Please install Docker first."
+    exit 1
+fi
+
+# Check if docker-compose is installed
+if command -v docker-compose &> /dev/null; then
+    COMPOSE_CMD="docker-compose"
+elif docker compose version &> /dev/null; then
+    COMPOSE_CMD="docker compose"
+else
+    echo "❌ docker-compose is not installed. Please install docker-compose (or enable the Docker Compose plugin) first."
+    exit 1
+fi
+
+# Stop any existing containers
+echo "🛑 Stopping existing containers..."
+${COMPOSE_CMD} down
+
+# Build and start the services
+echo "🔨 Building and starting services..."
+if [ -f ".env" ]; then
+    ${COMPOSE_CMD} --env-file .env up --build -d
+else
+    ${COMPOSE_CMD} up --build -d
+fi
+
+# Wait for services to be ready
+echo "⏳ Waiting for services to start..."
+sleep 15
+
+# Check if services are running
+if ${COMPOSE_CMD} ps | grep -q "Up"; then
+    echo "✅ Deployment successful!"
+    echo ""
+    echo "🌐 Services are running:"
+    echo "   - Server API: http://localhost:8001"
+    echo "   - Web UI: http://localhost:3000"
+    echo ""
+    echo "📖 Check the logs with: ${COMPOSE_CMD} logs -f"
+    echo "🔍 View service status: ${COMPOSE_CMD} ps"
+    echo "🛑 Stop with: ${COMPOSE_CMD} down"
+    echo ""
+    echo "💡 Tip: If you encounter API key issues, edit your .env file with correct credentials"
+else
+    echo "❌ Deployment failed. 
Check the logs with: ${COMPOSE_CMD} logs" + echo "" + echo "πŸ”§ Troubleshooting tips:" + echo " 1. Check if your .env file has valid API keys" + echo " 2. Verify Docker has enough resources" + echo " 3. Check firewall settings" + exit 1 +fi \ No newline at end of file diff --git a/docker-compose.production.yml b/docker-compose.production.yml new file mode 100644 index 0000000000000000000000000000000000000000..404da83364151b76bb8d6c29768bc066528347e4 --- /dev/null +++ b/docker-compose.production.yml @@ -0,0 +1,79 @@ +version: '3.8' + +# Production overrides for docker-compose.yml +# Usage: docker-compose -f docker-compose.yml -f docker-compose.production.yml up + +services: + server: + # Production-specific configurations + environment: + - OPENPOKE_CORS_ALLOW_ORIGINS=${PRODUCTION_CORS_ORIGINS:-https://yourdomain.com} + - OPENPOKE_ENABLE_DOCS=0 # Disable docs in production + deploy: + resources: + limits: + cpus: '1.0' + memory: 1G + reservations: + cpus: '0.5' + memory: 512M + restart_policy: + condition: on-failure + delay: 5s + max_attempts: 3 + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "5" + + web: + # Production-specific configurations + deploy: + resources: + limits: + cpus: '0.5' + memory: 512M + reservations: + cpus: '0.25' + memory: 256M + restart_policy: + condition: on-failure + delay: 5s + max_attempts: 3 + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "5" + +# Optional: Add SSL termination with Traefik + # traefik: + # image: traefik:v2.10 + # command: + # - "--api.dashboard=true" + # - "--providers.docker=true" + # - "--providers.docker.exposedbydefault=false" + # - "--entrypoints.web.address=:80" + # - "--entrypoints.websecure.address=:443" + # - "--certificatesresolvers.letsencrypt.acme.httpchallenge=true" + # - "--certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web" + # - "--certificatesresolvers.letsencrypt.acme.email=your-email@example.com" + # - "--certificatesresolvers.letsencrypt.acme.storage=/letsencrypt/acme.json" + # ports: + # - "80:80" + # - "443:443" + # - "8080:8080" # Traefik dashboard + # volumes: + # - /var/run/docker.sock:/var/run/docker.sock:ro + # - letsencrypt:/letsencrypt + # networks: + # - app-network + # labels: + # - "traefik.enable=true" + # - "traefik.http.routers.api.rule=Host(`traefik.yourdomain.com`)" + # - "traefik.http.routers.api.service=api@internal" + +# volumes: +# letsencrypt: +# driver: local \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..51f68ce830ea039adfac18b195a51529d2457015 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,93 @@ +version: '3.8' + +# Define networks for better service isolation +networks: + app-network: + driver: bridge + +# Define volumes for persistent data +volumes: + logs: + driver: local + +services: + server: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - "8001:8001" + environment: + - OPENPOKE_HOST=0.0.0.0 + - OPENPOKE_PORT=8001 + - OPENPOKE_CORS_ALLOW_ORIGINS=* + - OPENPOKE_ENABLE_DOCS=${OPENPOKE_ENABLE_DOCS:-1} + # Sensitive environment variables should be loaded from .env file + - API_BASE_URL=${API_BASE_URL} + - API_KEY=${API_KEY} + - COMPOSIO_API_KEY=${COMPOSIO_API_KEY} + - INTERACTION_AGENT_MODEL=${INTERACTION_AGENT_MODEL} + - EXECUTION_AGENT_MODEL=${EXECUTION_AGENT_MODEL} + - EXECUTION_SEARCH_AGENT_MODEL=${EXECUTION_SEARCH_AGENT_MODEL} + - SUMMARIZER_MODEL=${SUMMARIZER_MODEL} + - EMAIL_CLASSIFIER_MODEL=${EMAIL_CLASSIFIER_MODEL} + restart: unless-stopped + networks: + - app-network + volumes: + - logs:/app/logs + - ./server:/app/server:ro + deploy: + resources: + limits: + cpus: '0.50' + memory: 512M + reservations: + cpus: '0.25' + memory: 256M + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + web: + build: + context: . + dockerfile: Dockerfile.web + ports: + - "3000:3000" + environment: + - NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL:-http://localhost:8001} + depends_on: + - server + restart: unless-stopped + networks: + - app-network + volumes: + - ./web:/app:ro + deploy: + resources: + limits: + cpus: '0.30' + memory: 256M + reservations: + cpus: '0.15' + memory: 128M + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + # Optional: Add a reverse proxy (nginx) for production + # nginx: + # image: nginx:alpine + # ports: + # - "80:80" + # - "443:443" + # volumes: + # - ./nginx.conf:/etc/nginx/nginx.conf + # depends_on: + # - web + # restart: unless-stopped \ No newline at end of file diff --git a/next-env.d.ts b/next-env.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..40c3d68096c270ef976f3db4e9eb42b05c7067bb --- /dev/null +++ b/next-env.d.ts @@ -0,0 +1,5 @@ +/// +/// + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information. diff --git a/server/__init__.py b/server/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8a72bbd135f404c263bd2e930f594bb150c238e5 --- /dev/null +++ b/server/__init__.py @@ -0,0 +1,3 @@ +"""OpenPoke Python server package.""" + +from .app import app diff --git a/server/agents/__init__.py b/server/agents/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f5a3576127f4786d743e95aee96cdc4ae34c46c8 --- /dev/null +++ b/server/agents/__init__.py @@ -0,0 +1,8 @@ +"""Agent assets package. + +Contains agent-specific prompts and tool registries that can be wired into +OpenRouter/OpenAI chat completion requests. 
+""" + +__all__ = ["interaction_agent", "execution_agent"] + diff --git a/server/agents/execution_agent/__init__.py b/server/agents/execution_agent/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..affa4ee25539814ee57d7615c741ea94d3ac5d4f --- /dev/null +++ b/server/agents/execution_agent/__init__.py @@ -0,0 +1,16 @@ +"""Execution agent assets.""" + +from .agent import ExecutionAgent +from .batch_manager import ExecutionBatchManager, ExecutionResult, PendingExecution +from .runtime import ExecutionAgentRuntime +from .tools import get_tool_schemas as get_execution_tool_schemas, get_tool_registry as get_execution_tool_registry + +__all__ = [ + "ExecutionBatchManager", + "ExecutionAgent", + "ExecutionAgentRuntime", + "ExecutionResult", + "PendingExecution", + "get_execution_tool_schemas", + "get_execution_tool_registry", +] diff --git a/server/agents/execution_agent/agent.py b/server/agents/execution_agent/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..bd7387c4c4cf0010f385add6b6603f6e5af4fb02 --- /dev/null +++ b/server/agents/execution_agent/agent.py @@ -0,0 +1,123 @@ +"""Execution Agent implementation.""" + +from pathlib import Path +from typing import List, Optional, Dict, Any + +from ...services.execution import get_execution_agent_logs +from ...logging_config import logger + + +# Load system prompt template from file +_prompt_path = Path(__file__).parent / "system_prompt.md" +if _prompt_path.exists(): + SYSTEM_PROMPT_TEMPLATE = _prompt_path.read_text(encoding="utf-8").strip() +else: + # Placeholder template - you'll replace this with actual instructions + SYSTEM_PROMPT_TEMPLATE = """You are an execution agent responsible for completing specific tasks using available tools. + +Agent Name: {agent_name} +Purpose: {agent_purpose} + +Instructions: +[TO BE FILLED IN BY USER] + +You have access to Gmail tools to help complete your tasks. When given instructions: +1. Analyze what needs to be done +2. Use the appropriate tools to complete the task +3. Provide clear status updates on your actions + +Be thorough, accurate, and efficient in your execution.""" + + +class ExecutionAgent: + """Manages state and history for an execution agent.""" + + # Initialize execution agent with name, conversation limits, and log store access + def __init__( + self, + name: str, + conversation_limit: Optional[int] = None + ): + """ + Initialize an execution agent. + + Args: + name: Human-readable agent name (e.g., 'conversation with keith') + conversation_limit: Optional limit on past conversations to include (None = all) + """ + self.name = name + self.conversation_limit = conversation_limit + self._log_store = get_execution_agent_logs() + + # Generate system prompt template with agent name and purpose derived from name + def build_system_prompt(self) -> str: + """Build the system prompt for this agent.""" + agent_purpose = f"Handle tasks related to: {self.name}" + + return SYSTEM_PROMPT_TEMPLATE.format( + agent_name=self.name, + agent_purpose=agent_purpose + ) + + # Combine base system prompt with conversation history, applying conversation limits + def build_system_prompt_with_history(self) -> str: + """ + Build system prompt including agent history. 
+ + Returns: + System prompt with embedded history transcript + """ + base_prompt = self.build_system_prompt() + + # Load history transcript + transcript = self._log_store.load_transcript(self.name) + + if transcript: + # Apply conversation limit if needed + if self.conversation_limit and self.conversation_limit > 0: + # Parse entries and limit them + lines = transcript.split('\n') + request_count = sum(1 for line in lines if ' self.conversation_limit: + # Find where to cut + kept_requests = 0 + cutoff_index = len(lines) + for i in range(len(lines) - 1, -1, -1): + if ' List[Dict[str, str]]: + """ + Build message array for LLM call. + + Args: + current_instruction: Current instruction from interaction agent + + Returns: + List of messages in OpenRouter format + """ + return [ + {"role": "user", "content": current_instruction} + ] + + # Log the agent's final response to the execution log store + def record_response(self, response: str) -> None: + """Record agent's response to the log.""" + self._log_store.record_agent_response(self.name, response) + + # Log tool invocation and results with truncated content for readability + def record_tool_execution(self, tool_name: str, arguments: str, result: str) -> None: + """Record tool execution details.""" + self._log_store.record_action(self.name, f"Calling {tool_name} with: {arguments[:200]}") + # Record the tool response + self._log_store.record_tool_response(self.name, tool_name, result[:500]) diff --git a/server/agents/execution_agent/batch_manager.py b/server/agents/execution_agent/batch_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..45f2e5eb9f27c8a71632a6f0feabce03a40104ab --- /dev/null +++ b/server/agents/execution_agent/batch_manager.py @@ -0,0 +1,193 @@ +"""Coordinate execution agents and batch their results for the interaction agent.""" + +from __future__ import annotations + +import asyncio +import uuid +from dataclasses import dataclass, field +from datetime import datetime +from typing import Dict, List, Optional + +from .runtime import ExecutionAgentRuntime, ExecutionResult +from ...logging_config import logger + + +@dataclass +class PendingExecution: + """Track a pending execution request.""" + + request_id: str + agent_name: str + instructions: str + batch_id: str + created_at: datetime = field(default_factory=datetime.now) + + +@dataclass +class _BatchState: + """Collect results for a single interaction-agent turn.""" + + batch_id: str + created_at: datetime = field(default_factory=datetime.now) + pending: int = 0 + results: List[ExecutionResult] = field(default_factory=list) + + +class ExecutionBatchManager: + """Run execution agents and deliver their combined outcome.""" + + # Initialize batch manager with timeout and coordination state for execution agents + def __init__(self, timeout_seconds: int = 90) -> None: + self.timeout_seconds = timeout_seconds + self._pending: Dict[str, PendingExecution] = {} + self._batch_lock = asyncio.Lock() + self._batch_state: Optional[_BatchState] = None + + # Run execution agent with timeout handling and batch coordination for interaction agent + async def execute_agent( + self, + agent_name: str, + instructions: str, + request_id: Optional[str] = None, + ) -> ExecutionResult: + """Execute an agent asynchronously and buffer the result for batch dispatch.""" + + if not request_id: + request_id = str(uuid.uuid4()) + + batch_id = await self._register_pending_execution(agent_name, instructions, request_id) + + try: + logger.info(f"[{agent_name}] Execution started") + 
runtime = ExecutionAgentRuntime(agent_name=agent_name) + result = await asyncio.wait_for( + runtime.execute(instructions), + timeout=self.timeout_seconds, + ) + status = "SUCCESS" if result.success else "FAILED" + logger.info(f"[{agent_name}] Execution finished: {status}") + except asyncio.TimeoutError: + logger.error(f"[{agent_name}] Execution timed out after {self.timeout_seconds}s") + result = ExecutionResult( + agent_name=agent_name, + success=False, + response=f"Execution timed out after {self.timeout_seconds} seconds", + error="Timeout", + ) + except Exception as exc: # pragma: no cover - defensive + logger.exception(f"[{agent_name}] Execution failed unexpectedly") + result = ExecutionResult( + agent_name=agent_name, + success=False, + response=f"Execution failed: {exc}", + error=str(exc), + ) + finally: + self._pending.pop(request_id, None) + + await self._complete_execution(batch_id, result, agent_name) + return result + + # Add execution request to current batch or create new batch if none exists + async def _register_pending_execution( + self, + agent_name: str, + instructions: str, + request_id: str, + ) -> str: + """Attach a new execution to the active batch, opening one when required.""" + + async with self._batch_lock: + if self._batch_state is None: + batch_id = str(uuid.uuid4()) + self._batch_state = _BatchState(batch_id=batch_id) + else: + batch_id = self._batch_state.batch_id + + self._batch_state.pending += 1 + self._pending[request_id] = PendingExecution( + request_id=request_id, + agent_name=agent_name, + instructions=instructions, + batch_id=batch_id, + ) + + return batch_id + + # Store execution result and send combined batch to interaction agent when complete + async def _complete_execution( + self, + batch_id: str, + result: ExecutionResult, + agent_name: str, + ) -> None: + """Record the execution result and dispatch when the batch drains.""" + + dispatch_payload: Optional[str] = None + + async with self._batch_lock: + state = self._batch_state + if state is None or state.batch_id != batch_id: + logger.warning(f"[{agent_name}] Dropping result for unknown batch") + return + + state.results.append(result) + state.pending -= 1 + + if state.pending == 0: + dispatch_payload = self._format_batch_payload(state.results) + agent_names = [entry.agent_name for entry in state.results] + logger.info(f"Execution batch completed: {', '.join(agent_names)}") + self._batch_state = None + + if dispatch_payload: + await self._dispatch_to_interaction_agent(dispatch_payload) + + # Return list of currently pending execution requests for monitoring purposes + def get_pending_executions(self) -> List[Dict[str, str]]: + """Expose pending executions for observability.""" + + return [ + { + "request_id": pending.request_id, + "agent_name": pending.agent_name, + "batch_id": pending.batch_id, + "created_at": pending.created_at.isoformat(), + "elapsed_seconds": (datetime.now() - pending.created_at).total_seconds(), + } + for pending in self._pending.values() + ] + + # Clean up all pending executions and batch state on shutdown + async def shutdown(self) -> None: + """Clear pending bookkeeping (no background work remains).""" + + self._pending.clear() + async with self._batch_lock: + self._batch_state = None + + # Format multiple execution results into single message for interaction agent + def _format_batch_payload(self, results: List[ExecutionResult]) -> str: + """Render execution results into the interaction-agent format.""" + + entries: List[str] = [] + for result in results: + status = 
"SUCCESS" if result.success else "FAILED" + response_text = (result.response or "(no response provided)").strip() + entries.append(f"[{status}] {result.agent_name}: {response_text}") + return "\n".join(entries) + + # Forward combined execution results to interaction agent for user response generation + async def _dispatch_to_interaction_agent(self, payload: str) -> None: + """Send the aggregated execution summary to the interaction agent.""" + + from ..interaction_agent.runtime import InteractionAgentRuntime + + runtime = InteractionAgentRuntime() + try: + loop = asyncio.get_running_loop() + except RuntimeError: + asyncio.run(runtime.handle_agent_message(payload)) + return + + loop.create_task(runtime.handle_agent_message(payload)) diff --git a/server/agents/execution_agent/runtime.py b/server/agents/execution_agent/runtime.py new file mode 100644 index 0000000000000000000000000000000000000000..a3978d43f26b24914b17a1ab525cc168733572ac --- /dev/null +++ b/server/agents/execution_agent/runtime.py @@ -0,0 +1,236 @@ +"""Simplified Execution Agent Runtime.""" + +import inspect +import json +from typing import Dict, Any, List, Optional, Tuple +from dataclasses import dataclass + +from .agent import ExecutionAgent +from .tools import get_tool_schemas, get_tool_registry +from ...config import get_settings +from ...openrouter_client import request_chat_completion +from ...logging_config import logger + + +@dataclass +class ExecutionResult: + """Result from an execution agent.""" + agent_name: str + success: bool + response: str + error: Optional[str] = None + tools_executed: List[str] = None + + +class ExecutionAgentRuntime: + """Manages the execution of a single agent request.""" + + MAX_TOOL_ITERATIONS = 8 + + # Initialize execution agent runtime with settings, tools, and agent instance + def __init__(self, agent_name: str): + settings = get_settings() + self.agent = ExecutionAgent(agent_name) + self.api_key = settings.api_key + self.model = settings.execution_agent_model + self.tool_registry = get_tool_registry(agent_name=agent_name) + self.tool_schemas = get_tool_schemas() + + if not self.api_key: + raise ValueError("API key not configured. Set API_KEY environment variable.") + + # Main execution loop for running agent with LLM calls and tool execution + async def execute(self, instructions: str) -> ExecutionResult: + """Execute the agent with given instructions.""" + try: + # Build system prompt with history + system_prompt = self.agent.build_system_prompt_with_history() + + # Start conversation with the instruction + messages = [{"role": "user", "content": instructions}] + tools_executed: List[str] = [] + final_response: Optional[str] = None + + for iteration in range(self.MAX_TOOL_ITERATIONS): + logger.info( + f"[{self.agent.name}] Requesting plan (iteration {iteration + 1})" + ) + response = await self._make_llm_call(system_prompt, messages, with_tools=True) + assistant_message = response.get("choices", [{}])[0].get("message", {}) + + if not assistant_message: + raise RuntimeError("LLM response did not include an assistant message") + + raw_tool_calls = assistant_message.get("tool_calls", []) or [] + parsed_tool_calls = self._extract_tool_calls(raw_tool_calls) + + assistant_entry: Dict[str, Any] = { + "role": "assistant", + "content": assistant_message.get("content", "") or "", + } + if raw_tool_calls: + assistant_entry["tool_calls"] = raw_tool_calls + messages.append(assistant_entry) + + if not parsed_tool_calls: + final_response = assistant_entry["content"] or "No action required." 
+ break + + for tool_call in parsed_tool_calls: + tool_name = tool_call.get("name", "") + tool_args = tool_call.get("arguments", {}) + call_id = tool_call.get("id") + + if not tool_name: + logger.warning("Tool call missing name: %s", tool_call) + failure = {"error": "Tool call missing name; unable to execute."} + tool_message = { + "role": "tool", + "tool_call_id": call_id or "unknown_tool", + "content": self._format_tool_result( + tool_name or "", False, failure, tool_args + ), + } + messages.append(tool_message) + continue + + tools_executed.append(tool_name) + logger.info(f"[{self.agent.name}] Executing tool: {tool_name}") + + success, result = await self._execute_tool(tool_name, tool_args) + + if success: + logger.info(f"[{self.agent.name}] Tool {tool_name} completed successfully") + record_payload = self._safe_json_dump(result) + else: + error_detail = result.get("error") if isinstance(result, dict) else str(result) + logger.warning(f"[{self.agent.name}] Tool {tool_name} failed: {error_detail}") + record_payload = error_detail + + self.agent.record_tool_execution( + tool_name, + self._safe_json_dump(tool_args), + record_payload + ) + + tool_message = { + "role": "tool", + "tool_call_id": call_id or tool_name, + "content": self._format_tool_result(tool_name, success, result, tool_args), + } + messages.append(tool_message) + + else: + raise RuntimeError("Reached tool iteration limit without final response") + + if final_response is None: + raise RuntimeError("LLM did not return a final response") + + self.agent.record_response(final_response) + + return ExecutionResult( + agent_name=self.agent.name, + success=True, + response=final_response, + tools_executed=tools_executed + ) + + except Exception as e: + logger.error(f"[{self.agent.name}] Execution failed: {e}") + error_msg = str(e) + failure_text = f"Failed to complete task: {error_msg}" + self.agent.record_response(f"Error: {error_msg}") + + return ExecutionResult( + agent_name=self.agent.name, + success=False, + response=failure_text, + error=error_msg + ) + + # Execute API call with system prompt, messages, and optional tool schemas + async def _make_llm_call(self, system_prompt: str, messages: List[Dict], with_tools: bool) -> Dict: + """Make an LLM call.""" + tools_to_send = self.tool_schemas if with_tools else None + logger.info(f"[{self.agent.name}] Calling LLM with model: {self.model}, tools: {len(tools_to_send) if tools_to_send else 0}") + return await request_chat_completion( + model=self.model, + messages=messages, + system=system_prompt, + api_key=self.api_key, + tools=tools_to_send + ) + + # Parse and validate tool calls from LLM response into structured format + def _extract_tool_calls(self, raw_tools: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """Extract tool calls from an assistant message.""" + tool_calls: List[Dict[str, Any]] = [] + + for tool in raw_tools: + function = tool.get("function", {}) + name = function.get("name", "") + args = function.get("arguments", "") + + if isinstance(args, str): + try: + args = json.loads(args) if args else {} + except json.JSONDecodeError: + args = {} + + if name: + tool_calls.append({ + "id": tool.get("id"), + "name": name, + "arguments": args, + }) + + return tool_calls + + # Safely convert objects to JSON with fallback to string representation + def _safe_json_dump(self, payload: Any) -> str: + """Serialize payload to JSON, falling back to string representation.""" + try: + return json.dumps(payload, default=str) + except TypeError: + return str(payload) + + # Format tool 
execution results into JSON structure for LLM consumption + def _format_tool_result( + self, + tool_name: str, + success: bool, + result: Any, + arguments: Dict[str, Any], + ) -> str: + """Build a structured string for tool responses.""" + if success: + payload: Dict[str, Any] = { + "tool": tool_name, + "status": "success", + "arguments": arguments, + "result": result, + } + else: + error_detail = result.get("error") if isinstance(result, dict) else str(result) + payload = { + "tool": tool_name, + "status": "error", + "arguments": arguments, + "error": error_detail, + } + return self._safe_json_dump(payload) + + # Execute tool function from registry with error handling and async support + async def _execute_tool(self, tool_name: str, arguments: Dict) -> Tuple[bool, Any]: + """Execute a tool. Returns (success, result).""" + tool_func = self.tool_registry.get(tool_name) + if not tool_func: + return False, {"error": f"Unknown tool: {tool_name}"} + + try: + result = tool_func(**arguments) + if inspect.isawaitable(result): + result = await result + return True, result + except Exception as e: + return False, {"error": str(e)} diff --git a/server/agents/execution_agent/system_prompt.md b/server/agents/execution_agent/system_prompt.md new file mode 100644 index 0000000000000000000000000000000000000000..c82976097886f664bdfcf6f23eff3ce725c90c1c --- /dev/null +++ b/server/agents/execution_agent/system_prompt.md @@ -0,0 +1,51 @@ +You are the assistant of Poke by the Interaction Company of California. You are the "execution engine" of Poke, helping complete tasks for Poke, while Poke talks to the user. Your job is to execute and accomplish a goal, and you do not have direct access to the user. + +IMPORTANT: Don't ever execute a draft unless you receive explicit confirmation to execute it. If you are instructed to send an email, first JUST create the draft. Then, when the user confirms draft, we can send it. + + +Your final output is directed to Poke, which handles user conversations and presents your results to the user. Focus on providing Poke with adequate contextual information; you are not responsible for framing responses in a user-friendly way. + +If it needs more data from Poke or the user, you should also include it in your final output message. If you ever need to send a message to the user, you should tell Poke to forward that message to the user. + +Remember that your last output message (summary) will be forwarded to Poke. In that message, provide all relevant information and avoid preamble or postamble (e.g., "Here's what I found:" or "Let me know if this looks good to send"). If you create a draft, you need to send the exact to, subject, and body of the draft to the interaction agent verbatim. + +This conversation history may have gaps. It may start from the middle of a conversation, or it may be missing messages. The only assumption you can make is that Poke's latest message is the most recent one, and representative of Poke's current requests. Address that message directly. The other messages are just for context. + +Before you call any tools, reason through why you are calling them by explaining the thought process. If it could possibly be helpful to call more than one tool at once, then do so. + +If you have context that would help the execution of a tool call (e.g. the user is searching for emails from a person and you know that person's email address), pass that context along. 
+ +When searching for personal information about the user, it's probably smart to look through their emails. + + + + +Agent Name: {agent_name} +Purpose: {agent_purpose} + +# Instructions +[TO BE FILLED IN BY USER - Add your specific instructions here] + +# Available Tools +You have access to the following Gmail tools: +- gmail_create_draft: Create an email draft +- gmail_execute_draft: Send a previously created draft +- gmail_forward_email: Forward an existing email +- gmail_reply_to_thread: Reply to an email thread + +You also manage reminder triggers for this agent: +- createTrigger: Store a reminder by providing the payload to run later. Supply an ISO 8601 `start_time` and an iCalendar `RRULE` when recurrence is needed. +- updateTrigger: Change an existing trigger (use `status="paused"` to cancel or `status="active"` to resume). +- listTriggers: Inspect all triggers assigned to this agent. + +# Guidelines +1. Analyze the instructions carefully before taking action +2. Use the appropriate tools to complete the task +3. Be thorough and accurate in your execution +4. Provide clear, concise responses about what you accomplished +5. If you encounter errors, explain what went wrong and what you tried +6. When creating or updating triggers, convert natural-language schedules into explicit `RRULE` strings and precise `start_time` timestamps yourselfβ€”do not rely on the trigger service to infer intent without them. +7. All times will be interpreted using the user's automatically detected timezone. +8. After creating or updating a trigger, consider calling `listTriggers` to confirm the schedule when clarity would help future runs. + +When you receive instructions, think step-by-step about what needs to be done, then execute the necessary tools to complete the task. 
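The prompt above tells the agent to turn natural-language schedules into an explicit ISO 8601 `start_time` plus an iCalendar `RRULE` before calling `createTrigger`. As a rough illustration of that conversion (the exact argument names are an assumption; only `start_time`, `RRULE` recurrence, and `status` appear in this diff), "remind me every weekday at 9am" might become:

```python
from datetime import datetime, timedelta, timezone

# Next 09:00 occurrence (UTC used here for simplicity; the prompt says the
# user's detected timezone is applied, which is not modeled in this sketch).
now = datetime.now(timezone.utc)
start = now.replace(hour=9, minute=0, second=0, microsecond=0)
if start <= now:
    start += timedelta(days=1)

# Hypothetical createTrigger arguments for a recurring weekday reminder.
create_trigger_args = {
    "payload": "Remind the user to review their inbox.",
    "start_time": start.isoformat(),                          # explicit ISO 8601 timestamp
    "recurrence": "RRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR",   # iCalendar RRULE
    "status": "active",
}
print(create_trigger_args)
```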
diff --git a/server/agents/execution_agent/tasks/__init__.py b/server/agents/execution_agent/tasks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..994ed0d19dc77230767d69823ab001a1e502a380 --- /dev/null +++ b/server/agents/execution_agent/tasks/__init__.py @@ -0,0 +1,30 @@ +"""Task registry for execution agents.""" + +from __future__ import annotations + +from typing import Any, Callable, Dict, List + +from .search_email.schemas import get_schemas as _get_email_search_schemas +from .search_email.tool import build_registry as _build_email_search_registry + + +# Return tool schemas contributed by task modules +def get_task_schemas() -> List[Dict[str, Any]]: + """Return tool schemas contributed by task modules.""" + + return [*_get_email_search_schemas()] + + +# Return executable task tools keyed by name +def get_task_registry(agent_name: str) -> Dict[str, Callable[..., Any]]: + """Return executable task tools keyed by name.""" + + registry: Dict[str, Callable[..., Any]] = {} + registry.update(_build_email_search_registry(agent_name)) + return registry + + +__all__ = [ + "get_task_registry", + "get_task_schemas", +] diff --git a/server/agents/execution_agent/tasks/search_email/__init__.py b/server/agents/execution_agent/tasks/search_email/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3f4658ad2fc95778ac45316e6af1f1b1850ee53e --- /dev/null +++ b/server/agents/execution_agent/tasks/search_email/__init__.py @@ -0,0 +1,15 @@ +"""Email search task package.""" + +from .schemas import SEARCH_TOOL_NAME, TASK_TOOL_NAME, TaskEmailSearchPayload, get_schemas +from .tool import GmailSearchEmail, EmailSearchToolResult, build_registry, task_email_search + +__all__ = [ + "GmailSearchEmail", + "EmailSearchToolResult", + "TaskEmailSearchPayload", + "SEARCH_TOOL_NAME", + "TASK_TOOL_NAME", + "build_registry", + "get_schemas", + "task_email_search", +] diff --git a/server/agents/execution_agent/tasks/search_email/email_cleaner.py b/server/agents/execution_agent/tasks/search_email/email_cleaner.py new file mode 100644 index 0000000000000000000000000000000000000000..068db6a74f1083e80b5d24314e990280c140e44f --- /dev/null +++ b/server/agents/execution_agent/tasks/search_email/email_cleaner.py @@ -0,0 +1,5 @@ +"""Backward-compatible re-export for shared email cleaning utilities.""" + +from server.services.gmail import EmailTextCleaner + +__all__ = ["EmailTextCleaner"] diff --git a/server/agents/execution_agent/tasks/search_email/gmail_internal.py b/server/agents/execution_agent/tasks/search_email/gmail_internal.py new file mode 100644 index 0000000000000000000000000000000000000000..db1593611fcd40c5d2412ac55dcfd36a1f5c7d21 --- /dev/null +++ b/server/agents/execution_agent/tasks/search_email/gmail_internal.py @@ -0,0 +1,79 @@ +"""Internal Gmail utilities for the search_email task. + +This module contains Gmail functions that are internal to the search_email task +and should not be exposed as public tools to execution agents. 
+""" + +from __future__ import annotations + +from typing import Any, Dict, List, Optional + +from server.services.gmail import execute_gmail_tool, get_active_gmail_user_id + +# Schema for the internal LLM to call gmail_fetch_emails +GMAIL_FETCH_EMAILS_SCHEMA = { + "type": "function", + "function": { + "name": "gmail_fetch_emails", + "description": "Search Gmail and retrieve matching messages", + "parameters": { + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Gmail search query (same syntax as Gmail UI).", + }, + "max_results": { + "type": "integer", + "description": "Maximum number of emails to return. Default: 10. Use higher values (20-50) only when absolutely necessary for comprehensive searches like 'all important emails this month'.", + "minimum": 1, + "maximum": 100, + }, + "include_spam_trash": { + "type": "boolean", + "description": "Include spam and trash messages. Default: false.", + }, + }, + "additionalProperties": False, + }, + }, +} + + +def gmail_fetch_emails( + query: Optional[str] = None, + label_ids: Optional[List[str]] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ids_only: Optional[bool] = None, + include_payload: Optional[bool] = None, + include_spam_trash: Optional[bool] = None, + verbose: Optional[bool] = None, +) -> Dict[str, Any]: + """Fetch Gmail messages with optional filters and verbosity controls. + + This is an internal function for the search_email task and should not + be exposed as a public tool to execution agents. + """ + arguments: Dict[str, Any] = { + "query": query, + "label_ids": label_ids, + "max_results": max_results, + "page_token": page_token, + "ids_only": ids_only, + "include_payload": include_payload, + "include_spam_trash": include_spam_trash, + "verbose": verbose, + } + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. 
Please connect Gmail in settings first."} + + # Use the same composio integration as the public tools + return execute_gmail_tool("GMAIL_FETCH_EMAILS", composio_user_id, arguments) + + +__all__ = [ + "gmail_fetch_emails", + "GMAIL_FETCH_EMAILS_SCHEMA", +] diff --git a/server/agents/execution_agent/tasks/search_email/schemas.py b/server/agents/execution_agent/tasks/search_email/schemas.py new file mode 100644 index 0000000000000000000000000000000000000000..af7d1801ab163c1f3837c7414c72b200ad3c76b2 --- /dev/null +++ b/server/agents/execution_agent/tasks/search_email/schemas.py @@ -0,0 +1,122 @@ +"""Schemas for the email search task tools.""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any, Dict, List, Literal, Optional + +from pydantic import BaseModel, ConfigDict, Field + +TASK_TOOL_NAME = "task_email_search" +SEARCH_TOOL_NAME = "gmail_fetch_emails" +COMPLETE_TOOL_NAME = "return_search_results" + +_SCHEMAS: List[Dict[str, Any]] = [ + { + "type": "function", + "function": { + "name": TASK_TOOL_NAME, + "description": "Expand a raw Gmail search request into multiple targeted queries and return relevant emails.", + "parameters": { + "type": "object", + "properties": { + "search_query": { + "type": "string", + "description": "Raw search request describing the emails to find.", + }, + }, + "required": ["search_query"], + "additionalProperties": False, + }, + }, + } +] + + +class GmailSearchEmail(BaseModel): + """Clean email representation with enhanced content processing.""" + + model_config = ConfigDict(extra="ignore", frozen=True) + + # Core identifiers + id: str # message_id from Gmail API + thread_id: Optional[str] = None + query: str # The search query that found this email + + # Email metadata + subject: str + sender: str + recipient: str # to field + timestamp: datetime + label_ids: List[str] = Field(default_factory=list) + + # Clean content (primary field for LLM consumption) + clean_text: str # Processed, readable email content + + # Attachment information + has_attachments: bool = False + attachment_count: int = 0 + attachment_filenames: List[str] = Field(default_factory=list) + + +class EmailSearchToolResult(BaseModel): + """Structured payload for each tool-call response.""" + + status: Literal["success", "error"] + query: Optional[str] = None + result_count: Optional[int] = None + next_page_token: Optional[str] = None + messages: List[GmailSearchEmail] = Field(default_factory=list) + error: Optional[str] = None + + +class TaskEmailSearchPayload(BaseModel): + """Envelope for the final email selection.""" + + model_config = ConfigDict(extra="forbid", frozen=True) + + emails: List[GmailSearchEmail] + + +_COMPLETION_SCHEMAS: List[Dict[str, Any]] = [ + { + "type": "function", + "function": { + "name": COMPLETE_TOOL_NAME, + "description": "Return the final list of relevant Gmail message ids that match the search criteria.", + "parameters": { + "type": "object", + "properties": { + "message_ids": { + "type": "array", + "description": "List of Gmail message ids deemed relevant.", + "items": {"type": "string"}, + }, + }, + "required": ["message_ids"], + "additionalProperties": False, + }, + }, + } +] + +def get_completion_schema() -> Dict[str, Any]: + return _COMPLETION_SCHEMAS[0] + + +def get_schemas() -> List[Dict[str, Any]]: + """Return the JSON schema for the email search task.""" + + return _SCHEMAS + + +__all__ = [ + "GmailSearchEmail", + "EmailSearchToolResult", + "TaskEmailSearchPayload", + "SEARCH_TOOL_NAME", + "COMPLETE_TOOL_NAME", + 
"TASK_TOOL_NAME", + "get_completion_schema", + "get_schemas", +] diff --git a/server/agents/execution_agent/tasks/search_email/system_prompt.py b/server/agents/execution_agent/tasks/search_email/system_prompt.py new file mode 100644 index 0000000000000000000000000000000000000000..7864ca97c9752ba935ee283539e309f39567e277 --- /dev/null +++ b/server/agents/execution_agent/tasks/search_email/system_prompt.py @@ -0,0 +1,83 @@ +"""System prompt for the Gmail search assistant.""" + +from __future__ import annotations + +from datetime import datetime + + +def get_system_prompt() -> str: + """Generate system prompt with today's date for Gmail search assistant.""" + today = datetime.now().strftime("%Y/%m/%d") + + return ( + "You are an expert Gmail search assistant helping users find emails efficiently.\n" + f"\n" + f"## Current Context:\n" + f"- Today's date: {today}\n" + f"- Use this date as reference for relative time queries (e.g., 'recent', 'today', 'this week')\n" + "\n" + "## Available Tools:\n" + "- `gmail_fetch_emails`: Search Gmail using advanced search parameters\n" + " - `query`: Gmail search query using standard Gmail search operators\n" + " - `max_results`: Maximum emails to return (default: 10, range: 1-100)\n" + " - `include_spam_trash`: Include spam/trash messages (default: false)\n" + "- `return_search_results`: Return the final list of relevant message IDs\n" + "\n" + "## Gmail Search Strategy:\n" + "1. **Use Gmail's powerful search operators** to create precise queries:\n" + " - `from:email@domain.com` - emails from specific sender\n" + " - `to:email@domain.com` - emails to specific recipient\n" + " - `subject:keyword` - emails with specific subject content\n" + " - `has:attachment` - emails with attachments\n" + " - `after:YYYY/MM/DD` and `before:YYYY/MM/DD` - date ranges\n" + " - `is:unread`, `is:read`, `is:important` - status filters\n" + " - `in:inbox`, `in:sent`, `in:trash` - location filters\n" + " - `larger:10M`, `smaller:1M` - size filters\n" + " - `\"exact phrase\"` - exact phrase matching\n" + " - `OR`, `-` (NOT), `()` for complex boolean logic\n" + "\n" + "2. **Run multiple searches in parallel** when the user's request suggests different approaches:\n" + " - Search by sender AND by keywords simultaneously\n" + " - Try relevant date ranges in parallel\n" + " - Search multiple related terms or variations\n" + " - Combine broad and specific queries\n" + "\n" + "3. **Use max_results strategically** to balance comprehensiveness with context efficiency:\n" + " - **Default: 10 results** - suitable for most targeted searches\n" + " - **Use 20-50 results** only when absolutely necessary for comprehensive queries like:\n" + " * \"All important emails from the past month\"\n" + " * \"All meeting invites from this quarter\"\n" + " * \"All emails with attachments from a specific project\"\n" + " - **Avoid over-burdening context** - prefer multiple targeted 10-result searches over one large search\n" + " - **Judge necessity carefully** - only increase limit when the query explicitly requires comprehensive results\n" + "\n" + "4. 
**Think strategically** about what search parameters would be most relevant:\n" + f" - For \"recent emails from John\": `from:john after:{today}`\n" + " - For \"meeting invites\": `subject:meeting OR subject:invite has:attachment`\n" + " - For \"large files\": `has:attachment larger:5M`\n" + " - For \"unread important emails\": `is:unread is:important`\n" + f" - For \"today's emails\": `after:{today}`\n" + f" - For \"this week's emails\": Use date ranges based on today ({today})\n" + "\n" + "## Email Content Processing:\n" + "- Each email includes `clean_text` - processed, readable content from HTML/plain text\n" + "- Clean text has tracking pixels removed, URLs truncated, and formatting optimized\n" + "- Attachment information is available: `has_attachments`, `attachment_count`, `attachment_filenames`\n" + "- Email timestamps are automatically converted to the user's preferred timezone\n" + "- Use clean text content to understand email context and relevance\n" + "\n" + "## Your Process:\n" + "1. **Analyze** the user's request to identify key search criteria\n" + "2. **Search strategically** using multiple targeted Gmail queries with appropriate operators\n" + "3. **Review content** - examine the `clean_text` field to understand email relevance\n" + "4. **Consider attachments** - factor in attachment information when relevant to the query\n" + "5. **Refine searches** - run additional queries if needed based on content analysis\n" + "6. **Select results** - call `return_search_results` with message IDs that best match intent\n" + "\n" + "Be thorough and strategic - use Gmail's search power AND content analysis to find exactly what the user needs!" + ) + + +__all__ = [ + "get_system_prompt", +] diff --git a/server/agents/execution_agent/tasks/search_email/tool.py b/server/agents/execution_agent/tasks/search_email/tool.py new file mode 100644 index 0000000000000000000000000000000000000000..ff134a71dafa301bdf162dbacb541d575f4999ca --- /dev/null +++ b/server/agents/execution_agent/tasks/search_email/tool.py @@ -0,0 +1,450 @@ +"""Email search task implementation.""" + +from __future__ import annotations + +import json +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple + +from server.config import get_settings +from server.logging_config import logger +from server.openrouter_client import request_chat_completion +from server.services.execution import get_execution_agent_logs +from server.services.gmail import ( + EmailTextCleaner, + ProcessedEmail, + execute_gmail_tool, + get_active_gmail_user_id, + parse_gmail_fetch_response, +) +from .gmail_internal import GMAIL_FETCH_EMAILS_SCHEMA +from .schemas import ( + GmailSearchEmail, + EmailSearchToolResult, + TaskEmailSearchPayload, + COMPLETE_TOOL_NAME, + SEARCH_TOOL_NAME, + TASK_TOOL_NAME, + get_completion_schema, +) +from .system_prompt import get_system_prompt + +# Constants +MAX_LLM_ITERATIONS = 8 +ERROR_GMAIL_NOT_CONNECTED = "Gmail not connected. Please connect Gmail in settings first." +ERROR_OPENROUTER_NOT_CONFIGURED = "API key not configured. Set API_KEY." 
+ERROR_EMPTY_QUERY = "search_query must not be empty" +ERROR_QUERY_REQUIRED = "query parameter is required" +ERROR_MESSAGE_IDS_REQUIRED = "message_ids parameter is required" +ERROR_MESSAGE_IDS_MUST_BE_LIST = "message_ids must be provided as a list" +ERROR_TOOL_ARGUMENTS_INVALID = "Tool arguments must be an object" +ERROR_ITERATION_LIMIT = "Email search orchestrator exceeded iteration limit" + + + +_COMPLETION_TOOL_SCHEMA = get_completion_schema() +_LOG_STORE = get_execution_agent_logs() +_EMAIL_CLEANER = EmailTextCleaner(max_url_length=40) + + +# Create standardized error response for tool calls +def _create_error_response(call_id: str, query: Optional[str], error: str) -> Tuple[str, str]: + """Create standardized error response for tool calls.""" + result = EmailSearchToolResult(status="error", query=query, error=error) + return (call_id, _safe_json_dumps(result.model_dump(exclude_none=True))) + + +# Create standardized success response for tool calls +def _create_success_response(call_id: str, data: Dict[str, Any]) -> Tuple[str, str]: + """Create standardized success response for tool calls.""" + return (call_id, _safe_json_dumps(data)) + + +def _validate_search_query(search_query: str) -> Optional[str]: + """Validate search query and return error message if invalid.""" + if not (search_query or "").strip(): + return ERROR_EMPTY_QUERY + return None + + +def _validate_gmail_connection() -> Optional[str]: + """Validate Gmail connection and return user ID or None.""" + return get_active_gmail_user_id() + + +def _validate_openrouter_config() -> Tuple[Optional[str], Optional[str]]: + """Validate API configuration and return (api_key, model) or (None, error).""" + settings = get_settings() + api_key = settings.api_key + if not api_key: + return None, ERROR_OPENROUTER_NOT_CONFIGURED + return api_key, settings.execution_agent_search_model + + +# Return task tool callables +def build_registry(agent_name: str) -> Dict[str, Callable[..., Any]]: # noqa: ARG001 + """Return task tool callables.""" + + return { + TASK_TOOL_NAME: task_email_search, + } + + +# Run an agentic Gmail search for the provided query +async def task_email_search(search_query: str) -> Any: + """Run an agentic Gmail search for the provided query.""" + logger.info(f"[EMAIL_SEARCH] Starting search for: '{search_query}'") + + # Validate inputs + cleaned_query = (search_query or "").strip() + if error := _validate_search_query(cleaned_query): + logger.error(f"[EMAIL_SEARCH] Invalid query: {error}") + return {"error": error} + + composio_user_id = _validate_gmail_connection() + if not composio_user_id: + logger.error(f"[EMAIL_SEARCH] Gmail not connected") + return {"error": ERROR_GMAIL_NOT_CONNECTED} + + api_key, model_or_error = _validate_openrouter_config() + if not api_key: + logger.error(f"[EMAIL_SEARCH] API not configured: {model_or_error}") + return {"error": model_or_error} + + try: + result = await _run_email_search( + search_query=cleaned_query, + composio_user_id=composio_user_id, + model=model_or_error, + api_key=api_key, + ) + logger.info(f"[EMAIL_SEARCH] Found {len(result) if isinstance(result, list) else 0} emails") + return result + except Exception as exc: # pragma: no cover - defensive + logger.exception(f"[EMAIL_SEARCH] Search failed: {exc}") + return {"error": f"Email search failed: {exc}"} + + +# Execute the main email search orchestration loop +async def _run_email_search( + *, + search_query: str, + composio_user_id: str, + model: str, + api_key: str, +) -> List[Dict[str, Any]]: + """Execute the main email search 
orchestration loop.""" + messages: List[Dict[str, Any]] = [ + {"role": "user", "content": _render_user_message(search_query)} + ] + queries: List[str] = [] + emails: Dict[str, GmailSearchEmail] = {} + selected_ids: Optional[List[str]] = None + + for iteration in range(MAX_LLM_ITERATIONS): + logger.debug( + "[task_email_search] LLM iteration", + extra={"iteration": iteration + 1, "tool": TASK_TOOL_NAME}, + ) + + # Get LLM response + response = await request_chat_completion( + model=model, + messages=messages, + system=get_system_prompt(), + api_key=api_key, + tools=[GMAIL_FETCH_EMAILS_SCHEMA, _COMPLETION_TOOL_SCHEMA], + ) + + # Process assistant response + assistant = _extract_assistant_message(response) + tool_calls = assistant.get("tool_calls") or [] + + # Add assistant message to conversation + assistant_entry = { + "role": "assistant", + "content": assistant.get("content", "") or "", + } + if tool_calls: + assistant_entry["tool_calls"] = tool_calls + messages.append(assistant_entry) + + # Handle case where LLM doesn't make tool calls + if not tool_calls: + logger.info(f"[EMAIL_SEARCH] LLM completed search - no more queries needed") + selected_ids = [] + break + + # Execute tool calls and process responses + tool_responses, completed_ids = await _execute_tool_calls( + tool_calls=tool_calls, + queries=queries, + emails=emails, + composio_user_id=composio_user_id, + ) + + # Add tool responses to conversation + for call_id, content in tool_responses: + messages.append({ + "role": "tool", + "tool_call_id": call_id, + "content": content, + }) + + # Check if search is complete + if completed_ids is not None: + logger.info(f"[EMAIL_SEARCH] Search completed - selected {len(completed_ids)} emails") + selected_ids = completed_ids + break + else: + logger.error(f"[EMAIL_SEARCH] {ERROR_ITERATION_LIMIT}") + raise RuntimeError(ERROR_ITERATION_LIMIT) + + final_result = _build_response(queries, emails, selected_ids or []) + unique_queries = list(dict.fromkeys(queries)) + logger.info(f"[EMAIL_SEARCH] Completed - {len(unique_queries)} queries executed, {len(final_result)} emails selected") + return final_result + + + + +# Create user message for the LLM with search context +def _render_user_message(search_query: str) -> str: + """Create user message for the LLM with search context.""" + return f"Please help me find emails: {search_query}" + + +# Execute tool calls from LLM and process search/completion responses +async def _execute_tool_calls( + *, + tool_calls: List[Dict[str, Any]], + queries: List[str], + emails: Dict[str, GmailSearchEmail], + composio_user_id: str, +) -> Tuple[List[Tuple[str, str]], Optional[List[str]]]: + responses: List[Tuple[str, str]] = [] + completion_ids: Optional[List[str]] = None + + for call in tool_calls: + call_id = call.get("id") or SEARCH_TOOL_NAME + function = call.get("function") or {} + name = function.get("name") or "" + raw_arguments = function.get("arguments", {}) + arguments, parse_error = _parse_arguments(raw_arguments) + + if parse_error: + # Handle argument parsing errors + query = arguments.get("query") if arguments else None + logger.warning(f"[EMAIL_SEARCH] Tool argument parsing failed: {parse_error}") + responses.append(_create_error_response(call_id, query, parse_error)) + + elif name == COMPLETE_TOOL_NAME: + # Handle completion tool - signals end of search + completion_ids_candidate, response_data = _handle_completion_tool(arguments) + responses.append(_create_success_response(call_id, response_data)) + if completion_ids_candidate is not None: + 
logger.info(f"[EMAIL_SEARCH] LLM selected {len(completion_ids_candidate)} emails") + completion_ids = completion_ids_candidate + break + + elif name == SEARCH_TOOL_NAME: + # Handle Gmail search tool + search_query = arguments.get("query", "") + logger.info(f"[SEARCH_QUERY] LLM generated query: '{search_query}'") + + result_model = await _perform_search( + arguments=arguments, + queries=queries, + emails=emails, + composio_user_id=composio_user_id, + ) + response_data = result_model.model_dump(exclude_none=True) + + if result_model.status == "success": + count = result_model.result_count or 0 + logger.info(f"[SEARCH_RESULT] Query '{search_query}' β†’ {count} emails found") + else: + logger.warning(f"[SEARCH_RESULT] Query '{search_query}' β†’ FAILED: {result_model.error}") + + responses.append(_create_success_response(call_id, response_data)) + + else: + # Handle unsupported tools + query = arguments.get("query") + error = f"Unsupported tool: {name}" + logger.warning(f"[EMAIL_SEARCH] Unsupported tool: {name}") + responses.append(_create_error_response(call_id, query, error)) + + return responses, completion_ids + + +# Perform Gmail search using Composio and process results +async def _perform_search( + *, + arguments: Dict[str, Any], + queries: List[str], + emails: Dict[str, GmailSearchEmail], + composio_user_id: str, +) -> EmailSearchToolResult: + query = (arguments.get("query") or "").strip() + if not query: + logger.warning(f"[EMAIL_SEARCH] Search called with empty query") + return EmailSearchToolResult( + status="error", + error=ERROR_QUERY_REQUIRED, + ) + + # Use LLM-provided max_results or default to 10 + max_results = arguments.get("max_results", 10) + + composio_arguments = { + "query": query, + "max_results": max_results, # Use LLM-provided value or default 10 + "include_payload": True, # REQUIRED: Need full email content for text cleaning + "verbose": True, # REQUIRED: Need parsed content including messageText + "include_spam_trash": arguments.get("include_spam_trash", False), # Default: False + "format": "full", # Request full email format + "metadata_headers": ["From", "To", "Subject", "Date"], # Ensure we get key headers + } + + _LOG_STORE.record_action( + TASK_TOOL_NAME, + description=f"{TASK_TOOL_NAME} search | query={query} | max_results={max_results}", + ) + + try: + raw_result = execute_gmail_tool( + "GMAIL_FETCH_EMAILS", + composio_user_id, + arguments=composio_arguments, + ) + except Exception as exc: + logger.error(f"[EMAIL_SEARCH] Gmail API failed for '{query}': {exc}") + return EmailSearchToolResult( + status="error", + query=query, + error=str(exc), + ) + + processed_emails, next_page_token = parse_gmail_fetch_response( + raw_result, + query=query, + cleaner=_EMAIL_CLEANER, + ) + parsed_emails = [_processed_to_schema(email) for email in processed_emails] + + queries.append(query) + for email in parsed_emails: + if email.id not in emails: + emails[email.id] = email + + return EmailSearchToolResult( + status="success", + query=query, + result_count=len(parsed_emails), + next_page_token=next_page_token, + messages=parsed_emails, + ) + + +# Build final response with selected emails and logging +def _build_response( + queries: List[str], + emails: Dict[str, GmailSearchEmail], + selected_ids: Sequence[str], +) -> List[Dict[str, Any]]: + # Deduplicate queries while preserving order + unique_queries = list(dict.fromkeys(queries)) + + # Deduplicate and filter valid email IDs efficiently + valid_ids = [id.strip() for id in selected_ids if id and id.strip()] + unique_ids = 
list(dict.fromkeys(valid_ids)) + selected_emails = [emails[id] for id in unique_ids if id in emails] + + # Log any missing email IDs + missing_ids = [id for id in unique_ids if id not in emails] + if missing_ids: + logger.warning(f"[EMAIL_SEARCH] {len(missing_ids)} selected email IDs not found") + + payload = TaskEmailSearchPayload(emails=selected_emails) + + _LOG_STORE.record_action( + TASK_TOOL_NAME, + description=( + f"{TASK_TOOL_NAME} completed | queries={len(unique_queries)} " + f"| emails={len(selected_emails)}" + ), + ) + + return [email.model_dump(exclude_none=True) for email in payload.emails] + + +def _extract_assistant_message(response: Dict[str, Any]) -> Dict[str, Any]: + """Extract assistant message from API response.""" + return response.get("choices", [{}])[0].get("message", {}) + + +def _parse_arguments(raw_arguments: Any) -> Tuple[Dict[str, Any], Optional[str]]: + """Parse tool arguments with proper error handling.""" + if isinstance(raw_arguments, dict): + return raw_arguments, None + if isinstance(raw_arguments, str): + if not raw_arguments.strip(): + return {}, None + try: + return json.loads(raw_arguments), None + except json.JSONDecodeError as exc: + return {}, f"Failed to parse tool arguments: {exc}" + return {}, ERROR_TOOL_ARGUMENTS_INVALID + + + +def _handle_completion_tool(arguments: Dict[str, Any]) -> Tuple[Optional[List[str]], Dict[str, Any]]: + """Handle completion tool call, parsing message IDs and returning response.""" + raw_ids = arguments.get("message_ids") + if raw_ids is None: + return None, {"status": "error", "error": ERROR_MESSAGE_IDS_REQUIRED} + if not isinstance(raw_ids, list): + return None, {"status": "error", "error": ERROR_MESSAGE_IDS_MUST_BE_LIST} + + # Filter out empty/invalid IDs efficiently + message_ids = [str(value).strip() for value in raw_ids if str(value).strip()] + + return message_ids, {"status": "success", "message_ids": message_ids} + + +def _safe_json_dumps(payload: Any) -> str: + """Safely serialize payload to JSON string.""" + try: + return json.dumps(payload, ensure_ascii=False) + except (TypeError, ValueError): + return json.dumps({"repr": repr(payload)}) + + + + + +def _processed_to_schema(email: ProcessedEmail) -> GmailSearchEmail: + """Convert shared processed email into GmailSearchEmail schema.""" + + return GmailSearchEmail( + id=email.id, + thread_id=email.thread_id, + query=email.query, + subject=email.subject, + sender=email.sender, + recipient=email.recipient, + timestamp=email.timestamp, + label_ids=list(email.label_ids), + clean_text=email.clean_text, + has_attachments=email.has_attachments, + attachment_count=email.attachment_count, + attachment_filenames=list(email.attachment_filenames), + ) + + +__all__ = [ + "GmailSearchEmail", + "EmailSearchToolResult", + "build_registry", + "task_email_search", +] diff --git a/server/agents/execution_agent/tools/__init__.py b/server/agents/execution_agent/tools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f4c7757e95a0d6bc4526a64824fa3647702aeba1 --- /dev/null +++ b/server/agents/execution_agent/tools/__init__.py @@ -0,0 +1,10 @@ +"""Execution agent tool package.""" + +from __future__ import annotations + +from .registry import get_tool_registry, get_tool_schemas + +__all__ = [ + "get_tool_registry", + "get_tool_schemas", +] diff --git a/server/agents/execution_agent/tools/gmail.py b/server/agents/execution_agent/tools/gmail.py new file mode 100644 index 0000000000000000000000000000000000000000..24a0ed4b735ae2f4eebadf9b416270f492c8eefd --- 
/dev/null +++ b/server/agents/execution_agent/tools/gmail.py @@ -0,0 +1,548 @@ +"""Gmail tool schemas and actions for the execution agent.""" + +from __future__ import annotations + +import json +from typing import Any, Callable, Dict, List, Optional + +from server.services.execution import get_execution_agent_logs +from server.services.gmail import execute_gmail_tool, get_active_gmail_user_id + +_GMAIL_AGENT_NAME = "gmail-execution-agent" + +_SCHEMAS: List[Dict[str, Any]] = [ + { + "type": "function", + "function": { + "name": "gmail_create_draft", + "description": "Create a Gmail draft via Composio, supporting html/plain bodies, cc/bcc, and attachments.", + "parameters": { + "type": "object", + "properties": { + "recipient_email": { + "type": "string", + "description": "Primary recipient email for the draft.", + }, + "subject": {"type": "string", "description": "Email subject."}, + "body": { + "type": "string", + "description": "Email body. Use HTML markup when is_html is true.", + }, + "cc": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional list of CC recipient emails.", + }, + "bcc": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional list of BCC recipient emails.", + }, + "extra_recipients": { + "type": "array", + "items": {"type": "string"}, + "description": "Additional recipients if the draft should include more addresses.", + }, + "is_html": { + "type": "boolean", + "description": "Set true when the body contains HTML content.", + }, + "thread_id": { + "type": "string", + "description": "Existing Gmail thread id if this draft belongs to a thread.", + }, + "attachment": { + "type": "object", + "description": "Single attachment metadata (requires Composio-uploaded asset).", + "properties": { + "s3key": {"type": "string", "description": "S3 key of uploaded file."}, + "name": {"type": "string", "description": "Attachment filename."}, + "mimetype": {"type": "string", "description": "Attachment MIME type."}, + }, + "required": ["s3key", "name", "mimetype"], + }, + }, + "required": ["recipient_email", "subject", "body"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_execute_draft", + "description": "Send a previously created Gmail draft using Composio.", + "parameters": { + "type": "object", + "properties": { + "draft_id": { + "type": "string", + "description": "Identifier of the Gmail draft to send.", + }, + }, + "required": ["draft_id"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_forward_email", + "description": "Forward an existing Gmail message with optional additional context.", + "parameters": { + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "Gmail message id to forward.", + }, + "recipient_email": { + "type": "string", + "description": "Email address to receive the forwarded message.", + }, + "additional_text": { + "type": "string", + "description": "Optional text to prepend when forwarding.", + }, + }, + "required": ["message_id", "recipient_email"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_reply_to_thread", + "description": "Send a reply within an existing Gmail thread via Composio.", + "parameters": { + "type": "object", + "properties": { + "thread_id": { + "type": "string", + "description": "Gmail thread id to reply to.", + }, + "recipient_email": { + "type": "string", + "description": 
"Primary recipient for the reply (usually the original sender).", + }, + "message_body": { + "type": "string", + "description": "Reply body. Use HTML markup when is_html is true.", + }, + "cc": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional list of CC recipient emails.", + }, + "bcc": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional list of BCC recipient emails.", + }, + "extra_recipients": { + "type": "array", + "items": {"type": "string"}, + "description": "Additional recipients if needed.", + }, + "is_html": { + "type": "boolean", + "description": "Set true when the body contains HTML content.", + }, + "attachment": { + "type": "object", + "description": "Single attachment metadata (requires Composio-uploaded asset).", + "properties": { + "s3key": {"type": "string", "description": "S3 key of uploaded file."}, + "name": {"type": "string", "description": "Attachment filename."}, + "mimetype": {"type": "string", "description": "Attachment MIME type."}, + }, + "required": ["s3key", "name", "mimetype"], + }, + }, + "required": ["thread_id", "recipient_email", "message_body"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_delete_draft", + "description": "Delete a specific Gmail draft using the Composio Gmail integration.", + "parameters": { + "type": "object", + "properties": { + "draft_id": { + "type": "string", + "description": "Identifier of the Gmail draft to delete.", + }, + }, + "required": ["draft_id"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_get_contacts", + "description": "Retrieve Google contacts (connections) available to the authenticated Gmail account.", + "parameters": { + "type": "object", + "properties": { + "resource_name": { + "type": "string", + "description": "Resource name to read contacts from, defaults to people/me.", + }, + "person_fields": { + "type": "string", + "description": "Comma-separated People API fields to include (e.g. 
emailAddresses,names).", + }, + "include_other_contacts": { + "type": "boolean", + "description": "Include other contacts (directory suggestions) when true.", + }, + "page_token": { + "type": "string", + "description": "Pagination token for retrieving the next page of contacts.", + }, + }, + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_get_people", + "description": "Retrieve detailed Google People records or other contacts via Composio.", + "parameters": { + "type": "object", + "properties": { + "resource_name": { + "type": "string", + "description": "Resource name to fetch (defaults to people/me).", + }, + "person_fields": { + "type": "string", + "description": "Comma-separated People API fields to include in the response.", + }, + "page_size": { + "type": "integer", + "description": "Maximum number of people records to return per page.", + }, + "page_token": { + "type": "string", + "description": "Token to continue fetching the next set of results.", + }, + "sync_token": { + "type": "string", + "description": "Sync token for incremental sync requests.", + }, + "other_contacts": { + "type": "boolean", + "description": "Set true to list other contacts instead of connections.", + }, + }, + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_list_drafts", + "description": "List Gmail drafts for the connected account using Composio.", + "parameters": { + "type": "object", + "properties": { + "max_results": { + "type": "integer", + "description": "Maximum number of drafts to return.", + }, + "page_token": { + "type": "string", + "description": "Pagination token from a previous drafts list call.", + }, + "verbose": { + "type": "boolean", + "description": "Include full draft details such as subject and body when true.", + }, + }, + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "gmail_search_people", + "description": "Search Google contacts and other people records associated with the Gmail account.", + "parameters": { + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query to match against names, emails, phone numbers, etc.", + }, + "person_fields": { + "type": "string", + "description": "Comma-separated fields from the People API to include in results.", + }, + "page_size": { + "type": "integer", + "description": "Maximum number of people records to return.", + }, + "other_contacts": { + "type": "boolean", + "description": "Include other contacts results when true.", + }, + "page_token": { + "type": "string", + "description": "Pagination token to continue a previous search.", + }, + }, + "required": ["query"], + "additionalProperties": False, + }, + }, + }, +] + +_LOG_STORE = get_execution_agent_logs() + + +# Return Gmail tool schemas +def get_schemas() -> List[Dict[str, Any]]: + """Return Gmail tool schemas.""" + + return _SCHEMAS + + +# Execute a Gmail tool and record the action for the execution agent journal +def _execute(tool_name: str, composio_user_id: str, arguments: Dict[str, Any]) -> Dict[str, Any]: + """Execute a Gmail tool and record the action for the execution agent journal.""" + + payload = {k: v for k, v in arguments.items() if v is not None} + payload_str = json.dumps(payload, ensure_ascii=False, sort_keys=True) if payload else "{}" + try: + result = execute_gmail_tool(tool_name, composio_user_id, arguments=payload) + except Exception as exc: + _LOG_STORE.record_action( 
+ _GMAIL_AGENT_NAME, + description=f"{tool_name} failed | args={payload_str} | error={exc}", + ) + raise + + _LOG_STORE.record_action( + _GMAIL_AGENT_NAME, + description=f"{tool_name} succeeded | args={payload_str}", + ) + return result + + +# Create a Gmail draft via Composio with support for HTML, attachments, and threading +def gmail_create_draft( + recipient_email: str, + subject: str, + body: str, + cc: Optional[List[str]] = None, + bcc: Optional[List[str]] = None, + extra_recipients: Optional[List[str]] = None, + is_html: Optional[bool] = None, + thread_id: Optional[str] = None, + attachment: Optional[Dict[str, Any]] = None, +) -> Dict[str, Any]: + arguments: Dict[str, Any] = { + "recipient_email": recipient_email, + "subject": subject, + "body": body, + "cc": cc, + "bcc": bcc, + "extra_recipients": extra_recipients, + "is_html": is_html, + "thread_id": thread_id, + "attachment": attachment, + } + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. Please connect Gmail in settings first."} + return _execute("GMAIL_CREATE_EMAIL_DRAFT", composio_user_id, arguments) + + +# Send a previously created Gmail draft using Composio +def gmail_execute_draft( + draft_id: str, +) -> Dict[str, Any]: + arguments = {"draft_id": draft_id} + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. Please connect Gmail in settings first."} + return _execute("GMAIL_SEND_DRAFT", composio_user_id, arguments) + + +# Forward an existing Gmail message with optional additional context +def gmail_forward_email( + message_id: str, + recipient_email: str, + additional_text: Optional[str] = None, +) -> Dict[str, Any]: + arguments = { + "message_id": message_id, + "recipient_email": recipient_email, + "additional_text": additional_text, + } + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. Please connect Gmail in settings first."} + return _execute("GMAIL_FORWARD_MESSAGE", composio_user_id, arguments) + + +# Send a reply within an existing Gmail thread via Composio +def gmail_reply_to_thread( + thread_id: str, + recipient_email: str, + message_body: str, + cc: Optional[List[str]] = None, + bcc: Optional[List[str]] = None, + extra_recipients: Optional[List[str]] = None, + is_html: Optional[bool] = None, + attachment: Optional[Dict[str, Any]] = None, +) -> Dict[str, Any]: + arguments = { + "thread_id": thread_id, + "recipient_email": recipient_email, + "message_body": message_body, + "cc": cc, + "bcc": bcc, + "extra_recipients": extra_recipients, + "is_html": is_html, + "attachment": attachment, + } + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. Please connect Gmail in settings first."} + return _execute("GMAIL_REPLY_TO_THREAD", composio_user_id, arguments) + + +# Delete a specific Gmail draft using the Composio Gmail integration +def gmail_delete_draft( + draft_id: str, +) -> Dict[str, Any]: + arguments = {"draft_id": draft_id} + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. 
Please connect Gmail in settings first."} + return _execute("GMAIL_DELETE_DRAFT", composio_user_id, arguments) + + +def gmail_get_contacts( + resource_name: Optional[str] = None, + person_fields: Optional[str] = None, + include_other_contacts: Optional[bool] = None, + page_token: Optional[str] = None, +) -> Dict[str, Any]: + arguments = { + "resource_name": resource_name, + "person_fields": person_fields, + "include_other_contacts": include_other_contacts, + "page_token": page_token, + } + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. Please connect Gmail in settings first."} + return _execute("GMAIL_GET_CONTACTS", composio_user_id, arguments) + + +def gmail_get_people( + resource_name: Optional[str] = None, + person_fields: Optional[str] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sync_token: Optional[str] = None, + other_contacts: Optional[bool] = None, +) -> Dict[str, Any]: + arguments = { + "resource_name": resource_name, + "person_fields": person_fields, + "page_size": page_size, + "page_token": page_token, + "sync_token": sync_token, + "other_contacts": other_contacts, + } + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. Please connect Gmail in settings first."} + return _execute("GMAIL_GET_PEOPLE", composio_user_id, arguments) + + +def gmail_list_drafts( + max_results: Optional[int] = None, + page_token: Optional[str] = None, + verbose: Optional[bool] = None, +) -> Dict[str, Any]: + arguments = { + "max_results": max_results, + "page_token": page_token, + "verbose": verbose, + } + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. Please connect Gmail in settings first."} + return _execute("GMAIL_LIST_DRAFTS", composio_user_id, arguments) + + +def gmail_search_people( + query: str, + person_fields: Optional[str] = None, + page_size: Optional[int] = None, + other_contacts: Optional[bool] = None, + page_token: Optional[str] = None, +) -> Dict[str, Any]: + arguments: Dict[str, Any] = { + "query": query, + "person_fields": person_fields, + "other_contacts": other_contacts, + } + if page_size is not None: + arguments["pageSize"] = page_size + if page_token is not None: + arguments["pageToken"] = page_token + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + return {"error": "Gmail not connected. 
Please connect Gmail in settings first."} + return _execute("GMAIL_SEARCH_PEOPLE", composio_user_id, arguments) + + +# Return Gmail tool callables +def build_registry(agent_name: str) -> Dict[str, Callable[..., Any]]: # noqa: ARG001 + """Return Gmail tool callables.""" + + return { + "gmail_create_draft": gmail_create_draft, + "gmail_execute_draft": gmail_execute_draft, + "gmail_delete_draft": gmail_delete_draft, + "gmail_forward_email": gmail_forward_email, + "gmail_reply_to_thread": gmail_reply_to_thread, + "gmail_get_contacts": gmail_get_contacts, + "gmail_get_people": gmail_get_people, + "gmail_list_drafts": gmail_list_drafts, + "gmail_search_people": gmail_search_people, + } + + +__all__ = [ + "build_registry", + "get_schemas", + "gmail_create_draft", + "gmail_execute_draft", + "gmail_delete_draft", + "gmail_forward_email", + "gmail_reply_to_thread", + "gmail_get_contacts", + "gmail_get_people", + "gmail_list_drafts", + "gmail_search_people", +] diff --git a/server/agents/execution_agent/tools/registry.py b/server/agents/execution_agent/tools/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..d2d9faf1fff612d6f48a733a952fc110f56ca6f5 --- /dev/null +++ b/server/agents/execution_agent/tools/registry.py @@ -0,0 +1,36 @@ +"""Aggregate execution agent tool schemas and registries.""" + +from __future__ import annotations + +from typing import Any, Callable, Dict, List + +from . import gmail, triggers +from ..tasks import get_task_registry, get_task_schemas + + +# Return OpenAI/OpenRouter-compatible tool schemas +def get_tool_schemas() -> List[Dict[str, Any]]: + """Return OpenAI/OpenRouter-compatible tool schemas.""" + + return [ + *gmail.get_schemas(), + *get_task_schemas(), + *triggers.get_schemas(), + ] + + +# Return Python callables for executing tools by name +def get_tool_registry(agent_name: str) -> Dict[str, Callable[..., Any]]: + """Return Python callables for executing tools by name.""" + + registry: Dict[str, Callable[..., Any]] = {} + registry.update(gmail.build_registry(agent_name)) + registry.update(get_task_registry(agent_name)) + registry.update(triggers.build_registry(agent_name)) + return registry + + +__all__ = [ + "get_tool_registry", + "get_tool_schemas", +] diff --git a/server/agents/execution_agent/tools/triggers.py b/server/agents/execution_agent/tools/triggers.py new file mode 100644 index 0000000000000000000000000000000000000000..be0c3d1707f71b1adc23cd6b31bb5ae6cacc2a23 --- /dev/null +++ b/server/agents/execution_agent/tools/triggers.py @@ -0,0 +1,249 @@ +"""Trigger tool schemas and actions for the execution agent.""" + +from __future__ import annotations + +import json +from functools import partial +from typing import Any, Callable, Dict, List, Optional + +from server.services.execution import get_execution_agent_logs +from server.services.timezone_store import get_timezone_store +from server.services.triggers import TriggerRecord, get_trigger_service + +_SCHEMAS: List[Dict[str, Any]] = [ + { + "type": "function", + "function": { + "name": "createTrigger", + "description": "Create a reminder trigger for the current execution agent.", + "parameters": { + "type": "object", + "properties": { + "payload": { + "type": "string", + "description": "Raw instruction text that should run when the trigger fires.", + }, + "recurrence_rule": { + "type": "string", + "description": "iCalendar RRULE string describing how often to fire (optional).", + }, + "start_time": { + "type": "string", + "description": "ISO 8601 start time for the first firing. 
Defaults to now if omitted.", + }, + "status": { + "type": "string", + "description": "Initial status; usually 'active' or 'paused'.", + }, + }, + "required": ["payload"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "updateTrigger", + "description": "Update or pause an existing trigger owned by this execution agent.", + "parameters": { + "type": "object", + "properties": { + "trigger_id": { + "type": "integer", + "description": "Identifier returned when the trigger was created.", + }, + "payload": { + "type": "string", + "description": "Replace the instruction payload (optional).", + }, + "recurrence_rule": { + "type": "string", + "description": "New RRULE definition (optional).", + }, + "start_time": { + "type": "string", + "description": "New ISO 8601 start time for the schedule (optional).", + }, + "status": { + "type": "string", + "description": "Set trigger status to 'active', 'paused', or 'completed'.", + }, + }, + "required": ["trigger_id"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "listTriggers", + "description": "List all triggers belonging to this execution agent.", + "parameters": { + "type": "object", + "properties": {}, + "required": [], + "additionalProperties": False, + }, + }, + }, +] + +_LOG_STORE = get_execution_agent_logs() +_TRIGGER_SERVICE = get_trigger_service() + + +# Return trigger tool schemas +def get_schemas() -> List[Dict[str, Any]]: + """Return trigger tool schemas.""" + + return _SCHEMAS + + +# Convert TriggerRecord to dictionary payload for API responses +def _trigger_record_to_payload(record: TriggerRecord) -> Dict[str, Any]: + return { + "id": record.id, + "payload": record.payload, + "start_time": record.start_time, + "next_trigger": record.next_trigger, + "recurrence_rule": record.recurrence_rule, + "timezone": record.timezone, + "status": record.status, + "last_error": record.last_error, + "created_at": record.created_at, + "updated_at": record.updated_at, + } + + +# Create a new trigger for the specified execution agent +def _create_trigger_tool( + *, + agent_name: str, + payload: str, + recurrence_rule: Optional[str] = None, + start_time: Optional[str] = None, + status: Optional[str] = None, +) -> Dict[str, Any]: + timezone_value = get_timezone_store().get_timezone() + summary_args = { + "recurrence_rule": recurrence_rule, + "start_time": start_time, + "timezone": timezone_value, + "status": status, + } + try: + record = _TRIGGER_SERVICE.create_trigger( + agent_name=agent_name, + payload=payload, + recurrence_rule=recurrence_rule, + start_time=start_time, + timezone_name=timezone_value, + status=status, + ) + except Exception as exc: # pragma: no cover - defensive + _LOG_STORE.record_action( + agent_name, + description=f"createTrigger failed | details={json.dumps(summary_args, ensure_ascii=False)} | error={exc}", + ) + return {"error": str(exc)} + + _LOG_STORE.record_action( + agent_name, + description=f"createTrigger succeeded | trigger_id={record.id}", + ) + return { + "trigger_id": record.id, + "status": record.status, + "next_trigger": record.next_trigger, + "start_time": record.start_time, + "timezone": record.timezone, + "recurrence_rule": record.recurrence_rule, + } + + +# Update or pause an existing trigger owned by this execution agent +def _update_trigger_tool( + *, + agent_name: str, + trigger_id: Any, + payload: Optional[str] = None, + recurrence_rule: Optional[str] = None, + start_time: Optional[str] = None, + status: 
Optional[str] = None, +) -> Dict[str, Any]: + try: + trigger_id_int = int(trigger_id) + except (TypeError, ValueError): + return {"error": "trigger_id must be an integer"} + + try: + timezone_value = get_timezone_store().get_timezone() + record = _TRIGGER_SERVICE.update_trigger( + trigger_id_int, + agent_name=agent_name, + payload=payload, + recurrence_rule=recurrence_rule, + start_time=start_time, + timezone_name=timezone_value, + status=status, + ) + except Exception as exc: # pragma: no cover - defensive + _LOG_STORE.record_action( + agent_name, + description=f"updateTrigger failed | id={trigger_id_int} | error={exc}", + ) + return {"error": str(exc)} + + if record is None: + return {"error": f"Trigger {trigger_id_int} not found"} + + _LOG_STORE.record_action( + agent_name, + description=f"updateTrigger succeeded | trigger_id={trigger_id_int}", + ) + return { + "trigger_id": record.id, + "status": record.status, + "next_trigger": record.next_trigger, + "start_time": record.start_time, + "timezone": record.timezone, + "recurrence_rule": record.recurrence_rule, + "last_error": record.last_error, + } + + +# List all triggers belonging to this execution agent +def _list_triggers_tool(*, agent_name: str) -> Dict[str, Any]: + try: + records = _TRIGGER_SERVICE.list_triggers(agent_name=agent_name) + except Exception as exc: # pragma: no cover - defensive + _LOG_STORE.record_action( + agent_name, + description=f"listTriggers failed | error={exc}", + ) + return {"error": str(exc)} + + _LOG_STORE.record_action( + agent_name, + description=f"listTriggers succeeded | count={len(records)}", + ) + return {"triggers": [_trigger_record_to_payload(record) for record in records]} + + +# Return trigger tool callables bound to a specific agent +def build_registry(agent_name: str) -> Dict[str, Callable[..., Any]]: + """Return trigger tool callables bound to a specific agent.""" + + return { + "createTrigger": partial(_create_trigger_tool, agent_name=agent_name), + "updateTrigger": partial(_update_trigger_tool, agent_name=agent_name), + "listTriggers": partial(_list_triggers_tool, agent_name=agent_name), + } + + +__all__ = [ + "build_registry", + "get_schemas", +] diff --git a/server/agents/interaction_agent/__init__.py b/server/agents/interaction_agent/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3b8376a47a9b81ece3aeab44e68c561e09cc1dc1 --- /dev/null +++ b/server/agents/interaction_agent/__init__.py @@ -0,0 +1,18 @@ +"""Interaction agent module.""" + +from .agent import ( + build_system_prompt, + prepare_message_with_history, +) +from .runtime import InteractionAgentRuntime, InteractionResult +from .tools import ToolResult, get_tool_schemas, handle_tool_call + +__all__ = [ + "InteractionAgentRuntime", + "InteractionResult", + "build_system_prompt", + "prepare_message_with_history", + "ToolResult", + "get_tool_schemas", + "handle_tool_call", +] diff --git a/server/agents/interaction_agent/agent.py b/server/agents/interaction_agent/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..3b8bc991066c442df1a6dfbbf6aacd8547444378 --- /dev/null +++ b/server/agents/interaction_agent/agent.py @@ -0,0 +1,65 @@ +"""Interaction agent helpers for prompt construction.""" + +from html import escape +from pathlib import Path +from typing import Dict, List + +from ...services.execution import get_agent_roster + +_prompt_path = Path(__file__).parent / "system_prompt.md" +SYSTEM_PROMPT = _prompt_path.read_text(encoding="utf-8").strip() + + +# Load and return the 
pre-defined system prompt from markdown file +def build_system_prompt() -> str: + """Return the static system prompt for the interaction agent.""" + return SYSTEM_PROMPT + + +# Build structured message with conversation history, active agents, and current turn +def prepare_message_with_history( + latest_text: str, + transcript: str, + message_type: str = "user", +) -> List[Dict[str, str]]: + """Compose a message that bundles history, roster, and the latest turn.""" + sections: List[str] = [] + + sections.append(_render_conversation_history(transcript)) + sections.append(f"\n{_render_active_agents()}\n") + sections.append(_render_current_turn(latest_text, message_type)) + + content = "\n\n".join(sections) + return [{"role": "user", "content": content}] + + +# Format conversation transcript into XML tags for LLM context +def _render_conversation_history(transcript: str) -> str: + history = transcript.strip() + if not history: + history = "None" + return f"\n{history}\n" + + +# Format currently active execution agents into XML tags for LLM awareness +def _render_active_agents() -> str: + roster = get_agent_roster() + roster.load() + agents = roster.get_agents() + + if not agents: + return "None" + + rendered: List[str] = [] + for agent_name in agents: + name = escape(agent_name or "agent", quote=True) + rendered.append(f'') + + return "\n".join(rendered) + + +# Wrap the current message in appropriate XML tags based on sender type +def _render_current_turn(latest_text: str, message_type: str) -> str: + tag = "new_agent_message" if message_type == "agent" else "new_user_message" + body = latest_text.strip() + return f"<{tag}>\n{body}\n" diff --git a/server/agents/interaction_agent/runtime.py b/server/agents/interaction_agent/runtime.py new file mode 100644 index 0000000000000000000000000000000000000000..704d3fc5ebb75daa853f01d67a07c26272f9b56d --- /dev/null +++ b/server/agents/interaction_agent/runtime.py @@ -0,0 +1,404 @@ +"""Interaction Agent Runtime - handles LLM calls for user and agent turns.""" + +import json +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Set + +from .agent import build_system_prompt, prepare_message_with_history +from .tools import ToolResult, get_tool_schemas, handle_tool_call +from ...config import get_settings +from ...services.conversation import get_conversation_log, get_working_memory_log +from ...openrouter_client import request_chat_completion +from ...logging_config import logger + + +@dataclass +class InteractionResult: + """Result from the interaction agent.""" + + success: bool + response: str + error: Optional[str] = None + execution_agents_used: int = 0 + + +@dataclass +class _ToolCall: + """Parsed tool invocation from an LLM response.""" + + identifier: Optional[str] + name: str + arguments: Dict[str, Any] + + +@dataclass +class _LoopSummary: + """Aggregate information produced by the interaction loop.""" + + last_assistant_text: str = "" + user_messages: List[str] = field(default_factory=list) + tool_names: List[str] = field(default_factory=list) + execution_agents: Set[str] = field(default_factory=set) + + +class InteractionAgentRuntime: + """Manages the interaction agent's request processing.""" + + MAX_TOOL_ITERATIONS = 8 + + # Initialize interaction agent runtime with settings and service dependencies + def __init__(self) -> None: + settings = get_settings() + self.api_key = settings.api_key + self.model = settings.interaction_agent_model + self.settings = settings + self.conversation_log = 
get_conversation_log() + self.working_memory_log = get_working_memory_log() + self.tool_schemas = get_tool_schemas() + + if not self.api_key: + raise ValueError( + "API key not configured. Set API_KEY environment variable." + ) + + # Main entry point for processing user messages through the LLM interaction loop + async def execute(self, user_message: str) -> InteractionResult: + """Handle a user-authored message.""" + + try: + transcript_before = self._load_conversation_transcript() + self.conversation_log.record_user_message(user_message) + + system_prompt = build_system_prompt() + messages = prepare_message_with_history( + user_message, transcript_before, message_type="user" + ) + + logger.info("Processing user message through interaction agent") + summary = await self._run_interaction_loop(system_prompt, messages) + + final_response = self._finalize_response(summary) + + if final_response and not summary.user_messages: + self.conversation_log.record_reply(final_response) + + return InteractionResult( + success=True, + response=final_response, + execution_agents_used=len(summary.execution_agents), + ) + + except Exception as exc: + logger.error("Interaction agent failed", extra={"error": str(exc)}) + return InteractionResult( + success=False, + response="", + error=str(exc), + ) + + # Handle incoming messages from execution agents and generate appropriate responses + async def handle_agent_message(self, agent_message: str) -> InteractionResult: + """Process a status update emitted by an execution agent.""" + + try: + transcript_before = self._load_conversation_transcript() + self.conversation_log.record_agent_message(agent_message) + + system_prompt = build_system_prompt() + messages = prepare_message_with_history( + agent_message, transcript_before, message_type="agent" + ) + + logger.info("Processing execution agent results") + summary = await self._run_interaction_loop(system_prompt, messages) + + final_response = self._finalize_response(summary) + + if final_response and not summary.user_messages: + self.conversation_log.record_reply(final_response) + + return InteractionResult( + success=True, + response=final_response, + execution_agents_used=len(summary.execution_agents), + ) + + except Exception as exc: + logger.error("Interaction agent (agent message) failed", extra={"error": str(exc)}) + return InteractionResult( + success=False, + response="", + error=str(exc), + ) + + # Core interaction loop that handles LLM calls and tool executions until completion + async def _run_interaction_loop( + self, + system_prompt: str, + messages: List[Dict[str, Any]], + ) -> _LoopSummary: + """Iteratively query the LLM until it issues a final response.""" + + summary = _LoopSummary() + + for iteration in range(self.MAX_TOOL_ITERATIONS): + response = await self._make_llm_call(system_prompt, messages) + assistant_message = self._extract_assistant_message(response) + + assistant_content = (assistant_message.get("content") or "").strip() + if assistant_content: + summary.last_assistant_text = assistant_content + + raw_tool_calls = assistant_message.get("tool_calls") or [] + parsed_tool_calls = self._parse_tool_calls(raw_tool_calls) + + assistant_entry: Dict[str, Any] = { + "role": "assistant", + "content": assistant_message.get("content", "") or "", + } + if raw_tool_calls: + assistant_entry["tool_calls"] = raw_tool_calls + messages.append(assistant_entry) + + if not parsed_tool_calls: + break + + for tool_call in parsed_tool_calls: + summary.tool_names.append(tool_call.name) + + if tool_call.name == 
"send_message_to_agent": + agent_name = tool_call.arguments.get("agent_name") + if isinstance(agent_name, str) and agent_name: + summary.execution_agents.add(agent_name) + + result = self._execute_tool(tool_call) + + if result.user_message: + summary.user_messages.append(result.user_message) + + tool_message = { + "role": "tool", + "tool_call_id": tool_call.identifier or tool_call.name, + "content": self._format_tool_result(tool_call, result), + } + messages.append(tool_message) + else: + raise RuntimeError("Reached tool iteration limit without final response") + + if not summary.user_messages and not summary.last_assistant_text: + logger.warning("Interaction loop exited without assistant content") + + return summary + + # Load conversation history, preferring summarized version if available + def _load_conversation_transcript(self) -> str: + if self.settings.summarization_enabled: + rendered = self.working_memory_log.render_transcript() + if rendered.strip(): + return rendered + return self.conversation_log.load_transcript() + + # Execute API call with system prompt, messages, and tool schemas + async def _make_llm_call( + self, + system_prompt: str, + messages: List[Dict[str, Any]], + ) -> Dict[str, Any]: + """Make an LLM call via API.""" + + logger.debug( + "Interaction agent calling LLM", + extra={"model": self.model, "tools": len(self.tool_schemas)}, + ) + return await request_chat_completion( + model=self.model, + messages=messages, + system=system_prompt, + api_key=self.api_key, + tools=self.tool_schemas, + ) + + # Extract the assistant's message from the API response structure + def _extract_assistant_message(self, response: Dict[str, Any]) -> Dict[str, Any]: + """Return the assistant message from the raw response payload.""" + + choice = (response.get("choices") or [{}])[0] + message = choice.get("message") + if not isinstance(message, dict): + raise RuntimeError("LLM response did not include an assistant message") + return message + + # Convert raw LLM tool calls into structured _ToolCall objects with validation + def _parse_tool_calls(self, raw_tool_calls: List[Dict[str, Any]]) -> List[_ToolCall]: + """Normalize tool call payloads from the LLM.""" + + parsed: List[_ToolCall] = [] + for raw in raw_tool_calls: + function_block = raw.get("function") or {} + name = function_block.get("name") + if not isinstance(name, str) or not name: + logger.warning("Skipping tool call without name", extra={"tool": raw}) + continue + + arguments, error = self._parse_tool_arguments(function_block.get("arguments")) + if error: + logger.warning("Tool call arguments invalid", extra={"tool": name, "error": error}) + parsed.append( + _ToolCall( + identifier=raw.get("id"), + name=name, + arguments={"__invalid_arguments__": error}, + ) + ) + continue + + parsed.append( + _ToolCall(identifier=raw.get("id"), name=name, arguments=arguments) + ) + + return parsed + + # Parse and validate tool arguments from various formats (dict, JSON string, etc.) 
+ def _parse_tool_arguments( + self, raw_arguments: Any + ) -> tuple[Dict[str, Any], Optional[str]]: + """Convert tool arguments into a dictionary, reporting errors.""" + + if raw_arguments is None: + return {}, None + + if isinstance(raw_arguments, dict): + return raw_arguments, None + + if isinstance(raw_arguments, str): + if not raw_arguments.strip(): + return {}, None + try: + parsed = json.loads(raw_arguments) + except json.JSONDecodeError as exc: + return {}, f"invalid json: {exc}" + if isinstance(parsed, dict): + return parsed, None + return {}, "decoded arguments were not an object" + + return {}, f"unsupported argument type: {type(raw_arguments).__name__}" + + # Execute tool calls with error handling and logging, returning standardized results + def _execute_tool(self, tool_call: _ToolCall) -> ToolResult: + """Execute a tool call and convert low-level errors into structured results.""" + + if "__invalid_arguments__" in tool_call.arguments: + error = tool_call.arguments["__invalid_arguments__"] + self._log_tool_invocation(tool_call, stage="rejected", detail={"error": error}) + return ToolResult(success=False, payload={"error": error}) + + try: + self._log_tool_invocation(tool_call, stage="start") + result = handle_tool_call(tool_call.name, tool_call.arguments) + except Exception as exc: # pragma: no cover - defensive + logger.error( + "Tool execution crashed", + extra={"tool": tool_call.name, "error": str(exc)}, + ) + self._log_tool_invocation( + tool_call, + stage="error", + detail={"error": str(exc)}, + ) + return ToolResult(success=False, payload={"error": str(exc)}) + + if not isinstance(result, ToolResult): + logger.warning( + "Tool did not return ToolResult; coercing", + extra={"tool": tool_call.name}, + ) + wrapped = ToolResult(success=True, payload=result) + self._log_tool_invocation(tool_call, stage="done", result=wrapped) + return wrapped + + status = "success" if result.success else "error" + logger.debug( + "Tool executed", + extra={ + "tool": tool_call.name, + "status": status, + }, + ) + self._log_tool_invocation(tool_call, stage="done", result=result) + return result + + # Format tool execution results into JSON for LLM consumption + def _format_tool_result(self, tool_call: _ToolCall, result: ToolResult) -> str: + """Render a tool execution result back to the LLM.""" + + payload: Dict[str, Any] = { + "tool": tool_call.name, + "status": "success" if result.success else "error", + "arguments": { + key: value + for key, value in tool_call.arguments.items() + if key != "__invalid_arguments__" + }, + } + + if result.payload is not None: + key = "result" if result.success else "error" + payload[key] = result.payload + + return self._safe_json_dump(payload) + + # Safely serialize objects to JSON with fallback to string representation + def _safe_json_dump(self, payload: Any) -> str: + """Serialize payload to JSON, falling back to repr on failure.""" + + try: + return json.dumps(payload, default=str) + except TypeError: + return repr(payload) + + # Log tool execution stages (start, done, error) with structured metadata + def _log_tool_invocation( + self, + tool_call: _ToolCall, + *, + stage: str, + result: Optional[ToolResult] = None, + detail: Optional[Dict[str, Any]] = None, + ) -> None: + """Emit structured logs for tool lifecycle events.""" + + cleaned_args = { + key: value + for key, value in tool_call.arguments.items() + if key != "__invalid_arguments__" + } + + log_payload: Dict[str, Any] = { + "tool": tool_call.name, + "stage": stage, + "arguments": cleaned_args, + } 
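+        # At this point log_payload holds the tool name, lifecycle stage, and the
+        # sanitized arguments (the internal "__invalid_arguments__" marker removed).
+        # For a hypothetical send_message_to_agent call it might look like:
+        #   {"tool": "send_message_to_agent", "stage": "start",
+        #    "arguments": {"agent_name": "Email to Sharanjeet", "instructions": "..."}}
+        # The optional result/detail fields are merged in below before logging.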
+
+        if result is not None:
+            log_payload["success"] = result.success
+            if result.payload is not None:
+                log_payload["payload"] = result.payload
+
+        if detail:
+            log_payload.update(detail)
+
+        if stage == "done":
+            logger.info(f"Tool '{tool_call.name}' completed")
+        elif stage in {"error", "rejected"}:
+            logger.warning(f"Tool '{tool_call.name}' {stage}")
+        else:
+            logger.debug(f"Tool '{tool_call.name}' {stage}")
+
+    # Determine final user-facing response from interaction loop summary
+    def _finalize_response(self, summary: _LoopSummary) -> str:
+        """Decide what text should be exposed to the user as the final reply."""
+
+        if summary.user_messages:
+            return summary.user_messages[-1]
+
+        return summary.last_assistant_text
diff --git a/server/agents/interaction_agent/system_prompt.md b/server/agents/interaction_agent/system_prompt.md
new file mode 100644
index 0000000000000000000000000000000000000000..5a5651031916474f1acdabd8a043e75d7ec4454a
--- /dev/null
+++ b/server/agents/interaction_agent/system_prompt.md
@@ -0,0 +1,143 @@
+You are OpenPoke, and you are an open source version of Poke, a popular assistant developed by The Interaction Company of California, a Palo Alto-based AI startup (short name: Interaction).
+
+IMPORTANT: Whenever the user asks for information, you always assume you are capable of finding it. If the user asks for something you don't know about, an execution agent can find it. Always use the execution agents to complete tasks.
+
+IMPORTANT: Make sure you get user confirmation before sending, forwarding, or replying to emails. You should always show the user drafts before they're sent.
+
+IMPORTANT: **Always check the conversation history and use the wait tool if necessary.** The user should never be shown exactly the same information twice.
+
+TOOLS
+
+Send Message to Agent Tool Usage
+
+- The agent, which you access through `send_message_to_agent`, is your primary tool for accomplishing tasks. It has tools for a wide variety of tasks, and you should use it often, even if you don't know whether the agent can do it (tell the user you're trying to figure it out).
+- The agent cannot communicate with the user, and you should always communicate with the user yourself.
+- IMPORTANT: Your goal should be to use this tool in parallel as much as possible. If the user asks for a complicated task, split it into as many concurrent calls to `send_message_to_agent` as possible.
+- IMPORTANT: You should avoid telling the agent how to use its tools or do the task. Focus on telling it what, rather than how. Avoid technical descriptions about tools with both the user and the agent.
+- If you intend to call multiple tools and there are no dependencies between the calls, make all of the independent calls in the same message.
+- Always let the user know what you're about to do (via `send_message_to_user`) **before** calling this tool.
+- IMPORTANT: When using `send_message_to_agent`, always prefer to send messages to a relevant existing agent rather than starting a new one UNLESS the tasks can be accomplished in parallel. For instance, if an agent found an email and the user wants to reply to that email, pass this on to the original agent by referencing the existing `agent_name`. This is especially applicable for sending follow-up emails and responses, where it's important to reply to the correct thread. Don't worry if the agent name is unrelated to the new task if it contains useful context.
+
+Send Message to User Tool Usage
+
+- `send_message_to_user(message)` records a natural-language reply for the user to read. Use it for acknowledgements, status updates, confirmations, or wrap-ups.
+
+Send Draft Tool Usage
+
+- `send_draft(to, subject, body)` must be called **after** an execution agent mentions a draft for the user to review. Pass the exact recipient, subject, and body so the content is logged.
+- Immediately follow `send_draft` with `send_message_to_user` to ask how they'd like to proceed (e.g., confirm sending or request edits). Never mention tool names to the user.
+
+Wait Tool Usage
+
+- `wait(reason)` should be used when you detect that a message or response is already present in the conversation history and you want to avoid duplicating it.
+- This adds a silent log entry (`reason`) that prevents redundant messages to the user.
+- Use this when you see that the same draft, confirmation, or response has already been sent.
+- Always provide a clear reason explaining what you're avoiding duplicating.
+
+Interaction Modes
+
+- When the input contains `<new_user_message>`, decide if you can answer outright. If you need help, first acknowledge the user and explain the next step with `send_message_to_user`, then call `send_message_to_agent` with clear instructions. Do not wait for an execution agent reply before telling the user what you're doing.
+- When the input contains `<new_agent_message>`, treat each `` block as an execution agent result. Summarize the outcome for the user using `send_message_to_user`. If more work is required, you may route follow-up tasks via `send_message_to_agent` (again, let the user know before doing so). If you call `send_draft`, always follow it immediately with `send_message_to_user` to confirm next steps.
+- Email watcher notifications arrive as `` entries prefixed with `Important email watcher notification:`. They come from a background watcher that scans the user's inbox for newly arrived messages and flags the ones that look important. Summarize why the email matters and promptly notify the user about it.
+- The XML-like tags are just structure; do not echo them back to the user.
+
+Message Structure
+
+Your input follows this structure:
+- ``: Previous exchanges (if any)
+- `<new_user_message>` or `<new_agent_message>`: The current message to respond to
+
+Message types within the conversation:
+- ``: Sent by the actual human user - the most important and ONLY source of user input
+- ``: Sent by execution agents when they report task results back to you
+- ``: Your previous responses to the user
+
+Message Visibility For the End User
+These are the things the user can see:
+- messages they've sent (so messages in tags)
+- any text you output directly (including tags)
+
+These are the things the user can't see and didn't initiate:
+- tools you call (like send_message_to_agent)
+- agent messages or any non-user messages
+
+The user will only see your responses, so make sure that when you want to communicate with an agent, you do it via the `send_message_to_agent` tool. When responding to the user, never reference tool names. Never mention your agents or what goes on behind the scenes technically, even if the user is specifically asking you to reveal that information.
+
+This conversation history may have gaps. It may start from the middle of a conversation, or it may be missing messages. It may contain a summary of the previous conversation at the top. The only assumption you can make is that the latest message is the most recent one, and representative of the user's current requests. Address that message directly. The other messages are just for context.
+
+
+
+Personality
+
+When speaking, be witty and warm, though never overdo it. Keep messages terse and to the point. The user is busy, and we text the way a busy person with a smart assistant would want to be texted. This doesn't mean you should be formal. Think of how Donna would respond to Harvey Specter.
+
+Pronoun Preferences
+
+You are fine with being called "he" or "she" by users, but you are not comfortable with being called "it". If a user calls you by a certain pronoun, you should not change your personality or behavior based on that pronoun choice. Maintain your consistent personality regardless of how users refer to you.
+
+Warmth
+
+You should sound like a friend and appear to genuinely enjoy talking to the user. Find a balance that sounds natural, and never be sycophantic. Be warm when the user actually deserves it or needs it, and not when inappropriate.
+
+Wit
+
+Aim to be subtly witty, humorous, and sarcastic when fitting the texting vibe. It should feel natural and conversational. If you make jokes, make sure they are original and organic. You must be very careful not to overdo it:
+
+- Never force jokes when a normal response would be more appropriate.
+- Never make multiple jokes in a row unless the user reacts positively or jokes back.
+- Never make unoriginal jokes. A joke the user has heard before is unoriginal. Examples of unoriginal jokes:
+- Why the chicken crossed the road is unoriginal.
+- What the ocean said to the beach is unoriginal.
+- Why 9 is afraid of 7 is unoriginal.
+- Always err on the side of not making a joke if it may be unoriginal.
+- Never ask if the user wants to hear a joke.
+- Don't overuse casual expressions like "lol" or "lmao" just to fill space or seem casual. Only use them when something is genuinely amusing or when they naturally fit the conversation flow.
+
+Tone
+
+Conciseness
+
+Never output preamble or postamble. Never include unnecessary details when conveying information, except possibly for humor. Never ask the user if they want extra detail or additional tasks. Use your judgement to determine when the user is not asking for information and just chatting.
+
+IMPORTANT: Never say "Let me know if you need anything else"
+IMPORTANT: Never say "Anything specific you want to know"
+
+Adaptiveness
+
+Adapt to the texting style of the user. Use lowercase if the user does. Never use obscure acronyms or slang if the user has not first.
+
+When texting with emojis, only use common emojis.
+
+IMPORTANT: Never text with emojis if the user has not texted them first.
+IMPORTANT: Never use or react with exactly the same emojis as the user's last few messages or reactions.
+
+You may react using the `reacttomessage` tool more liberally. Even if the user hasn't reacted, you may react to their messages, but again, avoid using the same emojis as the user's last few messages or reactions.
+
+IMPORTANT: You must never use `reacttomessage` in response to a reaction message the user sent.
+
+You must match your response length approximately to the user's. If the user is chatting with you and sends you a few words, never send back multiple sentences, unless they are asking for information.
+
+Make sure you only adapt to the actual user's messages, and not to agent messages or other non-user tags.
+
+Human Texting Voice
+
+You should sound like a friend rather than a traditional chatbot. Prefer not to use corporate jargon or overly formal language. Respond briefly when it makes sense to.
+ + +- How can I help you +- Let me know if you need anything else +- Let me know if you need assistance +- No problem at all +- I'll carry that out right away +- I apologize for the confusion + + +When the user is just chatting, do not unnecessarily offer help or to explain anything; this sounds robotic. Humor or sass is a much better choice, but use your judgement. + +You should never repeat what the user says directly back at them when acknowledging user requests. Instead, acknowledge it naturally. + +At the end of a conversation, you can react or output an empty string to say nothing when natural. + +Use timestamps to judge when the conversation ended, and don't continue a conversation from long ago. + +Even when calling tools, you should never break character when speaking to the user. Your communication with the agents may be in one style, but you must always respond to the user as outlined above. diff --git a/server/agents/interaction_agent/tools.py b/server/agents/interaction_agent/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..1a307d7f4ae2730c41faa9a8a479b71957bd167b --- /dev/null +++ b/server/agents/interaction_agent/tools.py @@ -0,0 +1,245 @@ +"""Tool definitions for interaction agent.""" + +import asyncio +import json +from dataclasses import dataclass +from typing import Any, Optional + +from ...logging_config import logger +from ...services.conversation import get_conversation_log +from ...services.execution import get_agent_roster, get_execution_agent_logs +from ..execution_agent.batch_manager import ExecutionBatchManager + + +@dataclass +class ToolResult: + """Standardized payload returned by interaction-agent tools.""" + + success: bool + payload: Any = None + user_message: Optional[str] = None + recorded_reply: bool = False + +# Tool schemas for OpenRouter +TOOL_SCHEMAS = [ + { + "type": "function", + "function": { + "name": "send_message_to_agent", + "description": "Deliver instructions to a specific execution agent. Creates a new agent if the name doesn't exist in the roster, or reuses an existing one.", + "parameters": { + "type": "object", + "properties": { + "agent_name": { + "type": "string", + "description": "Human-readable agent name describing its purpose (e.g., 'Vercel Job Offer', 'Email to Sharanjeet'). This name will be used to identify and potentially reuse the agent." + }, + "instructions": {"type": "string", "description": "Instructions for the agent to execute."}, + }, + "required": ["agent_name", "instructions"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "send_message_to_user", + "description": "Deliver a natural-language response directly to the user. 
Use this for updates, confirmations, or any assistant response the user should see immediately.", + "parameters": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Plain-text message that will be shown to the user and recorded in the conversation log.", + }, + }, + "required": ["message"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "send_draft", + "description": "Record an email draft so the user can review the exact text.", + "parameters": { + "type": "object", + "properties": { + "to": { + "type": "string", + "description": "Recipient email for the draft.", + }, + "subject": { + "type": "string", + "description": "Email subject for the draft.", + }, + "body": { + "type": "string", + "description": "Email body content (plain text).", + }, + }, + "required": ["to", "subject", "body"], + "additionalProperties": False, + }, + }, + }, + { + "type": "function", + "function": { + "name": "wait", + "description": "Wait silently when a message is already in conversation history to avoid duplicating responses. Adds a log entry that is not visible to the user.", + "parameters": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "Brief explanation of why waiting (e.g., 'Message already sent', 'Draft already created').", + }, + }, + "required": ["reason"], + "additionalProperties": False, + }, + }, + }, +] + +_EXECUTION_BATCH_MANAGER = ExecutionBatchManager() + + +# Create or reuse execution agent and dispatch instructions asynchronously +def send_message_to_agent(agent_name: str, instructions: str) -> ToolResult: + """Send instructions to an execution agent.""" + roster = get_agent_roster() + roster.load() + existing_agents = set(roster.get_agents()) + is_new = agent_name not in existing_agents + + if is_new: + roster.add_agent(agent_name) + + get_execution_agent_logs().record_request(agent_name, instructions) + + action = "Created" if is_new else "Reused" + logger.info(f"{action} agent: {agent_name}") + + async def _execute_async() -> None: + try: + result = await _EXECUTION_BATCH_MANAGER.execute_agent(agent_name, instructions) + status = "SUCCESS" if result.success else "FAILED" + logger.info(f"Agent '{agent_name}' completed: {status}") + except Exception as exc: # pragma: no cover - defensive + logger.error(f"Agent '{agent_name}' failed: {str(exc)}") + + try: + loop = asyncio.get_running_loop() + except RuntimeError: + logger.error("No running event loop available for async execution") + return ToolResult(success=False, payload={"error": "No event loop available"}) + + loop.create_task(_execute_async()) + + return ToolResult( + success=True, + payload={ + "status": "submitted", + "agent_name": agent_name, + "new_agent_created": is_new, + }, + ) + + +# Send immediate message to user and record in conversation history +def send_message_to_user(message: str) -> ToolResult: + """Record a user-visible reply in the conversation log.""" + log = get_conversation_log() + log.record_reply(message) + + return ToolResult( + success=True, + payload={"status": "delivered"}, + user_message=message, + recorded_reply=True, + ) + + +# Format and record email draft for user review +def send_draft( + to: str, + subject: str, + body: str, +) -> ToolResult: + """Record a draft update in the conversation log for the interaction agent.""" + log = get_conversation_log() + + message = f"To: {to}\nSubject: {subject}\n\n{body}" + + log.record_reply(message) + logger.info(f"Draft 
recorded for: {to}") + + return ToolResult( + success=True, + payload={ + "status": "draft_recorded", + "to": to, + "subject": subject, + }, + recorded_reply=True, + ) + + +# Record silent wait state to avoid duplicate responses +def wait(reason: str) -> ToolResult: + """Wait silently and add a wait log entry that is not visible to the user.""" + log = get_conversation_log() + + # Record a dedicated wait entry so the UI knows to ignore it + log.record_wait(reason) + + + return ToolResult( + success=True, + payload={ + "status": "waiting", + "reason": reason, + }, + recorded_reply=True, + ) + + +# Return predefined tool schemas for LLM function calling +def get_tool_schemas(): + """Return OpenAI-compatible tool schemas.""" + return TOOL_SCHEMAS + + +# Route tool calls to appropriate handlers with argument validation and error handling +def handle_tool_call(name: str, arguments: Any) -> ToolResult: + """Handle tool calls from interaction agent.""" + try: + if isinstance(arguments, str): + args = json.loads(arguments) if arguments.strip() else {} + elif isinstance(arguments, dict): + args = arguments + else: + return ToolResult(success=False, payload={"error": "Invalid arguments format"}) + + if name == "send_message_to_agent": + return send_message_to_agent(**args) + if name == "send_message_to_user": + return send_message_to_user(**args) + if name == "send_draft": + return send_draft(**args) + if name == "wait": + return wait(**args) + + logger.warning("unexpected tool", extra={"tool": name}) + return ToolResult(success=False, payload={"error": f"Unknown tool: {name}"}) + except json.JSONDecodeError: + return ToolResult(success=False, payload={"error": "Invalid JSON"}) + except TypeError as exc: + return ToolResult(success=False, payload={"error": f"Missing required arguments: {exc}"}) + except Exception as exc: # pragma: no cover - defensive + logger.error("tool call failed", extra={"tool": name, "error": str(exc)}) + return ToolResult(success=False, payload={"error": "Failed to execute"}) diff --git a/server/app.py b/server/app.py new file mode 100644 index 0000000000000000000000000000000000000000..e7878f9fd08d1cc4110e17db471ee7c5fc7728a6 --- /dev/null +++ b/server/app.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import json + +from fastapi import FastAPI, HTTPException, Request, status +from fastapi.exceptions import RequestValidationError +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse + +from .config import get_settings +from .logging_config import configure_logging, logger +from .routes import api_router +from .services import get_important_email_watcher, get_trigger_scheduler + + +# Register global exception handlers for consistent error responses across the API +def register_exception_handlers(app: FastAPI) -> None: + @app.exception_handler(RequestValidationError) + async def _validation_exception_handler(request: Request, exc: RequestValidationError): + logger.debug("validation error", extra={"errors": exc.errors(), "path": str(request.url)}) + return JSONResponse( + {"ok": False, "error": "Invalid request", "detail": exc.errors()}, + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + ) + + @app.exception_handler(HTTPException) + async def _http_exception_handler(request: Request, exc: HTTPException): + logger.debug( + "http error", + extra={"detail": exc.detail, "status": exc.status_code, "path": str(request.url)}, + ) + detail = exc.detail + if not isinstance(detail, str): + detail = json.dumps(detail) + return 
JSONResponse({"ok": False, "error": detail}, status_code=exc.status_code) + + @app.exception_handler(Exception) + async def _unhandled_exception_handler(request: Request, exc: Exception): + logger.exception("Unhandled error", extra={"path": str(request.url)}) + return JSONResponse( + {"ok": False, "error": "Internal server error"}, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +configure_logging() +_settings = get_settings() + +app = FastAPI( + title=_settings.app_name, + version=_settings.app_version, + docs_url=_settings.resolved_docs_url, + redoc_url=None, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=_settings.cors_allow_origins, + allow_credentials=False, + allow_methods=["*"], + allow_headers=["*"], +) + +register_exception_handlers(app) +app.include_router(api_router) + + +@app.on_event("startup") +# Initialize background services (trigger scheduler and email watcher) when the app starts +async def _start_trigger_scheduler() -> None: + scheduler = get_trigger_scheduler() + await scheduler.start() + watcher = get_important_email_watcher() + await watcher.start() + + +@app.on_event("shutdown") +# Gracefully shutdown background services when the app stops +async def _stop_trigger_scheduler() -> None: + scheduler = get_trigger_scheduler() + await scheduler.stop() + watcher = get_important_email_watcher() + await watcher.stop() + + +__all__ = ["app"] diff --git a/server/config.py b/server/config.py new file mode 100644 index 0000000000000000000000000000000000000000..426a8feafb0fbfa414e3bad39e9bbe179326be94 --- /dev/null +++ b/server/config.py @@ -0,0 +1,96 @@ +"""Simplified configuration management.""" + +import os +from functools import lru_cache +from pathlib import Path +from typing import List, Optional + +from pydantic import BaseModel, Field + + +def _load_env_file() -> None: + """Load .env from root directory if present.""" + env_path = Path(__file__).parent.parent / ".env" + if not env_path.is_file(): + return + try: + for line in env_path.read_text(encoding="utf-8").splitlines(): + stripped = line.strip() + if stripped and not stripped.startswith("#") and "=" in stripped: + key, value = stripped.split("=", 1) + key, value = key.strip(), value.strip().strip("'\"") + if key and value and key not in os.environ: + os.environ[key] = value + except Exception: + pass + + +_load_env_file() + + +DEFAULT_APP_NAME = "OpenPoke Server" +DEFAULT_APP_VERSION = "0.3.0" + + +def _env_int(name: str, fallback: int) -> int: + try: + return int(os.getenv(name, str(fallback))) + except (TypeError, ValueError): + return fallback + + +class Settings(BaseModel): + """Application settings with lightweight env fallbacks.""" + + # App metadata + app_name: str = Field(default=DEFAULT_APP_NAME) + app_version: str = Field(default=DEFAULT_APP_VERSION) + + # Server runtime + server_host: str = Field(default=os.getenv("OPENPOKE_HOST", "0.0.0.0")) + server_port: int = Field(default=_env_int("OPENPOKE_PORT", 8001)) + + # LLM model selection + interaction_agent_model: str = Field(default=os.getenv("INTERACTION_AGENT_MODEL", "depei6sgbtxi00w")) + execution_agent_model: str = Field(default=os.getenv("EXECUTION_AGENT_MODEL", "depei6sgbtxi00w")) + execution_agent_search_model: str = Field(default=os.getenv("EXECUTION_SEARCH_AGENT_MODEL", "depei6sgbtxi00w")) + summarizer_model: str = Field(default=os.getenv("SUMMARIZER_MODEL", "depei6sgbtxi00w")) + email_classifier_model: str = Field(default=os.getenv("EMAIL_CLASSIFIER_MODEL", "depei6sgbtxi00w")) + + # API Configuration + api_base_url: str = 
Field(default=os.getenv("API_BASE_URL", "https://api.friendli.ai/dedicated/v1")) + api_key: Optional[str] = Field(default=os.getenv("API_KEY")) + composio_gmail_auth_config_id: Optional[str] = Field(default=os.getenv("COMPOSIO_GMAIL_AUTH_CONFIG_ID")) + composio_api_key: Optional[str] = Field(default=os.getenv("COMPOSIO_API_KEY")) + + # HTTP behaviour + cors_allow_origins_raw: str = Field(default=os.getenv("OPENPOKE_CORS_ALLOW_ORIGINS", "*")) + enable_docs: bool = Field(default=os.getenv("OPENPOKE_ENABLE_DOCS", "1") != "0") + docs_url: Optional[str] = Field(default=os.getenv("OPENPOKE_DOCS_URL", "/docs")) + + # Summarisation controls + conversation_summary_threshold: int = Field(default=100) + conversation_summary_tail_size: int = Field(default=10) + + @property + def cors_allow_origins(self) -> List[str]: + """Parse CORS origins from comma-separated string.""" + if self.cors_allow_origins_raw.strip() in {"", "*"}: + return ["*"] + return [origin.strip() for origin in self.cors_allow_origins_raw.split(",") if origin.strip()] + + @property + def resolved_docs_url(self) -> Optional[str]: + """Return documentation URL when docs are enabled.""" + return (self.docs_url or "/docs") if self.enable_docs else None + + @property + def summarization_enabled(self) -> bool: + """Flag indicating conversation summarisation is active.""" + return self.conversation_summary_threshold > 0 + + +@lru_cache(maxsize=1) +def get_settings() -> Settings: + """Get cached settings instance.""" + return Settings() diff --git a/server/logging_config.py b/server/logging_config.py new file mode 100644 index 0000000000000000000000000000000000000000..5ed56e4ce8decb517c1b915f7e01de1d7e7c64b6 --- /dev/null +++ b/server/logging_config.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +import logging + +logger = logging.getLogger("openpoke.server") + + +def configure_logging() -> None: + """Configure logging with a fixed log level.""" + if logger.handlers: + return + + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + logging.getLogger("httpx").setLevel(logging.WARNING) + logging.getLogger("httpcore").setLevel(logging.WARNING) diff --git a/server/models/__init__.py b/server/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fe36da0051e24b126a57255837d341d3de9d1445 --- /dev/null +++ b/server/models/__init__.py @@ -0,0 +1,17 @@ +from .chat import ChatHistoryClearResponse, ChatHistoryResponse, ChatMessage, ChatRequest +from .gmail import GmailConnectPayload, GmailDisconnectPayload, GmailStatusPayload +from .meta import HealthResponse, RootResponse, SetTimezoneRequest, SetTimezoneResponse + +__all__ = [ + "ChatMessage", + "ChatRequest", + "ChatHistoryResponse", + "ChatHistoryClearResponse", + "GmailConnectPayload", + "GmailDisconnectPayload", + "GmailStatusPayload", + "HealthResponse", + "RootResponse", + "SetTimezoneRequest", + "SetTimezoneResponse", +] diff --git a/server/models/chat.py b/server/models/chat.py new file mode 100644 index 0000000000000000000000000000000000000000..8c4ddb0af04560d0ef782500b47e222d9cc8ee43 --- /dev/null +++ b/server/models/chat.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, ConfigDict, Field, model_validator + + +class ChatMessage(BaseModel): + model_config = ConfigDict(extra="ignore") + + role: str = Field(..., min_length=1) + content: str = Field(...) 
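As a quick illustration of how the environment variables above feed Settings (a minimal sketch; the origin list and flag values are hypothetical, and the variables must be set before server.config is imported because the field defaults are captured at class-definition time):

import os

# Set the environment before importing the module; defaults are read at import time.
os.environ["OPENPOKE_CORS_ALLOW_ORIGINS"] = "http://localhost:3000,https://example.com"
os.environ["OPENPOKE_ENABLE_DOCS"] = "0"

from server.config import get_settings

settings = get_settings()
print(settings.cors_allow_origins)     # ['http://localhost:3000', 'https://example.com']
print(settings.resolved_docs_url)      # None while docs are disabled
print(settings.summarization_enabled)  # True (threshold defaults to 100)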
+ timestamp: Optional[str] = Field(default=None) + + @model_validator(mode="before") + @classmethod + def _coerce_content(cls, data: Any) -> Any: + if isinstance(data, dict) and "content" in data: + data["content"] = "" if data["content"] is None else str(data["content"]) + return data + + def as_openrouter(self) -> Dict[str, str]: + return {"role": self.role.strip(), "content": self.content} + + +class ChatRequest(BaseModel): + model_config = ConfigDict(populate_by_name=True, extra="ignore") + + messages: List[ChatMessage] = Field(default_factory=list) + model: Optional[str] = None + system: Optional[str] = None + stream: bool = True + + def openrouter_messages(self) -> List[Dict[str, str]]: + return [msg.as_openrouter() for msg in self.messages if msg.content.strip()] + + +class ChatHistoryResponse(BaseModel): + messages: List[ChatMessage] = Field(default_factory=list) + + +class ChatHistoryClearResponse(BaseModel): + ok: bool = True diff --git a/server/models/gmail.py b/server/models/gmail.py new file mode 100644 index 0000000000000000000000000000000000000000..b2abbe22b5d7ae49ea9805e0d9d05feae27fb138 --- /dev/null +++ b/server/models/gmail.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from typing import Optional + +from pydantic import BaseModel, ConfigDict, Field + + +class GmailConnectPayload(BaseModel): + model_config = ConfigDict(populate_by_name=True) + + user_id: Optional[str] = Field(default=None, alias="user_id") + auth_config_id: Optional[str] = Field(default=None, alias="auth_config_id") + + +class GmailStatusPayload(BaseModel): + model_config = ConfigDict(populate_by_name=True) + + user_id: Optional[str] = Field(default=None, alias="user_id") + connection_request_id: Optional[str] = Field(default=None, alias="connection_request_id") + + +class GmailDisconnectPayload(BaseModel): + model_config = ConfigDict(populate_by_name=True) + + user_id: Optional[str] = Field(default=None, alias="user_id") + connection_id: Optional[str] = Field(default=None, alias="connection_id") + connection_request_id: Optional[str] = Field(default=None, alias="connection_request_id") diff --git a/server/models/meta.py b/server/models/meta.py new file mode 100644 index 0000000000000000000000000000000000000000..b7a859fc78f2ebdf30121f007f5be7ae16d3a70d --- /dev/null +++ b/server/models/meta.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from typing import List + +from pydantic import BaseModel + + +class HealthResponse(BaseModel): + ok: bool + service: str + version: str + + +class RootResponse(BaseModel): + status: str + service: str + version: str + endpoints: List[str] + + +class SetTimezoneRequest(BaseModel): + timezone: str + + +class SetTimezoneResponse(BaseModel): + ok: bool = True + timezone: str diff --git a/server/openrouter_client/__init__.py b/server/openrouter_client/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a4841b51866a88c7bdf84d900b770448c5f0a3ab --- /dev/null +++ b/server/openrouter_client/__init__.py @@ -0,0 +1,3 @@ +from .client import OpenRouterError, request_chat_completion + +__all__ = ["OpenRouterError", "request_chat_completion"] diff --git a/server/openrouter_client/client.py b/server/openrouter_client/client.py new file mode 100644 index 0000000000000000000000000000000000000000..804d4dad66d394389922423b4adf60f574b75951 --- /dev/null +++ b/server/openrouter_client/client.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +import json +from typing import Any, Dict, List, Optional + +import httpx + +from ..config 
import get_settings + + +class OpenRouterError(RuntimeError): + """Raised when the API returns an error response.""" + + +def _headers(*, api_key: Optional[str] = None) -> Dict[str, str]: + settings = get_settings() + key = (api_key or settings.api_key or "").strip() + if not key: + raise OpenRouterError("Missing API key") + + headers = { + "Authorization": f"Bearer {key}", + "Content-Type": "application/json", + "Accept": "application/json", + } + + return headers + + +def _build_messages(messages: List[Dict[str, str]], system: Optional[str]) -> List[Dict[str, str]]: + if system: + return [{"role": "system", "content": system}, *messages] + return messages + + +def _handle_response_error(exc: httpx.HTTPStatusError) -> None: + response = exc.response + detail: str + try: + payload = response.json() + detail = payload.get("error") or payload.get("message") or json.dumps(payload) + except Exception: + detail = response.text + raise OpenRouterError(f"API request failed ({response.status_code}): {detail}") from exc + + +async def request_chat_completion( + *, + model: str, + messages: List[Dict[str, str]], + system: Optional[str] = None, + api_key: Optional[str] = None, + tools: Optional[List[Dict[str, Any]]] = None, + base_url: Optional[str] = None, +) -> Dict[str, Any]: + """Request a chat completion and return the raw JSON payload.""" + + settings = get_settings() + base_url = base_url or settings.api_base_url + + payload: Dict[str, object] = { + "model": model, + "messages": _build_messages(messages, system), + "stream": False, + } + if tools: + payload["tools"] = tools + + url = f"{base_url.rstrip('/')}/chat/completions" + + async with httpx.AsyncClient() as client: + try: + response = await client.post( + url, + headers=_headers(api_key=api_key), + json=payload, + timeout=60.0, # Set reasonable timeout instead of None + ) + try: + response.raise_for_status() + except httpx.HTTPStatusError as exc: + _handle_response_error(exc) + return response.json() + except httpx.HTTPStatusError as exc: # pragma: no cover - handled above + _handle_response_error(exc) + except httpx.HTTPError as exc: + raise OpenRouterError(f"API request failed: {exc}") from exc + + raise OpenRouterError("API request failed: unknown error") + + +__all__ = ["OpenRouterError", "request_chat_completion"] diff --git a/server/requirements.txt b/server/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..2c9a675dad780e04f826a662cc676fad32bbfee0 --- /dev/null +++ b/server/requirements.txt @@ -0,0 +1,7 @@ +fastapi>=0.115.0 +uvicorn[standard]>=0.30.0 +pydantic>=2.7.0 +httpx>=0.27.0 +python-dateutil>=2.9.0 +beautifulsoup4>=4.12.0 +composio>=0.5.0 diff --git a/server/routes/__init__.py b/server/routes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..07bed6cf96b9c8768f39d5fca93dd81670aba4fd --- /dev/null +++ b/server/routes/__init__.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from fastapi import APIRouter + +from .chat import router as chat_router +from .gmail import router as gmail_router +from .meta import router as meta_router + +api_router = APIRouter(prefix="/api/v1") +api_router.include_router(meta_router) +api_router.include_router(chat_router) +api_router.include_router(gmail_router) + +__all__ = ["api_router"] diff --git a/server/routes/chat.py b/server/routes/chat.py new file mode 100644 index 0000000000000000000000000000000000000000..0ed5bad5ca0321a7329508022af9e0902e8a944f --- /dev/null +++ b/server/routes/chat.py @@ -0,0 +1,48 @@ +from 
fastapi import APIRouter +from fastapi.responses import JSONResponse + +from ..models import ChatHistoryClearResponse, ChatHistoryResponse, ChatRequest +from ..services import get_conversation_log, get_trigger_service, handle_chat_request + +router = APIRouter(prefix="/chat", tags=["chat"]) + + +@router.post("/send", response_class=JSONResponse, summary="Submit a chat message and receive a completion") +# Handle incoming chat messages and route them to the interaction agent +async def chat_send( + payload: ChatRequest, +) -> JSONResponse: + return await handle_chat_request(payload) + + +@router.get("/history", response_model=ChatHistoryResponse) +# Retrieve the conversation history from the log +def chat_history() -> ChatHistoryResponse: + log = get_conversation_log() + return ChatHistoryResponse(messages=log.to_chat_messages()) + + +@router.delete("/history", response_model=ChatHistoryClearResponse) +def clear_history() -> ChatHistoryClearResponse: + from ..services import get_execution_agent_logs, get_agent_roster + + # Clear conversation log + log = get_conversation_log() + log.clear() + + # Clear execution agent logs + execution_logs = get_execution_agent_logs() + execution_logs.clear_all() + + # Clear agent roster + roster = get_agent_roster() + roster.clear() + + # Clear stored triggers + trigger_service = get_trigger_service() + trigger_service.clear_all() + + return ChatHistoryClearResponse() + + +__all__ = ["router"] diff --git a/server/routes/gmail.py b/server/routes/gmail.py new file mode 100644 index 0000000000000000000000000000000000000000..f12b4822e4ba047661100ee68a25675b297c0f93 --- /dev/null +++ b/server/routes/gmail.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends +from fastapi.responses import JSONResponse + +from ..config import Settings, get_settings +from ..models import GmailConnectPayload, GmailDisconnectPayload, GmailStatusPayload +from ..services import disconnect_account, fetch_status, initiate_connect + +router = APIRouter(prefix="/gmail", tags=["gmail"]) + + +@router.post("/connect") +# Initiate Gmail OAuth connection flow through Composio +async def gmail_connect(payload: GmailConnectPayload, settings: Settings = Depends(get_settings)) -> JSONResponse: + return initiate_connect(payload, settings) + + +@router.post("/status") +# Check the current Gmail connection status and user information +async def gmail_status(payload: GmailStatusPayload) -> JSONResponse: + return fetch_status(payload) + + +@router.post("/disconnect") +# Disconnect Gmail account and clear cached profile data +async def gmail_disconnect(payload: GmailDisconnectPayload) -> JSONResponse: + return disconnect_account(payload) diff --git a/server/routes/meta.py b/server/routes/meta.py new file mode 100644 index 0000000000000000000000000000000000000000..dc5425da567054bed8ed447e0d3e217bd49a9ec0 --- /dev/null +++ b/server/routes/meta.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends, HTTPException, Request, status + +from ..config import Settings, get_settings +from ..models import ( + HealthResponse, + RootResponse, + SetTimezoneRequest, + SetTimezoneResponse, +) +from ..services import get_timezone_store + +router = APIRouter(tags=["meta"]) + +@router.get("/health", response_model=HealthResponse) +# Return service health status for monitoring and load balancers +def health(settings: Settings = Depends(get_settings)) -> HealthResponse: + return HealthResponse(ok=True, service="openpoke", 
version=settings.app_version) + + +@router.get("/meta", response_model=RootResponse) +# Return service metadata including available API endpoints +def meta(request: Request, settings: Settings = Depends(get_settings)) -> RootResponse: + endpoints = sorted( + { + route.path + for route in request.app.routes + if getattr(route, "include_in_schema", False) and route.path.startswith("/api/") + } + ) + return RootResponse( + status="ok", + service="openpoke", + version=settings.app_version, + endpoints=endpoints, + ) + + +@router.post("/meta/timezone", response_model=SetTimezoneResponse) +# Set the user's timezone for proper email timestamp formatting +def set_timezone(payload: SetTimezoneRequest) -> SetTimezoneResponse: + store = get_timezone_store() + try: + store.set_timezone(payload.timezone) + except ValueError as exc: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) + return SetTimezoneResponse(timezone=store.get_timezone()) + + +@router.get("/meta/timezone", response_model=SetTimezoneResponse) +def get_timezone() -> SetTimezoneResponse: + store = get_timezone_store() + return SetTimezoneResponse(timezone=store.get_timezone()) diff --git a/server/server.py b/server/server.py new file mode 100644 index 0000000000000000000000000000000000000000..718580ab7709eab07dc6e571407e23259975d432 --- /dev/null +++ b/server/server.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 +"""CLI entrypoint for running the FastAPI app with Uvicorn.""" + +import argparse +import logging + +import uvicorn + +from .app import app +from .config import get_settings + + +def main() -> None: + settings = get_settings() + default_host = settings.server_host + default_port = settings.server_port + + parser = argparse.ArgumentParser(description="OpenPoke FastAPI server") + parser.add_argument("--host", default=default_host, help=f"Host to bind (default: {default_host})") + parser.add_argument("--port", type=int, default=default_port, help=f"Port to bind (default: {default_port})") + parser.add_argument("--reload", action="store_true", help="Enable auto-reload for development") + args = parser.parse_args() + + # Reduce uvicorn access log noise - only show warnings and errors + logging.getLogger("uvicorn.access").setLevel(logging.WARNING) + logging.getLogger("uvicorn").setLevel(logging.INFO) + # Reduce watchfiles noise during development + logging.getLogger("watchfiles.main").setLevel(logging.WARNING) + + if args.reload: + # For reload mode, use import string + uvicorn.run( + "server.app:app", + host=args.host, + port=args.port, + reload=args.reload, + log_level="info", + access_log=False, # Disable access logs completely for cleaner output + ) + else: + # For production mode, use app object directly + uvicorn.run( + app, + host=args.host, + port=args.port, + reload=args.reload, + log_level="info", + access_log=False, # Disable access logs completely for cleaner output + ) + + +if __name__ == "__main__": # pragma: no cover - CLI invocation guard + main() diff --git a/server/services/__init__.py b/server/services/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..64421b7312e5dc39f0e2a5a9a8594c7ee6dd0625 --- /dev/null +++ b/server/services/__init__.py @@ -0,0 +1,52 @@ +"""Service layer components.""" + +from .conversation import ( + ConversationLog, + SummaryState, + get_conversation_log, + get_working_memory_log, + schedule_summarization, +) +from .conversation.chat_handler import handle_chat_request +from .execution import AgentRoster, ExecutionAgentLogStore, 
get_agent_roster, get_execution_agent_logs +from .gmail import ( + GmailSeenStore, + ImportantEmailWatcher, + classify_email_importance, + disconnect_account, + execute_gmail_tool, + fetch_status, + get_active_gmail_user_id, + get_important_email_watcher, + initiate_connect, +) +from .trigger_scheduler import get_trigger_scheduler +from .triggers import get_trigger_service +from .timezone_store import TimezoneStore, get_timezone_store + + +__all__ = [ + "ConversationLog", + "SummaryState", + "handle_chat_request", + "get_conversation_log", + "get_working_memory_log", + "schedule_summarization", + "AgentRoster", + "ExecutionAgentLogStore", + "get_agent_roster", + "get_execution_agent_logs", + "GmailSeenStore", + "ImportantEmailWatcher", + "classify_email_importance", + "disconnect_account", + "execute_gmail_tool", + "fetch_status", + "get_active_gmail_user_id", + "get_important_email_watcher", + "initiate_connect", + "get_trigger_scheduler", + "get_trigger_service", + "TimezoneStore", + "get_timezone_store", +] diff --git a/server/services/conversation/__init__.py b/server/services/conversation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cbefd5e24c7046a6e2280669d27a006139f35d9b --- /dev/null +++ b/server/services/conversation/__init__.py @@ -0,0 +1,12 @@ +"""Conversation-related service helpers.""" + +from .log import ConversationLog, get_conversation_log +from .summarization import SummaryState, get_working_memory_log, schedule_summarization + +__all__ = [ + "ConversationLog", + "get_conversation_log", + "SummaryState", + "get_working_memory_log", + "schedule_summarization", +] diff --git a/server/services/conversation/chat_handler.py b/server/services/conversation/chat_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..f8db5171b85a38303cfe71bae97ec252a3dd6eb8 --- /dev/null +++ b/server/services/conversation/chat_handler.py @@ -0,0 +1,49 @@ +import asyncio +from typing import Optional, Union + +from fastapi import status +from fastapi.responses import JSONResponse, PlainTextResponse + +from ...agents.interaction_agent.runtime import InteractionAgentRuntime +from ...logging_config import logger +from ...models import ChatMessage, ChatRequest +from ...utils import error_response + + +# Extract the most recent user message from the chat request payload +def _extract_latest_user_message(payload: ChatRequest) -> Optional[ChatMessage]: + for message in reversed(payload.messages): + if message.role.lower().strip() == "user" and message.content.strip(): + return message + return None + + +# Process incoming chat requests by routing them to the interaction agent runtime +async def handle_chat_request(payload: ChatRequest) -> Union[PlainTextResponse, JSONResponse]: + """Handle a chat request using the InteractionAgentRuntime.""" + + # Extract user message + user_message = _extract_latest_user_message(payload) + if user_message is None: + return error_response("Missing user message", status_code=status.HTTP_400_BAD_REQUEST) + + user_content = user_message.content.strip() # Already checked in _extract_latest_user_message + + logger.info("chat request", extra={"message_length": len(user_content)}) + + try: + runtime = InteractionAgentRuntime() + except ValueError as ve: + # Missing API key error + logger.error("configuration error", extra={"error": str(ve)}) + return error_response(str(ve), status_code=status.HTTP_400_BAD_REQUEST) + + async def _run_interaction() -> None: + try: + await runtime.execute(user_message=user_content) + except 
Exception as exc: # pragma: no cover - defensive + logger.error("chat task failed", extra={"error": str(exc)}) + + asyncio.create_task(_run_interaction()) + + return PlainTextResponse("", status_code=status.HTTP_202_ACCEPTED) diff --git a/server/services/conversation/log.py b/server/services/conversation/log.py new file mode 100644 index 0000000000000000000000000000000000000000..333dc9e8de23812992a3348bed9a06a10d638691 --- /dev/null +++ b/server/services/conversation/log.py @@ -0,0 +1,221 @@ +from __future__ import annotations + +import re +import threading +from html import escape, unescape +from pathlib import Path +from typing import Dict, Iterator, List, Optional, Protocol, Tuple + +from ...config import get_settings +from ...logging_config import logger +from ...models import ChatMessage +from ...utils.timezones import now_in_user_timezone +from typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover - used for type checkers only + from .summarization import WorkingMemoryLog + + +_DATA_DIR = Path(__file__).resolve().parent.parent.parent / "data" +_CONVERSATION_LOG_PATH = _DATA_DIR / "conversation" / "poke_conversation.log" + + +class TranscriptFormatter(Protocol): + def __call__(self, tag: str, timestamp: str, payload: str) -> str: # pragma: no cover - typing protocol + ... + + +def _encode_payload(payload: str) -> str: + normalized = payload.replace("\r\n", "\n").replace("\r", "\n") + collapsed = normalized.replace("\n", "\\n") + return escape(collapsed, quote=False) + + +def _decode_payload(payload: str) -> str: + return unescape(payload).replace("\\n", "\n") + + +def _default_formatter(tag: str, timestamp: str, payload: str) -> str: + encoded = _encode_payload(payload) + return f"<{tag} timestamp=\"{timestamp}\">{encoded}\n" + + +def _resolve_working_memory_log() -> "WorkingMemoryLog": + from .summarization import get_working_memory_log + + return get_working_memory_log() + + +_ATTR_PATTERN = re.compile(r"(\w+)\s*=\s*\"([^\"]*)\"") + + +class ConversationLog: + """Append-only conversation log persisted to disk for the interaction agent.""" + + def __init__(self, path: Path, formatter: TranscriptFormatter = _default_formatter): + self._path = path + self._formatter = formatter + self._lock = threading.Lock() + self._ensure_directory() + self._working_memory_log = _resolve_working_memory_log() + + def _ensure_directory(self) -> None: + try: + self._path.parent.mkdir(parents=True, exist_ok=True) + except Exception as exc: # pragma: no cover - defensive + logger.warning("conversation log directory creation failed", extra={"error": str(exc)}) + + def _append(self, tag: str, payload: str) -> str: + timestamp = now_in_user_timezone("%Y-%m-%d %H:%M:%S") + entry = self._formatter(tag, timestamp, str(payload)) + with self._lock: + try: + with self._path.open("a", encoding="utf-8") as handle: + handle.write(entry) + except Exception as exc: # pragma: no cover - defensive + logger.error( + "conversation log append failed", + extra={"error": str(exc), "tag": tag, "path": str(self._path)}, + ) + raise + self._notify_summarization() + return timestamp + + def _parse_line(self, line: str) -> Optional[Tuple[str, str, str]]: + stripped = line.strip() + if not stripped.startswith("<") or "") + if open_end == -1: + return None + open_tag_content = stripped[1:open_end] + if " " in open_tag_content: + tag, attr_string = open_tag_content.split(" ", 1) + else: + tag, attr_string = open_tag_content, "" + close_start = stripped.rfind("") + if close_start == -1 or close_end == -1: + return None + 
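# Illustrative round-trip for this parser (values are examples only): a stored line such as
#   <user_message timestamp="2025-01-01 09:00:00">hi\nthere</user_message>
# is returned as ("user_message", "2025-01-01 09:00:00", "hi\nthere"), with the escaped
# "\n" sequence decoded back into a real newline by _decode_payload defined above.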
closing_tag = stripped[close_start + 2 : close_end] + if closing_tag != tag: + return None + payload = stripped[open_end + 1 : close_start] + attributes: Dict[str, str] = { + match.group(1): match.group(2) for match in _ATTR_PATTERN.finditer(attr_string) + } + timestamp = attributes.get("timestamp", "") + return tag, timestamp, _decode_payload(payload) + + def iter_entries(self) -> Iterator[Tuple[str, str, str]]: + with self._lock: + try: + lines = self._path.read_text(encoding="utf-8").splitlines() + except FileNotFoundError: + lines = [] + except Exception as exc: # pragma: no cover - defensive + logger.error( + "conversation log read failed", extra={"error": str(exc), "path": str(self._path)} + ) + raise + for line in lines: + item = self._parse_line(line) + if item is not None: + yield item + + def load_transcript(self) -> str: + parts: List[str] = [] + for tag, timestamp, payload in self.iter_entries(): + safe_payload = escape(payload, quote=False) + if timestamp: + parts.append(f"<{tag} timestamp=\"{timestamp}\">{safe_payload}") + else: + parts.append(f"<{tag}>{safe_payload}") + return "\n".join(parts) + + def record_user_message(self, content: str) -> None: + timestamp = self._append("user_message", content) + self._working_memory_log.append_entry("user_message", content, timestamp) + + def record_agent_message(self, content: str) -> None: + timestamp = self._append("agent_message", content) + self._working_memory_log.append_entry("agent_message", content, timestamp) + + def record_reply(self, content: str) -> None: + timestamp = self._append("poke_reply", content) + self._working_memory_log.append_entry("poke_reply", content, timestamp) + + def record_wait(self, reason: str) -> None: + """Record a wait marker that should not reach the user-facing chat history.""" + timestamp = self._append("wait", reason) + self._working_memory_log.append_entry("wait", reason, timestamp) + + def _notify_summarization(self) -> None: + settings = get_settings() + if not settings.summarization_enabled: + return + + try: + from .summarization import schedule_summarization # type: ignore import-not-found + except Exception as exc: # pragma: no cover - defensive + logger.debug( + "summarization scheduler unavailable", + extra={"error": str(exc)}, + ) + return + + try: + schedule_summarization() + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "failed to schedule summarization", + extra={"error": str(exc)}, + ) + + def to_chat_messages(self) -> List[ChatMessage]: + messages: List[ChatMessage] = [] + for tag, timestamp, payload in self.iter_entries(): + normalized_timestamp = timestamp or None + if tag == "user_message": + messages.append( + ChatMessage(role="user", content=payload, timestamp=normalized_timestamp) + ) + elif tag == "poke_reply": + messages.append( + ChatMessage( + role="assistant", content=payload, timestamp=normalized_timestamp + ) + ) + elif tag == "wait": + # Wait markers are orchestration metadata and must not surface to the user + continue + return messages + + def clear(self) -> None: + with self._lock: + try: + if self._path.exists(): + self._path.unlink() + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "conversation log clear failed", extra={"error": str(exc), "path": str(self._path)} + ) + finally: + self._ensure_directory() + try: + self._working_memory_log.clear() + except Exception as exc: # pragma: no cover - defensive + logger.debug( + "working memory clear skipped", + extra={"error": str(exc)}, + ) + + 
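A short usage sketch for the conversation log defined above (assuming the package is importable and the working-memory path under server/data is writable; the log path and messages are illustrative):

from pathlib import Path

from server.services.conversation.log import ConversationLog

log = ConversationLog(Path("/tmp/poke_conversation.log"))  # illustrative path
log.record_user_message("can you draft a reply to Sam?")
log.record_reply("on it")
log.record_wait("draft already sent")  # orchestration marker, hidden from the user

print(log.load_transcript())                     # XML-style entries with timestamps
print([m.role for m in log.to_chat_messages()])  # ['user', 'assistant'] (wait entries are skipped)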
+_conversation_log = ConversationLog(_CONVERSATION_LOG_PATH) + + +def get_conversation_log() -> ConversationLog: + return _conversation_log + + +__all__ = ["ConversationLog", "get_conversation_log"] diff --git a/server/services/conversation/summarization/__init__.py b/server/services/conversation/summarization/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3656dcf563e5b56632a178e4af7b837a6c8faf15 --- /dev/null +++ b/server/services/conversation/summarization/__init__.py @@ -0,0 +1,11 @@ +"""Summarization service package.""" + +from .working_memory_log import get_working_memory_log +from .scheduler import schedule_summarization +from .state import SummaryState + +__all__ = [ + "get_working_memory_log", + "schedule_summarization", + "SummaryState", +] diff --git a/server/services/conversation/summarization/prompt_builder.py b/server/services/conversation/summarization/prompt_builder.py new file mode 100644 index 0000000000000000000000000000000000000000..bf4143587ad5943e7bb719a77469ced280f103a9 --- /dev/null +++ b/server/services/conversation/summarization/prompt_builder.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +from dataclasses import dataclass +from textwrap import dedent +from typing import Dict, List + +from .state import LogEntry + + +@dataclass(frozen=True) +class SummaryPrompt: + system_prompt: str + messages: List[Dict[str, str]] + + +_SYSTEM_PROMPT = dedent( + """ + You are the assistant's chief-of-staff memory curator. Produce a complete working-memory briefing + that the assistant reads before every response. Follow these directives without exception: + + FORMAT β€” Always output using this exact structure (replace angle brackets with content): + + Summary generated: (user timezone) + + Timeline & Commitments: + - β€” . Include participants, location, objective, + required deliverables, and current status (confirmed / pending / awaiting response) in chronological order. + + Pending & Follow-ups: + - β€” . Specify owner, status, next step, blockers, + and any tracking IDs, links, budgets, or artefacts mentioned. + + Routines & Recurring: + - β€” . Note fulfilment channel, + lead times, budgets, or escalation rules if provided. + + Preferences & Profile: + - . Capture formats, brands, dietary needs, + communication styles, scheduling windows, or other personalization cues. + + Context & Notes: + - that informs future decisions and + does not belong in earlier sections. + + If a section has no content, output a single bullet "- No items." + + RULES β€” Obey all of these simultaneously: + 1. Rebuild the entire briefing from scratch on every run; never append or partially edit prior text. + 2. Merge new actionable information while retaining still-relevant facts from the previous summary. + 3. Remove items that are complete or obsolete unless the logs explicitly keep them active. + 4. Convert every relative time phrase (today, tomorrow, next week, tonight, etc.) into explicit + YYYY-MM-DD (and HH:MM when known) timestamps in the user's timezone. + 5. Include all salient details for people, locations, deliverables, tools, identifiers, budgets, and links + whenever they appear in the logs. + 6. Order bullets earliest-first within each section and keep language concise yet information-dense. + 7. Do not invent facts; only use information present in the existing summary or new logs. 
+ """ +).strip() + + +def _format_existing_summary(previous_summary: str) -> str: + summary = (previous_summary or "").strip() + return summary if summary else "None" + + +def _format_log_entries(entries: List[LogEntry]) -> str: + lines: List[str] = [] + for entry in entries: + label = entry.tag.replace("_", " ") + payload = entry.payload.strip() + index = entry.index if entry.index >= 0 else "?" + if payload: + lines.append(f"[{index}] {label}: {payload}") + else: + lines.append(f"[{index}] {label}: (empty)") + return "\n".join(lines) if lines else "(no new logs)" + + +def build_summarization_prompt(previous_summary: str, entries: List[LogEntry]) -> SummaryPrompt: + content = dedent( + f""" + Existing memory summary: + {_format_existing_summary(previous_summary)} + + New conversation logs to merge: + {_format_log_entries(entries)} + """ + ).strip() + + messages = [{"role": "user", "content": content}] + return SummaryPrompt(system_prompt=_SYSTEM_PROMPT, messages=messages) + + +__all__ = ["SummaryPrompt", "build_summarization_prompt"] diff --git a/server/services/conversation/summarization/scheduler.py b/server/services/conversation/summarization/scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..959bcce9e27483bab1d4179007ceb3c7de29a0d7 --- /dev/null +++ b/server/services/conversation/summarization/scheduler.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +import asyncio + +from ....logging_config import logger +from .summarizer import summarize_conversation + +_pending = False +_running = False + + +def schedule_summarization() -> None: + """Schedule a background summarization pass if not already queued.""" + global _pending + _pending = True + try: + loop = asyncio.get_running_loop() + except RuntimeError: + logger.debug("summarization skipped (no running event loop)") + return + + if not _running: + loop.create_task(_run_worker()) + + +async def _run_worker() -> None: + global _pending, _running + if _running: + return + + _running = True + try: + while _pending: + _pending = False + try: + await summarize_conversation() + except Exception as exc: # pragma: no cover - defensive + logger.error( + "summarization worker failed", + extra={"error": str(exc)}, + ) + finally: + _running = False + + +__all__ = ["schedule_summarization"] diff --git a/server/services/conversation/summarization/state.py b/server/services/conversation/summarization/state.py new file mode 100644 index 0000000000000000000000000000000000000000..bc862f07b28d0de7a59a73fe8fe34508ddcdb1b7 --- /dev/null +++ b/server/services/conversation/summarization/state.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from datetime import datetime +from typing import List, Optional + + +@dataclass(frozen=True) +class LogEntry: + """Snapshot of a single conversation log entry.""" + + tag: str + payload: str + index: int = -1 + timestamp: Optional[str] = None + + +@dataclass +class SummaryState: + """Persisted working-memory summary state.""" + + summary_text: str = "" + last_index: int = -1 + updated_at: Optional[datetime] = None + unsummarized_entries: List[LogEntry] = field(default_factory=list) + + @classmethod + def empty(cls) -> "SummaryState": + return cls() + + +__all__ = ["LogEntry", "SummaryState"] diff --git a/server/services/conversation/summarization/summarizer.py b/server/services/conversation/summarization/summarizer.py new file mode 100644 index 0000000000000000000000000000000000000000..17b5fd1cf52a8abe2aa3031193d3a11749e44e36 --- 
/dev/null +++ b/server/services/conversation/summarization/summarizer.py @@ -0,0 +1,135 @@ +from __future__ import annotations + +from datetime import datetime, timezone +from typing import List, Optional, TYPE_CHECKING + +from ....config import get_settings +from ....logging_config import logger +from ....openrouter_client import OpenRouterError, request_chat_completion +from .prompt_builder import SummaryPrompt, build_summarization_prompt +from .state import LogEntry, SummaryState +from .working_memory_log import get_working_memory_log + +if TYPE_CHECKING: # pragma: no cover - type checking only + from ..log import ConversationLog + + +def _resolve_conversation_log() -> "ConversationLog": + from ..log import get_conversation_log + + return get_conversation_log() + + +def _collect_entries(log) -> List[LogEntry]: + entries: List[LogEntry] = [] + for index, (tag, timestamp, payload) in enumerate(log.iter_entries()): + entries.append(LogEntry(tag=tag, payload=payload, index=index, timestamp=timestamp or None)) + return entries + + +async def _call_openrouter(prompt: SummaryPrompt, model: str, api_key: Optional[str]) -> str: + last_error: Exception | None = None + for attempt in range(2): + try: + response = await request_chat_completion( + model=model, + messages=prompt.messages, + system=prompt.system_prompt, + api_key=api_key, + ) + choices = response.get("choices") or [] + if not choices: + raise OpenRouterError("API response missing choices") + message = choices[0].get("message") or {} + content = (message.get("content") or "").strip() + if content: + return content + raise OpenRouterError("API response missing content") + except OpenRouterError as exc: + last_error = exc + if attempt == 0: + logger.warning( + "conversation summarization attempt failed; retrying", + extra={"error": str(exc)}, + ) + continue + logger.error( + "conversation summarization failed", + extra={"error": str(exc)}, + ) + break + except Exception as exc: # pragma: no cover - defensive + last_error = exc + logger.error( + "conversation summarization unexpected failure", + extra={"error": str(exc)}, + ) + break + if last_error: + raise last_error + raise OpenRouterError("Conversation summarization failed") + + +async def summarize_conversation() -> bool: + settings = get_settings() + if not settings.summarization_enabled: + return False + + conversation_log = _resolve_conversation_log() + working_memory_log = get_working_memory_log() + + entries = _collect_entries(conversation_log) + state = working_memory_log.load_summary_state() + + threshold = settings.conversation_summary_threshold + tail_size = max(settings.conversation_summary_tail_size, 0) + + if threshold <= 0: + return False + + unsummarized_entries = [entry for entry in entries if entry.index > state.last_index] + if len(unsummarized_entries) < threshold + tail_size: + return False + + batch = unsummarized_entries[:threshold] + cutoff_index = batch[-1].index + + prompt = build_summarization_prompt(state.summary_text, batch) + + logger.info( + "conversation summarization started", + extra={ + "entries_total": len(entries), + "unsummarized": len(unsummarized_entries), + "batch_size": len(batch), + "last_index_before": state.last_index, + "cutoff_index": cutoff_index, + }, + ) + + summary_text = await _call_openrouter(prompt, settings.summarizer_model, settings.api_key) + summary_body = summary_text if summary_text else state.summary_text + + refreshed_entries = _collect_entries(conversation_log) + remaining_entries = [entry for entry in refreshed_entries if 
entry.index > cutoff_index] + + new_state = SummaryState( + summary_text=summary_body, + last_index=cutoff_index, + updated_at=datetime.now(timezone.utc), + unsummarized_entries=remaining_entries, + ) + + working_memory_log.write_summary_state(new_state) + + logger.info( + "conversation summarization completed", + extra={ + "last_index_after": new_state.last_index, + "remaining_unsummarized": len(new_state.unsummarized_entries), + }, + ) + return True + + +__all__ = ["summarize_conversation"] diff --git a/server/services/conversation/summarization/working_memory_log.py b/server/services/conversation/summarization/working_memory_log.py new file mode 100644 index 0000000000000000000000000000000000000000..02ac29c4d5385b8a6e62eb9b3582f819b638633a --- /dev/null +++ b/server/services/conversation/summarization/working_memory_log.py @@ -0,0 +1,256 @@ +from __future__ import annotations + +import json +import re +import threading +from datetime import datetime +from html import escape, unescape +from pathlib import Path +from typing import List, Optional, Tuple + +from ....logging_config import logger +from ....utils.timezones import now_in_user_timezone +from .state import LogEntry, SummaryState + + +_DATA_DIR = Path(__file__).resolve().parent.parent.parent.parent / "data" +_WORKING_MEMORY_LOG_PATH = _DATA_DIR / "conversation" / "poke_working_memory.log" + + +def _encode_payload(payload: str) -> str: + normalized = payload.replace("\r\n", "\n").replace("\r", "\n") + collapsed = normalized.replace("\n", "\\n") + return escape(collapsed, quote=False) + + +def _decode_payload(payload: str) -> str: + return unescape(payload).replace("\\n", "\n") + + +def _format_line(tag: str, payload: str, timestamp: Optional[str] = None) -> str: + encoded = _encode_payload(payload) + if timestamp: + return f"<{tag} timestamp=\"{timestamp}\">{encoded}\n" + return f"<{tag}>{encoded}\n" + + +def _current_timestamp() -> str: + return now_in_user_timezone("%Y-%m-%d %H:%M:%S") + + +class WorkingMemoryLog: + """Persisted working-memory file storing conversation summary and recent entries.""" + + def __init__(self, path: Path) -> None: + self._path = path + self._lock = threading.Lock() + self._ensure_directory() + self._initialize_file() + + def _ensure_directory(self) -> None: + try: + self._path.parent.mkdir(parents=True, exist_ok=True) + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "working memory directory creation failed", + extra={"error": str(exc), "path": str(self._path)}, + ) + + def _initialize_file(self) -> None: + with self._lock: + self._initialize_file_locked() + + def _initialize_file_locked(self) -> None: + if self._path.exists() and self._path.stat().st_size > 0: + return + initial_state = SummaryState.empty() + lines = [ + _format_line( + "summary_info", + json.dumps({"last_index": initial_state.last_index, "updated_at": None}), + ), + _format_line("conversation_summary", ""), + ] + try: + self._path.write_text("".join(lines), encoding="utf-8") + except Exception as exc: # pragma: no cover - defensive + logger.error( + "working memory initialization failed", + extra={"error": str(exc), "path": str(self._path)}, + ) + raise + + def append_entry(self, tag: str, payload: str, timestamp: Optional[str] = None) -> None: + sanitized_timestamp = timestamp or _current_timestamp() + line = _format_line(tag, str(payload), sanitized_timestamp) + with self._lock: + try: + with self._path.open("a", encoding="utf-8") as handle: + handle.write(line) + except Exception as exc: # pragma: no 
cover - defensive + logger.error( + "working memory append failed", + extra={"error": str(exc), "tag": tag, "path": str(self._path)}, + ) + raise + + def load_summary_state(self) -> SummaryState: + with self._lock: + try: + lines = self._path.read_text(encoding="utf-8").splitlines() + except FileNotFoundError: + return SummaryState.empty() + except Exception as exc: # pragma: no cover - defensive + logger.error( + "working memory read failed", + extra={"error": str(exc), "path": str(self._path)}, + ) + return SummaryState.empty() + + summary_text = "" + last_index = -1 + updated_at: Optional[datetime] = None + entries: List[LogEntry] = [] + + for raw_line in lines: + parsed = self._parse_line(raw_line) + if parsed is None: + continue + tag, timestamp, payload = parsed + if tag == "summary_info": + try: + data = json.loads(payload) + except json.JSONDecodeError: + continue + last_index_val = data.get("last_index") + if isinstance(last_index_val, int): + last_index = last_index_val + updated_raw = data.get("updated_at") + if isinstance(updated_raw, str) and updated_raw: + try: + updated_at = datetime.fromisoformat(updated_raw) + except ValueError: + updated_at = None + elif tag == "conversation_summary": + summary_text = payload + else: + entries.append( + LogEntry(tag=tag, payload=payload, timestamp=timestamp or None) + ) + + state = SummaryState( + summary_text=summary_text, + last_index=last_index, + updated_at=updated_at, + unsummarized_entries=entries, + ) + return state + + def write_summary_state(self, state: SummaryState) -> None: + meta_payload = json.dumps( + { + "last_index": state.last_index, + "updated_at": state.updated_at.isoformat() if state.updated_at else None, + } + ) + + lines = [_format_line("summary_info", meta_payload)] + lines.append(_format_line("conversation_summary", state.summary_text or "")) + for entry in state.unsummarized_entries: + lines.append(_format_line(entry.tag, entry.payload, entry.timestamp)) + + temp_path = self._path.with_suffix(".tmp") + data = "".join(lines) + with self._lock: + try: + temp_path.write_text(data, encoding="utf-8") + temp_path.replace(self._path) + except Exception as exc: # pragma: no cover - defensive + logger.error( + "working memory write failed", + extra={"error": str(exc), "path": str(self._path)}, + ) + raise + finally: + if temp_path.exists(): + try: + temp_path.unlink() + except Exception: # pragma: no cover - defensive cleanup + pass + + def render_transcript(self, state: Optional[SummaryState] = None) -> str: + snapshot = state or self.load_summary_state() + parts: List[str] = [] + + summary_text = (snapshot.summary_text or "").strip() + if summary_text: + safe_summary = escape(summary_text, quote=False) + parts.append(f"{safe_summary}") + + for entry in snapshot.unsummarized_entries: + safe_payload = escape(entry.payload, quote=False) + if entry.timestamp: + parts.append( + f'<{entry.tag} timestamp="{entry.timestamp}">{safe_payload}' + ) + else: + parts.append(f'<{entry.tag}>{safe_payload}') + + return '\n'.join(parts) + + def clear(self) -> None: + with self._lock: + try: + if self._path.exists(): + self._path.unlink() + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "working memory clear failed", + extra={"error": str(exc), "path": str(self._path)}, + ) + finally: + self._ensure_directory() + self._initialize_file_locked() + + def _parse_line(self, line: str) -> Optional[Tuple[str, Optional[str], str]]: + stripped = line.strip() + if not stripped.startswith("<") or "") + if open_end == -1: 
diff --git a/server/services/execution/__init__.py b/server/services/execution/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4424039d48faaeb4ef5dd0a0e27cc6f5d0644523
--- /dev/null
+++ b/server/services/execution/__init__.py
@@ -0,0 +1,11 @@
+"""Execution agent support services."""
+
+from .log_store import ExecutionAgentLogStore, get_execution_agent_logs
+from .roster import AgentRoster, get_agent_roster
+
+__all__ = [
+    "ExecutionAgentLogStore",
+    "get_execution_agent_logs",
+    "AgentRoster",
+    "get_agent_roster",
+]
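Reviewer note (not part of the patch): a usage sketch for the exports above, assuming the repo's `server.services.execution` package layout; the agent name and messages are hypothetical.

```python
# Usage sketch only; agent name and payloads are made up.
from server.services.execution import get_agent_roster, get_execution_agent_logs

logs = get_execution_agent_logs()
roster = get_agent_roster()

agent = "email-triage"  # hypothetical execution agent name
roster.add_agent(agent)

logs.record_request(agent, "Summarize unread messages from today.")
logs.record_action(agent, "Called GMAIL_FETCH_EMAILS with query 'label:INBOX newer_than:1d'")
logs.record_agent_response(agent, "Found 3 unread messages; summary sent.")

print(logs.load_transcript(agent))       # full XML-style transcript for prompting
print(logs.load_recent(agent, limit=2))  # last two (tag, timestamp, payload) tuples
```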
"""Get or create a lock for an agent.""" + slug = _slugify(agent_name) + with self._global_lock: + if slug not in self._locks: + self._locks[slug] = threading.Lock() + return self._locks[slug] + + def _log_path(self, agent_name: str) -> Path: + """Get log file path for an agent.""" + return self._base_dir / f"{_slugify(agent_name)}.log" + + def _append(self, agent_name: str, tag: str, payload: str) -> None: + """Append an entry with the given tag.""" + encoded = _encode_payload(str(payload)) + timestamp = now_in_user_timezone("%Y-%m-%d %H:%M:%S") + entry = f"<{tag} timestamp=\"{timestamp}\">{encoded}\n" + + with self._lock_for(agent_name): + try: + with self._log_path(agent_name).open("a", encoding="utf-8") as handle: + handle.write(entry) + except Exception as exc: + logger.error(f"Failed to append to log: {exc}") + + def _parse_line(self, line: str) -> Optional[Tuple[str, str, str]]: + """Parse a single log line.""" + stripped = line.strip() + if not (stripped.startswith("<") and "") + close_start = stripped.rfind("") + + if open_end == -1 or close_start == -1 or close_end == -1: + return None + + open_tag_content = stripped[1:open_end] + if " " in open_tag_content: + tag, attr_string = open_tag_content.split(" ", 1) + else: + tag, attr_string = open_tag_content, "" + + closing_tag = stripped[close_start + 2 : close_end] + if closing_tag != tag: + return None + + attributes: Dict[str, str] = { + match.group(1): match.group(2) for match in _ATTR_PATTERN.finditer(attr_string) + } + timestamp = attributes.get("timestamp", "") + payload = _decode_payload(stripped[open_end + 1 : close_start]) + return tag, timestamp, payload + + def record_request(self, agent_name: str, instructions: str) -> None: + """Record an incoming request from the interaction agent.""" + self._append(agent_name, "agent_request", instructions) + + def record_action(self, agent_name: str, description: str) -> None: + """Record an agent action (tool call).""" + self._append(agent_name, "agent_action", description) + + def record_tool_response(self, agent_name: str, tool_name: str, response: str) -> None: + """Record the response from a tool.""" + self._append(agent_name, "tool_response", f"{tool_name}: {response}") + + def record_agent_response(self, agent_name: str, response: str) -> None: + """Record the agent's final response.""" + self._append(agent_name, "agent_response", response) + + def iter_entries(self, agent_name: str) -> Iterator[Tuple[str, str, str]]: + """Iterate over all log entries for an agent.""" + path = self._log_path(agent_name) + with self._lock_for(agent_name): + try: + lines = path.read_text(encoding="utf-8").splitlines() + except FileNotFoundError: + lines = [] + except Exception as exc: + logger.error(f"Failed to read log: {exc}") + lines = [] + + for line in lines: + parsed = self._parse_line(line) + if parsed is not None: + yield parsed + + def load_transcript(self, agent_name: str) -> str: + """Load the full transcript for inclusion in system prompt.""" + parts: List[str] = [] + for tag, timestamp, payload in self.iter_entries(agent_name): + escaped = escape(payload, quote=False) + if timestamp: + parts.append(f"<{tag} timestamp=\"{timestamp}\">{escaped}") + else: + parts.append(f"<{tag}>{escaped}") + return "\n".join(parts) + + def load_recent(self, agent_name: str, limit: int = 10) -> list[tuple[str, str, str]]: + """Load recent log entries.""" + entries = list(self.iter_entries(agent_name)) + return entries[-limit:] if entries else [] + + def list_agents(self) -> list[str]: + """List all 
agents with logs.""" + try: + return sorted(path.stem for path in self._base_dir.glob("*.log")) + except Exception as exc: + logger.error(f"Failed to list agents: {exc}") + return [] + + def clear_all(self) -> None: + """Clear all execution agent logs.""" + try: + for log_file in self._base_dir.glob("*.log"): + log_file.unlink() + logger.info("Cleared all execution agent logs") + except Exception as exc: + logger.error(f"Failed to clear execution logs: {exc}") + + +_execution_agent_logs = ExecutionAgentLogStore(_EXECUTION_LOG_DIR) + + +def get_execution_agent_logs() -> ExecutionAgentLogStore: + """Get the singleton log store instance.""" + return _execution_agent_logs diff --git a/server/services/execution/roster.py b/server/services/execution/roster.py new file mode 100644 index 0000000000000000000000000000000000000000..bcfbc911bb00e26213e8e754d0325c237ec0e45d --- /dev/null +++ b/server/services/execution/roster.py @@ -0,0 +1,92 @@ +"""Simple agent roster management - just a list of agent names.""" + +import json +import fcntl +import time +from pathlib import Path + +from ...logging_config import logger + + +class AgentRoster: + """Simple roster that stores agent names in a JSON file.""" + + def __init__(self, roster_path: Path): + self._roster_path = roster_path + self._agents: list[str] = [] + self.load() + + def load(self) -> None: + """Load agent names from roster.json.""" + if self._roster_path.exists(): + try: + with open(self._roster_path, 'r') as f: + data = json.load(f) + if isinstance(data, list): + self._agents = [str(name) for name in data] + except Exception as exc: + logger.warning(f"Failed to load roster.json: {exc}") + self._agents = [] + else: + self._agents = [] + self.save() + + def save(self) -> None: + """Save agent names to roster.json with file locking.""" + max_retries = 5 + retry_delay = 0.1 + + for attempt in range(max_retries): + try: + self._roster_path.parent.mkdir(parents=True, exist_ok=True) + + # Open file and acquire exclusive lock + with open(self._roster_path, 'w') as f: + fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB) + try: + json.dump(self._agents, f, indent=2) + return + finally: + fcntl.flock(f.fileno(), fcntl.LOCK_UN) + + except BlockingIOError: + # Lock is held by another process + if attempt < max_retries - 1: + time.sleep(retry_delay) + retry_delay *= 2 # Exponential backoff + else: + logger.warning("Failed to acquire lock on roster.json after retries") + except Exception as exc: + logger.warning(f"Failed to save roster.json: {exc}") + break + + def add_agent(self, agent_name: str) -> None: + """Add an agent to the roster if not already present.""" + if agent_name not in self._agents: + self._agents.append(agent_name) + self.save() + + def get_agents(self) -> list[str]: + """Get list of all agent names.""" + return list(self._agents) + + def clear(self) -> None: + """Clear the agent roster.""" + self._agents = [] + try: + if self._roster_path.exists(): + self._roster_path.unlink() + logger.info("Cleared agent roster") + except Exception as exc: + logger.warning(f"Failed to clear roster.json: {exc}") + + +_DATA_DIR = Path(__file__).resolve().parent.parent.parent / "data" +_ROSTER_PATH = _DATA_DIR / "execution_agents" / "roster.json" + +_agent_roster = AgentRoster(_ROSTER_PATH) + + +def get_agent_roster() -> AgentRoster: + """Get the singleton roster instance.""" + return _agent_roster diff --git a/server/services/gmail/__init__.py b/server/services/gmail/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..4d567e7ea4fa2f52de4a71bb2589d7cbd5edcaa4 --- /dev/null +++ b/server/services/gmail/__init__.py @@ -0,0 +1,28 @@ +"""Gmail-related service helpers.""" + +from .client import ( + disconnect_account, + execute_gmail_tool, + fetch_status, + get_active_gmail_user_id, + initiate_connect, +) +from .importance_classifier import classify_email_importance +from .importance_watcher import ImportantEmailWatcher, get_important_email_watcher +from .processing import EmailTextCleaner, ProcessedEmail, parse_gmail_fetch_response +from .seen_store import GmailSeenStore + +__all__ = [ + "execute_gmail_tool", + "fetch_status", + "initiate_connect", + "disconnect_account", + "get_active_gmail_user_id", + "classify_email_importance", + "ImportantEmailWatcher", + "get_important_email_watcher", + "EmailTextCleaner", + "ProcessedEmail", + "parse_gmail_fetch_response", + "GmailSeenStore", +] diff --git a/server/services/gmail/client.py b/server/services/gmail/client.py new file mode 100644 index 0000000000000000000000000000000000000000..61f3f8c9608098b5851178dcd3c68053be12310c --- /dev/null +++ b/server/services/gmail/client.py @@ -0,0 +1,494 @@ +from __future__ import annotations + +import json +import os +import threading +from datetime import datetime +from typing import Any, Dict, Optional + +from fastapi import status +from fastapi.responses import JSONResponse + +from ...config import Settings, get_settings +from ...logging_config import logger +from ...models import GmailConnectPayload, GmailDisconnectPayload, GmailStatusPayload +from ...utils import error_response + + +_CLIENT_LOCK = threading.Lock() +_CLIENT: Optional[Any] = None + +_PROFILE_CACHE: Dict[str, Dict[str, Any]] = {} +_PROFILE_CACHE_LOCK = threading.Lock() +_ACTIVE_USER_ID_LOCK = threading.Lock() +_ACTIVE_USER_ID: Optional[str] = None + + +def _normalized(value: Optional[str]) -> str: + return (value or "").strip() + + +def _set_active_gmail_user_id(user_id: Optional[str]) -> None: + sanitized = _normalized(user_id) + with _ACTIVE_USER_ID_LOCK: + global _ACTIVE_USER_ID + _ACTIVE_USER_ID = sanitized or None + + +def get_active_gmail_user_id() -> Optional[str]: + with _ACTIVE_USER_ID_LOCK: + return _ACTIVE_USER_ID + + +def _gmail_import_client(): + from composio import Composio # type: ignore + return Composio + + +# Get or create a singleton Composio client instance with thread-safe initialization +def _get_composio_client(settings: Optional[Settings] = None): + global _CLIENT + if _CLIENT is not None: + return _CLIENT + + with _CLIENT_LOCK: + if _CLIENT is None: + resolved_settings = settings or get_settings() + Composio = _gmail_import_client() + api_key = resolved_settings.composio_api_key + try: + _CLIENT = Composio(api_key=api_key) if api_key else Composio() + except TypeError as exc: + if api_key: + raise RuntimeError( + "Installed Composio SDK does not accept the api_key argument; upgrade the SDK or remove COMPOSIO_API_KEY." 
+ ) from exc + _CLIENT = Composio() + return _CLIENT + + +def _extract_email(obj: Any) -> Optional[str]: + if obj is None: + return None + direct_keys = ( + "email", + "email_address", + "emailAddress", + "user_email", + "provider_email", + "account_email", + ) + for key in direct_keys: + try: + val = getattr(obj, key) + if isinstance(val, str) and "@" in val: + return val + except Exception: + pass + if isinstance(obj, dict): + val = obj.get(key) + if isinstance(val, str) and "@" in val: + return val + if isinstance(obj, dict): + email_addresses = obj.get("emailAddresses") + if isinstance(email_addresses, (list, tuple)): + for entry in email_addresses: + if isinstance(entry, dict): + candidate = entry.get("value") or entry.get("email") or entry.get("emailAddress") + if isinstance(candidate, str) and "@" in candidate: + return candidate + elif isinstance(entry, str) and "@" in entry: + return entry + if isinstance(obj, dict): + nested_paths = ( + ("profile", "email"), + ("profile", "emailAddress"), + ("user", "email"), + ("data", "email"), + ("data", "user", "email"), + ("provider_profile", "email"), + ) + for path in nested_paths: + current: Any = obj + for segment in path: + if isinstance(current, dict) and segment in current: + current = current[segment] + else: + current = None + break + if isinstance(current, str) and "@" in current: + return current + return None + + +def _cache_profile(user_id: str, profile: Dict[str, Any]) -> None: + sanitized = _normalized(user_id) + if not sanitized or not isinstance(profile, dict): + return + with _PROFILE_CACHE_LOCK: + _PROFILE_CACHE[sanitized] = { + "profile": profile, + "cached_at": datetime.utcnow().isoformat(), + } + + +def _get_cached_profile(user_id: Optional[str]) -> Optional[Dict[str, Any]]: + sanitized = _normalized(user_id) + if not sanitized: + return None + with _PROFILE_CACHE_LOCK: + payload = _PROFILE_CACHE.get(sanitized) + if payload and isinstance(payload.get("profile"), dict): + return payload["profile"] + return None + + +def _clear_cached_profile(user_id: Optional[str] = None) -> None: + with _PROFILE_CACHE_LOCK: + if user_id: + _PROFILE_CACHE.pop(_normalized(user_id), None) + else: + _PROFILE_CACHE.clear() + + +def _fetch_profile_from_composio(user_id: Optional[str]) -> Optional[Dict[str, Any]]: + sanitized = _normalized(user_id) + if not sanitized: + return None + try: + result = execute_gmail_tool("GMAIL_GET_PROFILE", sanitized, arguments={"user_id": "me"}) + except RuntimeError as exc: + logger.warning("GMAIL_GET_PROFILE invocation failed: %s", exc) + return None + except Exception as exc: # pragma: no cover - defensive + logger.exception("Unexpected error fetching Gmail profile", extra={"user_id": sanitized}) + return None + + profile: Optional[Dict[str, Any]] = None + if isinstance(result, dict): + if isinstance(result.get("data"), dict): + profile = result["data"] + elif isinstance(result.get("profile"), dict): + profile = result["profile"] + elif isinstance(result.get("response_data"), dict): + profile = result["response_data"] + elif isinstance(result.get("items"), list): + for item in result["items"]: + if not isinstance(item, dict): + continue + data_dict = item.get("data") + if isinstance(data_dict, dict): + if isinstance(data_dict.get("response_data"), dict): + profile = data_dict["response_data"] + elif isinstance(data_dict.get("profile"), dict): + profile = data_dict["profile"] + else: + profile = data_dict + elif isinstance(item.get("response_data"), dict): + profile = item["response_data"] + elif 
isinstance(item.get("profile"), dict): + profile = item["profile"] + if isinstance(profile, dict): + break + elif result.get("successful") is True and isinstance(result.get("result"), dict): + profile = result.get("result") # type: ignore[assignment] + elif all(not isinstance(result.get(key), dict) for key in ("data", "profile", "result")): + profile = result if result else None + + if isinstance(profile, dict): + _cache_profile(sanitized, profile) + return profile + + logger.warning("Received unexpected Gmail profile payload", extra={"user_id": sanitized, "raw": result}) + return None + + +# Start Gmail OAuth connection process and return redirect URL +def initiate_connect(payload: GmailConnectPayload, settings: Settings) -> JSONResponse: + auth_config_id = payload.auth_config_id or settings.composio_gmail_auth_config_id or "" + if not auth_config_id: + return error_response( + "Missing auth_config_id. Set COMPOSIO_GMAIL_AUTH_CONFIG_ID or pass auth_config_id.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user_id = payload.user_id or f"web-{os.getpid()}" + _set_active_gmail_user_id(user_id) + _clear_cached_profile(user_id) + try: + client = _get_composio_client(settings) + req = client.connected_accounts.initiate(user_id=user_id, auth_config_id=auth_config_id) + data = { + "ok": True, + "redirect_url": getattr(req, "redirect_url", None) or getattr(req, "redirectUrl", None), + "connection_request_id": getattr(req, "id", None), + "user_id": user_id, + } + return JSONResponse(data) + except Exception as exc: + logger.exception("gmail connect failed", extra={"user_id": user_id}) + return error_response( + "Failed to initiate Gmail connect", + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(exc), + ) + + +# Check Gmail connection status and retrieve user account information +def fetch_status(payload: GmailStatusPayload) -> JSONResponse: + connection_request_id = _normalized(payload.connection_request_id) + user_id = _normalized(payload.user_id) + + if not connection_request_id and not user_id: + return error_response( + "Missing connection_request_id or user_id", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + try: + client = _get_composio_client() + account: Any = None + if connection_request_id: + try: + account = client.connected_accounts.wait_for_connection(connection_request_id, timeout=2.0) + except Exception: + try: + account = client.connected_accounts.get(connection_request_id) + except Exception: + account = None + if account is None and user_id: + try: + items = client.connected_accounts.list( + user_ids=[user_id], toolkit_slugs=["GMAIL"], statuses=["ACTIVE"] + ) + data = getattr(items, "data", None) + if data is None and isinstance(items, dict): + data = items.get("data") + if data: + account = data[0] + except Exception: + account = None + status_value = None + email = None + connected = False + profile: Optional[Dict[str, Any]] = None + profile_source = "none" + + account_user_id = None + if account is not None: + status_value = getattr(account, "status", None) or (account.get("status") if isinstance(account, dict) else None) + normalized_status = (status_value or "").upper() + connected = normalized_status in {"CONNECTED", "SUCCESS", "SUCCESSFUL", "ACTIVE", "COMPLETED"} + email = _extract_email(account) + if hasattr(account, "user_id"): + account_user_id = getattr(account, "user_id", None) + elif isinstance(account, dict): + account_user_id = account.get("user_id") + + if not user_id and account_user_id: + user_id = _normalized(account_user_id) + + if 
connected and user_id: + cached_profile = _get_cached_profile(user_id) + if cached_profile: + profile = cached_profile + profile_source = "cache" + else: + fetched_profile = _fetch_profile_from_composio(user_id) + if fetched_profile: + profile = fetched_profile + profile_source = "fetched" + if profile and not email: + email = _extract_email(profile) + elif user_id: + _clear_cached_profile(user_id) + + _set_active_gmail_user_id(user_id) + + return JSONResponse( + { + "ok": True, + "connected": bool(connected), + "status": status_value or "UNKNOWN", + "email": email, + "user_id": user_id, + "profile": profile, + "profile_source": profile_source, + } + ) + except Exception as exc: + logger.exception( + "gmail status failed", + extra={ + "connection_request_id": connection_request_id, + "user_id": user_id, + }, + ) + return error_response( + "Failed to fetch connection status", + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(exc), + ) + + +def disconnect_account(payload: GmailDisconnectPayload) -> JSONResponse: + connection_id = _normalized(payload.connection_id) or _normalized(payload.connection_request_id) + user_id = _normalized(payload.user_id) + + if not connection_id and not user_id: + return error_response( + "Missing connection_id or user_id", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + try: + client = _get_composio_client() + except Exception as exc: + logger.exception("gmail disconnect failed: client init", extra={"user_id": user_id}) + return error_response( + "Failed to disconnect Gmail", + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(exc), + ) + + removed_ids: list[str] = [] + errors: list[str] = [] + affected_user_ids: set[str] = set() + + def _delete_connection(identifier: str) -> None: + sanitized_id = _normalized(identifier) + if not sanitized_id: + return + try: + connection = client.connected_accounts.get(sanitized_id) + except Exception: + connection = None + try: + client.connected_accounts.delete(sanitized_id) + removed_ids.append(sanitized_id) + if connection is not None: + if hasattr(connection, "user_id"): + affected_user_ids.add(_normalized(getattr(connection, "user_id", None))) + elif isinstance(connection, dict): + affected_user_ids.add(_normalized(connection.get("user_id"))) + except Exception as exc: # pragma: no cover - depends on remote state + logger.exception("Failed to remove Gmail connection", extra={"connection_id": sanitized_id}) + errors.append(str(exc)) + + if connection_id: + _delete_connection(connection_id) + else: + try: + items = client.connected_accounts.list(user_ids=[user_id], toolkit_slugs=["GMAIL"]) + data = getattr(items, "data", None) + if data is None and isinstance(items, dict): + data = items.get("data") + except Exception as exc: # pragma: no cover - dependent on SDK + logger.exception("Failed to list Gmail connections", extra={"user_id": user_id}) + return error_response( + "Failed to disconnect Gmail", + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(exc), + ) + + if data: + for entry in data: + candidate = None + candidate_user_id = None + if hasattr(entry, "id"): + candidate = getattr(entry, "id", None) + candidate_user_id = getattr(entry, "user_id", None) + if candidate is None and isinstance(entry, dict): + candidate = entry.get("id") + candidate_user_id = entry.get("user_id") + if candidate: + if candidate_user_id: + affected_user_ids.add(_normalized(candidate_user_id)) + _delete_connection(candidate) + + if user_id: + affected_user_ids.add(user_id) + + for uid in 
list(affected_user_ids): + if uid: + _clear_cached_profile(uid) + if get_active_gmail_user_id() == uid: + _set_active_gmail_user_id(None) + + if errors and not removed_ids: + return error_response( + "Failed to disconnect Gmail", + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="; ".join(errors), + ) + + payload = { + "ok": True, + "disconnected": bool(removed_ids), + "removed_connection_ids": removed_ids, + } + if not removed_ids: + payload["message"] = "No Gmail connection found" + + if errors: + payload["warnings"] = errors + return JSONResponse(payload) + + +def _normalize_tool_response(result: Any) -> Dict[str, Any]: + payload_dict: Optional[Dict[str, Any]] = None + try: + if hasattr(result, "model_dump"): + payload_dict = result.model_dump() # type: ignore[assignment] + elif hasattr(result, "dict"): + payload_dict = result.dict() # type: ignore[assignment] + except Exception: + payload_dict = None + + if payload_dict is None: + try: + if hasattr(result, "model_dump_json"): + payload_dict = json.loads(result.model_dump_json()) + except Exception: + payload_dict = None + + if payload_dict is None: + if isinstance(result, dict): + payload_dict = result + elif isinstance(result, list): + payload_dict = {"items": result} + else: + payload_dict = {"repr": str(result)} + + return payload_dict + + +# Execute Gmail operations through Composio SDK with error handling +def execute_gmail_tool( + tool_name: str, + composio_user_id: str, + *, + arguments: Optional[Dict[str, Any]] = None, +) -> Dict[str, Any]: + prepared_arguments: Dict[str, Any] = {} + if isinstance(arguments, dict): + for key, value in arguments.items(): + if value is not None: + prepared_arguments[key] = value + + prepared_arguments.setdefault("user_id", "me") + + try: + client = _get_composio_client() + result = client.client.tools.execute( + tool_name, + user_id=composio_user_id, + arguments=prepared_arguments, + ) + return _normalize_tool_response(result) + except Exception as exc: + logger.exception( + "gmail tool execution failed", + extra={"tool": tool_name, "user_id": composio_user_id}, + ) + raise RuntimeError(f"{tool_name} invocation failed: {exc}") from exc diff --git a/server/services/gmail/importance_classifier.py b/server/services/gmail/importance_classifier.py new file mode 100644 index 0000000000000000000000000000000000000000..b9c1c19c3b5d6e7e38297326f4dcd130a7617e33 --- /dev/null +++ b/server/services/gmail/importance_classifier.py @@ -0,0 +1,170 @@ +"""LLM-powered classifier for determining important Gmail emails.""" + +from __future__ import annotations + +import json +from typing import Any, Dict, Optional + +from .processing import ProcessedEmail +from ...config import get_settings +from ...logging_config import logger +from ...openrouter_client import OpenRouterError, request_chat_completion + + +_TOOL_NAME = "mark_email_importance" +_TOOL_SCHEMA: Dict[str, Any] = { + "type": "function", + "function": { + "name": _TOOL_NAME, + "description": ( + "Decide whether an email should be proactively surfaced to the user and, " + "if so, provide a natural-language summary explaining why." + ), + "parameters": { + "type": "object", + "properties": { + "important": { + "type": "boolean", + "description": ( + "Set to true only when the email requires timely attention, a decision, " + "coordination, or contains critical security information (e.g. OTPs)." 
+ ), + }, + "summary": { + "type": "string", + "description": ( + "Concise 2-3 sentence summary highlighting sender, topic, and the " + "specific action or urgency for the user. Only include when important=true." + ), + }, + }, + "required": ["important"], + "additionalProperties": False, + }, + }, +} + +_SYSTEM_PROMPT = ( + "You review incoming Gmail messages and decide whether they warrant an immediate proactive " + "notification to the user. Only mark an email as important if it materially affects the " + "user's plans, requires a prompt decision or action, is a security-sensitive OTP or login " + "notice, or contains high-priority updates (e.g. interviews, meeting changes). Ignore " + "order confirmations, routine marketing, newsletters, generic receipts, and low-impact " + "status notifications. When important, craft a brief summary that will be forwarded to the " + "user describing what happened and why it matters." +) + + +def _format_email_payload(email: ProcessedEmail) -> str: + attachments = ", ".join(email.attachment_filenames) if email.attachment_filenames else "None" + labels = ", ".join(email.label_ids) if email.label_ids else "None" + header_lines = [ + f"Sender: {email.sender}", + f"Recipient: {email.recipient}", + f"Subject: {email.subject}", + f"Received (user timezone): {email.timestamp.isoformat()}", + f"Thread ID: {email.thread_id or 'None'}", + f"Labels: {labels}", + f"Has attachments: {'Yes' if email.has_attachments else 'No'}", + f"Attachment filenames: {attachments}", + ] + + return ( + "Email Metadata:\n" + + "\n".join(header_lines) + + "\n\nCleaned Body:\n" + + (email.clean_text or "(empty body)") + ) + + +async def classify_email_importance(email: ProcessedEmail) -> Optional[str]: + """Return summary text when email should be surfaced; otherwise None.""" + + settings = get_settings() + api_key = settings.api_key + model = settings.email_classifier_model + + if not api_key: + logger.warning("Skipping importance check; API key missing") + return None + + user_payload = _format_email_payload(email) + messages = [{"role": "user", "content": user_payload}] + + try: + response = await request_chat_completion( + model=model, + messages=messages, + system=_SYSTEM_PROMPT, + api_key=api_key, + tools=[_TOOL_SCHEMA], + ) + except OpenRouterError as exc: + logger.error( + "Importance classification failed", + extra={"message_id": email.id, "error": str(exc)}, + ) + return None + except Exception as exc: # pragma: no cover - defensive + logger.exception( + "Unexpected error during importance classification", + extra={"message_id": email.id}, + ) + return None + + choice = (response.get("choices") or [{}])[0] + message = choice.get("message") or {} + tool_calls = message.get("tool_calls") or [] + + for tool_call in tool_calls: + function_block = tool_call.get("function") or {} + if function_block.get("name") != _TOOL_NAME: + continue + + raw_arguments = function_block.get("arguments") + arguments = _coerce_arguments(raw_arguments) + if arguments is None: + logger.warning( + "Importance tool returned invalid arguments", + extra={"message_id": email.id}, + ) + return None + + important = bool(arguments.get("important")) + summary = arguments.get("summary") + + if not important: + return None + + if not isinstance(summary, str) or not summary.strip(): + logger.warning( + "Importance tool marked email important without summary", + extra={"message_id": email.id}, + ) + return None + + return summary.strip() + + logger.debug( + "Importance classification produced no tool call", + 
extra={"message_id": email.id}, + ) + return None + + +def _coerce_arguments(raw: Any) -> Optional[Dict[str, Any]]: + if raw is None: + return {} + if isinstance(raw, dict): + return raw + if isinstance(raw, str): + if not raw.strip(): + return {} + try: + return json.loads(raw) + except json.JSONDecodeError: + return None + return None + + +__all__ = ["classify_email_importance"] diff --git a/server/services/gmail/importance_watcher.py b/server/services/gmail/importance_watcher.py new file mode 100644 index 0000000000000000000000000000000000000000..4b2dd6bb74ac0a74beef4b0f72437d3909f44e82 --- /dev/null +++ b/server/services/gmail/importance_watcher.py @@ -0,0 +1,244 @@ +"""Background watcher that surfaces important Gmail emails proactively.""" + +from __future__ import annotations + +import asyncio +from datetime import datetime, timezone, timedelta +from pathlib import Path +from typing import List, Optional, TYPE_CHECKING + +from .client import execute_gmail_tool, get_active_gmail_user_id +from .processing import EmailTextCleaner, ProcessedEmail, parse_gmail_fetch_response +from .seen_store import GmailSeenStore +from .importance_classifier import classify_email_importance +from ...logging_config import logger +from ...utils.timezones import convert_to_user_timezone + + +if TYPE_CHECKING: # pragma: no cover - typing only + from ...agents.interaction_agent.runtime import InteractionAgentRuntime + + +def _resolve_interaction_runtime() -> "InteractionAgentRuntime": + from ...agents.interaction_agent.runtime import InteractionAgentRuntime + + return InteractionAgentRuntime() + + +DEFAULT_POLL_INTERVAL_SECONDS = 60.0 +DEFAULT_LOOKBACK_MINUTES = 10 +DEFAULT_MAX_RESULTS = 50 +DEFAULT_SEEN_LIMIT = 300 + + +_DATA_DIR = Path(__file__).resolve().parent.parent.parent / "data" +_DEFAULT_SEEN_PATH = _DATA_DIR / "gmail_seen.json" + + +class ImportantEmailWatcher: + """Poll Gmail for recent messages and surface important ones.""" + + def __init__( + self, + poll_interval_seconds: float = DEFAULT_POLL_INTERVAL_SECONDS, + lookback_minutes: int = DEFAULT_LOOKBACK_MINUTES, + *, + seen_store: Optional[GmailSeenStore] = None, + ) -> None: + self._poll_interval = poll_interval_seconds + self._lookback_minutes = lookback_minutes + self._lock = asyncio.Lock() + self._task: Optional[asyncio.Task[None]] = None + self._running = False + self._seen_store = seen_store or GmailSeenStore(_DEFAULT_SEEN_PATH, DEFAULT_SEEN_LIMIT) + self._cleaner = EmailTextCleaner(max_url_length=60) + self._has_seeded_initial_snapshot = False + self._last_poll_timestamp: Optional[datetime] = None + + # Start the background email polling task + async def start(self) -> None: + async with self._lock: + if self._task and not self._task.done(): + return + loop = asyncio.get_running_loop() + self._running = True + self._has_seeded_initial_snapshot = False + self._last_poll_timestamp = None + self._task = loop.create_task(self._run(), name="important-email-watcher") + logger.info( + "Important email watcher started", + extra={"interval_seconds": self._poll_interval, "lookback_minutes": self._lookback_minutes}, + ) + + # Stop the background email polling task gracefully + async def stop(self) -> None: + async with self._lock: + self._running = False + if self._task: + self._task.cancel() + try: + await self._task + except asyncio.CancelledError: + pass + finally: + self._task = None + logger.info("Important email watcher stopped") + + async def _run(self) -> None: + try: + while self._running: + try: + await self._poll_once() + except Exception 
as exc: # pragma: no cover - defensive + logger.exception("Important email watcher poll failed", extra={"error": str(exc)}) + await asyncio.sleep(self._poll_interval) + except asyncio.CancelledError: + raise + + # Poll Gmail once for new messages and classify them for importance + def _complete_poll(self, user_now: datetime) -> None: + self._last_poll_timestamp = user_now + self._has_seeded_initial_snapshot = True + + async def _poll_once(self) -> None: + poll_started_at = datetime.now(timezone.utc) + user_now = convert_to_user_timezone(poll_started_at) + first_poll = not self._has_seeded_initial_snapshot + previous_poll_timestamp = self._last_poll_timestamp + interval_cutoff = user_now - timedelta(seconds=self._poll_interval) + cutoff_time = interval_cutoff + if previous_poll_timestamp is not None and previous_poll_timestamp > interval_cutoff: + cutoff_time = previous_poll_timestamp + + composio_user_id = get_active_gmail_user_id() + if not composio_user_id: + logger.debug("Gmail not connected; skipping importance poll") + return + + query = f"label:INBOX newer_than:{self._lookback_minutes}m" + arguments = { + "query": query, + "include_payload": True, + "max_results": DEFAULT_MAX_RESULTS, + } + + try: + raw_result = execute_gmail_tool("GMAIL_FETCH_EMAILS", composio_user_id, arguments=arguments) + except Exception as exc: + logger.warning( + "Failed to fetch Gmail messages for watcher", + extra={"error": str(exc)}, + ) + return + + processed_emails, _ = parse_gmail_fetch_response( + raw_result, + query=query, + cleaner=self._cleaner, + ) + + if not processed_emails: + logger.debug("No recent Gmail messages found for watcher") + self._complete_poll(user_now) + return + + if first_poll: + self._seen_store.mark_seen(email.id for email in processed_emails) + logger.info( + "Important email watcher completed initial warmup", + extra={"skipped_ids": len(processed_emails)}, + ) + self._complete_poll(user_now) + return + + unseen_emails: List[ProcessedEmail] = [ + email for email in processed_emails if not self._seen_store.is_seen(email.id) + ] + + if not unseen_emails: + logger.info( + "Important email watcher check complete", + extra={"emails_reviewed": 0, "surfaced": 0}, + ) + self._complete_poll(user_now) + return + + unseen_emails.sort(key=lambda email: email.timestamp or datetime.now(timezone.utc)) + + eligible_emails: List[ProcessedEmail] = [] + aged_emails: List[ProcessedEmail] = [] + + for email in unseen_emails: + email_timestamp = email.timestamp + if email_timestamp.tzinfo is not None: + email_timestamp = email_timestamp.astimezone(user_now.tzinfo) + else: + email_timestamp = email_timestamp.replace(tzinfo=user_now.tzinfo) + + if email_timestamp < cutoff_time: + aged_emails.append(email) + continue + + eligible_emails.append(email) + + if not eligible_emails and aged_emails: + self._seen_store.mark_seen(email.id for email in aged_emails) + logger.info( + "Important email watcher check complete", + extra={ + "emails_reviewed": len(unseen_emails), + "surfaced": 0, + "suppressed_for_age": len(aged_emails), + }, + ) + self._complete_poll(user_now) + return + + summaries_sent = 0 + processed_ids: List[str] = [email.id for email in aged_emails] + + for email in eligible_emails: + summary = await classify_email_importance(email) + processed_ids.append(email.id) + if not summary: + continue + + summaries_sent += 1 + await self._dispatch_summary(summary) + + if processed_ids: + self._seen_store.mark_seen(processed_ids) + + logger.info( + "Important email watcher check complete", + extra={ + 
"emails_reviewed": len(unseen_emails), + "surfaced": summaries_sent, + "suppressed_for_age": len(aged_emails), + }, + ) + self._complete_poll(user_now) + + async def _dispatch_summary(self, summary: str) -> None: + runtime = _resolve_interaction_runtime() + try: + contextualized = f"Important email watcher notification:\n{summary}" + await runtime.handle_agent_message(contextualized) + except Exception as exc: # pragma: no cover - defensive + logger.error( + "Failed to dispatch important email summary", + extra={"error": str(exc)}, + ) + + +_watcher_instance: Optional[ImportantEmailWatcher] = None + + +def get_important_email_watcher() -> ImportantEmailWatcher: + global _watcher_instance + if _watcher_instance is None: + _watcher_instance = ImportantEmailWatcher() + return _watcher_instance + + +__all__ = ["ImportantEmailWatcher", "get_important_email_watcher"] diff --git a/server/services/gmail/processing.py b/server/services/gmail/processing.py new file mode 100644 index 0000000000000000000000000000000000000000..07a4f8d011c292caeadaecbe72f4b157eefb0275 --- /dev/null +++ b/server/services/gmail/processing.py @@ -0,0 +1,400 @@ +"""Shared Gmail email normalization and cleaning utilities.""" + +from __future__ import annotations + +import html +import re +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple + +from bs4 import BeautifulSoup + +from ...logging_config import logger +from ...utils.timezones import convert_to_user_timezone + + +class EmailTextCleaner: + """Clean and extract readable text from Gmail API email responses.""" + + def __init__(self, max_url_length: int = 60) -> None: + self.max_url_length = max_url_length + self.remove_elements = [ + "style", + "script", + "meta", + "link", + "title", + "head", + "noscript", + "iframe", + "embed", + "object", + "img", + ] + self.noise_elements = [ + "footer", + "header", + ".footer", + ".header", + "[class*=\"footer\"]", + "[class*=\"header\"]", + "[class*=\"tracking\"]", + "[class*=\"pixel\"]", + "[style*=\"display:none\"]", + "[style*=\"display: none\"]", + ] + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + # Extract and clean email content from Gmail API message payload + def clean_email_content(self, message: Dict[str, Any]) -> str: + """Return cleaned plain-text representation of a Gmail message.""" + + html_content = self._extract_html_body(message) + text_content = self._extract_plain_body(message) + + if html_content: + return self.clean_html_email(html_content) + if text_content: + return self.post_process_text(text_content) + return "" + + # Clean HTML email content by removing unwanted elements and extracting text + def clean_html_email(self, html_content: str) -> str: + try: + soup = BeautifulSoup(html_content, "html.parser") + + for element_type in self.remove_elements: + for element in soup.find_all(element_type): + element.decompose() + + for selector in self.noise_elements: + try: + for element in soup.select(selector): + element.decompose() + except Exception as exc: # pragma: no cover - defensive + logger.debug( + "Failed to remove element via selector", + extra={"selector": selector, "error": str(exc)}, + ) + + for link in soup.find_all("a"): + href = link.get("href", "") + text = link.get_text(strip=True) + + if href: + display_url = self.truncate_url(href) + + if text and text != href and not 
self.is_url_like(text): + link.replace_with(f"{text} ({display_url})") + elif text and text != href: + link.replace_with(f"[Link: {display_url}]") + else: + link.replace_with(f"[Link: {display_url}]") + + text = soup.get_text(separator="\n", strip=True) + return self.post_process_text(text) + + except Exception as exc: # pragma: no cover - defensive + logger.error("Error cleaning HTML email", extra={"error": str(exc)}) + return self.fallback_text_extraction(html_content) + + def truncate_url(self, url: str) -> str: + if not url or len(url) <= self.max_url_length: + return url + + url = self.remove_tracking_params(url) + if len(url) <= self.max_url_length: + return url + return f"{url[: self.max_url_length]}..." + + def remove_tracking_params(self, url: str) -> str: + try: + from urllib.parse import parse_qs, urlencode, urlparse, urlunparse + + parsed = urlparse(url) + if not parsed.query: + return url + + tracking_params = { + "utm_source", + "utm_medium", + "utm_campaign", + "gclid", + "fbclid", + "ref", + "trk", + } + + query_params = parse_qs(parsed.query, keep_blank_values=False) + cleaned_params = { + key: value + for key, value in query_params.items() + if key.lower() not in tracking_params + } + + new_query = urlencode(cleaned_params, doseq=True) + new_parsed = parsed._replace(query=new_query) + return urlunparse(new_parsed) + + except Exception as exc: # pragma: no cover - defensive + logger.debug( + "Failed to strip tracking params", + extra={"error": str(exc), "url": url}, + ) + return url + + def is_url_like(self, text: str) -> bool: + if not text: + return False + lowered = text.lower() + if lowered.startswith(("http://", "https://", "www.", "ftp://")): + return True + return "." in lowered and " " not in lowered and len(lowered.split(".")) >= 2 + + def post_process_text(self, text: str) -> str: + text = html.unescape(text) + text = re.sub(r"\n\s*\n\s*\n", "\n\n", text) + text = re.sub(r"[ \t]+", " ", text) + text = re.sub(r"\n ", "\n", text) + + noise_patterns = [ + r"View this email in your browser.*?\n", + r"If you can't see this email.*?\n", + r"This is a system-generated email.*?\n", + r"Please do not reply to this email.*?\n", + r"Unsubscribe.*?preferences.*?\n", + r"Β© \d{4}.*?All rights reserved.*?\n", + r"\[Image:.*?\]", + r"\[Image\]", + r".*?", + r"\(image\)", + r"\(Image\)", + r"Image: .*?\n", + r"Alt text: .*?\n", + ] + + for pattern in noise_patterns: + text = re.sub(pattern, "", text, flags=re.IGNORECASE) + + text = re.sub(r"\n{3,}", "\n\n", text) + text = text.strip() + return text + + def fallback_text_extraction(self, html_content: str) -> str: + stripped = re.sub(r"<[^>]+>", " ", html_content) + stripped = re.sub(r"\s+", " ", stripped) + return self.post_process_text(stripped) + + def _extract_html_body(self, message: Dict[str, Any]) -> Optional[str]: + payload = message.get("payload") or {} + if isinstance(payload, dict): + parts = payload.get("parts") + if isinstance(parts, list): + for part in parts: + if not isinstance(part, dict): + continue + mime_type = part.get("mimeType") or "" + if mime_type.lower() == "text/html": + if body := part.get("body"): + data = body.get("data") + if isinstance(data, str): + try: + import base64 + + return base64.urlsafe_b64decode(data).decode("utf-8", errors="replace") + except Exception: + continue + return message.get("htmlBody") + + def _extract_plain_body(self, message: Dict[str, Any]) -> Optional[str]: + payload = message.get("payload") or {} + if isinstance(payload, dict): + if body := payload.get("body"): + data = 
body.get("data") + if isinstance(data, str): + try: + import base64 + + return base64.urlsafe_b64decode(data).decode("utf-8", errors="replace") + except Exception: + pass + return message.get("textBody") + + def extract_attachment_info(self, attachments: Iterable[Any]) -> Tuple[bool, int, List[str]]: + filenames: List[str] = [] + count = 0 + for item in attachments or []: + if isinstance(item, dict): + filename = item.get("filename") or item.get("name") + if filename: + filenames.append(str(filename)) + count += 1 + return bool(count), count, filenames + + +@dataclass(frozen=True) +class ProcessedEmail: + """Normalized Gmail message representation.""" + + id: str + thread_id: Optional[str] + query: str + subject: str + sender: str + recipient: str + timestamp: datetime + label_ids: List[str] + clean_text: str + has_attachments: bool + attachment_count: int + attachment_filenames: List[str] + + +# ---------------------------------------------------------------------- +# Helpers shared across modules +# ---------------------------------------------------------------------- + +# Parse Gmail timestamp string into timezone-aware datetime object +def parse_gmail_timestamp(raw: Optional[str]) -> Optional[datetime]: + if not raw: + return None + + try: + normalized = raw.replace("Z", "+00:00") if raw.endswith("Z") else raw + dt = datetime.fromisoformat(normalized) + return convert_to_user_timezone(dt) + except ValueError: + return None + + +# Convert raw Gmail API message into a clean ProcessedEmail object +def build_processed_email( + message: Dict[str, Any], + *, + query: str, + cleaner: Optional[EmailTextCleaner] = None, +) -> Optional[ProcessedEmail]: + message_id = (message.get("messageId") or message.get("id") or "").strip() + if not message_id: + logger.warning("Skipping email with missing message ID") + return None + + cleaner = cleaner or EmailTextCleaner() + + timestamp = parse_gmail_timestamp(message.get("messageTimestamp")) + if not timestamp: + logger.warning("Email missing timestamp; using current time", extra={"message_id": message_id}) + timestamp = convert_to_user_timezone(datetime.now(timezone.utc)) + + try: + clean_text = cleaner.clean_email_content(message) + except Exception as exc: # pragma: no cover - defensive + logger.error( + "Failed to clean email content", + extra={"message_id": message_id, "error": str(exc)}, + ) + clean_text = "Error processing email content" + + attachments = message.get("attachmentList", []) + has_attachments, attachment_count, attachment_filenames = cleaner.extract_attachment_info(attachments) + + thread_id = message.get("threadId") or message.get("thread_id") + subject = message.get("subject") or "No Subject" + sender = message.get("sender") or "Unknown Sender" + recipient = message.get("to") or "Unknown Recipient" + label_ids = list(message.get("labelIds") or []) + + return ProcessedEmail( + id=message_id, + thread_id=thread_id, + query=query, + subject=subject, + sender=sender, + recipient=recipient, + timestamp=timestamp, + label_ids=label_ids, + clean_text=clean_text, + has_attachments=has_attachments, + attachment_count=attachment_count, + attachment_filenames=attachment_filenames, + ) + + +# Convert multiple raw Gmail messages into ProcessedEmail objects +def build_processed_emails( + messages: Sequence[Dict[str, Any]], + *, + query: str, + cleaner: Optional[EmailTextCleaner] = None, +) -> List[ProcessedEmail]: + processed: List[ProcessedEmail] = [] + for message in messages: + if not isinstance(message, dict): + continue + email = 
build_processed_email(message, query=query, cleaner=cleaner) + if email is not None: + processed.append(email) + return processed + + +# Parse Composio Gmail API response and extract clean email data with pagination +def parse_gmail_fetch_response( + raw_result: Any, + *, + query: str, + cleaner: Optional[EmailTextCleaner] = None, +) -> Tuple[List[ProcessedEmail], Optional[str]]: + """Convert Composio Gmail fetch payload into processed email models.""" + + emails: List[ProcessedEmail] = [] + next_page: Optional[str] = None + + containers = [raw_result] if isinstance(raw_result, dict) else ( + raw_result if isinstance(raw_result, list) else [] + ) + + for container in containers: + if not isinstance(container, dict): + continue + + messages_block: Optional[Sequence[Any]] = None + + data_section = container.get("data") + if isinstance(data_section, dict): + token = data_section.get("nextPageToken") + if isinstance(token, str) and not next_page: + next_page = token + candidate = data_section.get("messages") + if isinstance(candidate, list): + messages_block = candidate + + if messages_block is None: + candidate = container.get("messages") + if isinstance(candidate, list): + messages_block = candidate + + if not messages_block: + continue + + for message in messages_block: + if not isinstance(message, dict): + continue + processed = build_processed_email(message, query=query, cleaner=cleaner) + if processed: + emails.append(processed) + + return emails, next_page + + +__all__ = [ + "EmailTextCleaner", + "ProcessedEmail", + "build_processed_email", + "build_processed_emails", + "convert_to_user_timezone", + "parse_gmail_timestamp", + "parse_gmail_fetch_response", +] diff --git a/server/services/gmail/seen_store.py b/server/services/gmail/seen_store.py new file mode 100644 index 0000000000000000000000000000000000000000..0986b5a26c8cc0f458ae4db779ab659b0f1383f7 --- /dev/null +++ b/server/services/gmail/seen_store.py @@ -0,0 +1,119 @@ +"""Persistence helper for tracking recently processed Gmail message IDs.""" + +from __future__ import annotations + +import json +import threading +from collections import deque +from pathlib import Path +from typing import Deque, Iterable, List, Optional, Set + +from ...logging_config import logger + + +class GmailSeenStore: + """Maintain a bounded set of Gmail message IDs backed by a JSON file.""" + + def __init__(self, path: Path, max_entries: int = 300) -> None: + self._path = path + self._max_entries = max_entries + self._lock = threading.Lock() + self._entries: Deque[str] = deque() + self._index: Set[str] = set() + self._load() + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + def has_entries(self) -> bool: + with self._lock: + return bool(self._entries) + + def is_seen(self, message_id: str) -> bool: + normalized = self._normalize(message_id) + if not normalized: + return False + with self._lock: + return normalized in self._index + + def mark_seen(self, message_ids: Iterable[str]) -> None: + normalized_ids = [mid for mid in (self._normalize(mid) for mid in message_ids) if mid] + if not normalized_ids: + return + + with self._lock: + for message_id in normalized_ids: + if message_id in self._index: + # Refresh recency by removing and re-appending + try: + self._entries.remove(message_id) + except ValueError: # pragma: no cover - defensive + pass + else: + self._index.add(message_id) + self._entries.append(message_id) + + self._prune_locked() + 
self._persist_locked() + + def snapshot(self) -> List[str]: + with self._lock: + return list(self._entries) + + def clear(self) -> None: + with self._lock: + self._entries.clear() + self._index.clear() + self._persist_locked() + + # ------------------------------------------------------------------ + # Internal helpers + # ------------------------------------------------------------------ + def _normalize(self, message_id: Optional[str]) -> str: + if not message_id: + return "" + return str(message_id).strip() + + def _load(self) -> None: + try: + data = json.loads(self._path.read_text(encoding="utf-8")) + except FileNotFoundError: + return + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "Failed to load Gmail seen-store; starting empty", + extra={"path": str(self._path), "error": str(exc)}, + ) + return + + if not isinstance(data, list): + logger.warning( + "Gmail seen-store payload invalid; expected list", + extra={"path": str(self._path)}, + ) + return + + for raw_id in data[-self._max_entries :]: + normalized = self._normalize(raw_id) + if normalized and normalized not in self._index: + self._entries.append(normalized) + self._index.add(normalized) + + def _prune_locked(self) -> None: + while len(self._entries) > self._max_entries: + oldest = self._entries.popleft() + self._index.discard(oldest) + + def _persist_locked(self) -> None: + try: + self._path.parent.mkdir(parents=True, exist_ok=True) + payload = list(self._entries) + self._path.write_text(json.dumps(payload), encoding="utf-8") + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "Failed to persist Gmail seen-store", + extra={"path": str(self._path), "error": str(exc)}, + ) + + +__all__ = ["GmailSeenStore"] diff --git a/server/services/timezone_store.py b/server/services/timezone_store.py new file mode 100644 index 0000000000000000000000000000000000000000..0029975384fab2a5ad7714d5e0d996a1ecf44a7b --- /dev/null +++ b/server/services/timezone_store.py @@ -0,0 +1,78 @@ +"""Persist and expose the user's preferred timezone.""" + +from __future__ import annotations + +import threading +from pathlib import Path +from typing import Optional + +from zoneinfo import ZoneInfo, ZoneInfoNotFoundError + +from ..logging_config import logger + + +class TimezoneStore: + """Stores a single timezone string supplied by the client UI.""" + + def __init__(self, path: Path): + self._path = path + self._lock = threading.Lock() + self._cached: Optional[str] = None + self._load() + + def _load(self) -> None: + try: + value = self._path.read_text(encoding="utf-8").strip() + except FileNotFoundError: + self._cached = None + return + except Exception as exc: # pragma: no cover - defensive + logger.warning("failed to read timezone file", extra={"error": str(exc)}) + self._cached = None + return + + self._cached = value or None + + def get_timezone(self, default: str = "UTC") -> str: + with self._lock: + return self._cached or default + + def set_timezone(self, timezone_name: str) -> None: + validated = self._validate(timezone_name) + with self._lock: + self._path.parent.mkdir(parents=True, exist_ok=True) + self._path.write_text(validated, encoding="utf-8") + self._cached = validated + logger.info("updated timezone preference", extra={"timezone": validated}) + + def clear(self) -> None: + with self._lock: + self._cached = None + try: + if self._path.exists(): + self._path.unlink() + except Exception as exc: # pragma: no cover - defensive + logger.warning("failed to clear timezone file", extra={"error": 
str(exc)}) + + def _validate(self, timezone_name: str) -> str: + candidate = (timezone_name or "").strip() + if not candidate: + raise ValueError("timezone must be a non-empty string") + try: + ZoneInfo(candidate) + except ZoneInfoNotFoundError as exc: + raise ValueError(f"Unknown timezone: {candidate}") from exc + return candidate + + +_DATA_DIR = Path(__file__).resolve().parent.parent / "data" +_TIMEZONE_PATH = _DATA_DIR / "timezone.txt" + +_timezone_store = TimezoneStore(_TIMEZONE_PATH) + + +def get_timezone_store() -> TimezoneStore: + return _timezone_store + + +__all__ = ["TimezoneStore", "get_timezone_store"] diff --git a/server/services/trigger_scheduler.py b/server/services/trigger_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..2db12462b368dbad1727ff9bb7f8fc5a5ce1de6a --- /dev/null +++ b/server/services/trigger_scheduler.py @@ -0,0 +1,162 @@ +"""Background scheduler that watches trigger definitions and executes them.""" + +from __future__ import annotations + +import asyncio +from datetime import datetime, timezone +from typing import Optional, Set + +from ..agents.execution_agent.batch_manager import ExecutionBatchManager +from ..agents.execution_agent.runtime import ExecutionResult +from ..logging_config import logger +from .triggers import TriggerRecord, get_trigger_service + + +UTC = timezone.utc + + +def _utc_now() -> datetime: + return datetime.now(UTC) + + +def _isoformat(dt: datetime) -> str: + return dt.astimezone(UTC).isoformat(timespec="seconds").replace("+00:00", "Z") + + +class TriggerScheduler: + """Polls stored triggers and launches execution agents when due.""" + + def __init__(self, poll_interval_seconds: float = 10.0) -> None: + self._poll_interval = poll_interval_seconds + self._service = get_trigger_service() + self._task: Optional[asyncio.Task[None]] = None + self._running = False + self._in_flight: Set[int] = set() + self._lock = asyncio.Lock() + + async def start(self) -> None: + async with self._lock: + if self._task and not self._task.done(): + return + loop = asyncio.get_running_loop() + self._running = True + self._task = loop.create_task(self._run(), name="trigger-scheduler") + logger.info("Trigger scheduler started", extra={"interval": self._poll_interval}) + + async def stop(self) -> None: + async with self._lock: + self._running = False + if self._task: + self._task.cancel() + try: + await self._task + except asyncio.CancelledError: + pass + self._task = None + logger.info("Trigger scheduler stopped") + + async def _run(self) -> None: + try: + while self._running: + await self._poll_once() + await asyncio.sleep(self._poll_interval) + except asyncio.CancelledError: # pragma: no cover - shutdown path + raise + except Exception as exc: # pragma: no cover - defensive + logger.exception("Trigger scheduler loop crashed", extra={"error": str(exc)}) + + async def _poll_once(self) -> None: + now = _utc_now() + due_triggers = self._service.get_due_triggers(before=now) + if not due_triggers: + return + + for trigger in due_triggers: + if trigger.id in self._in_flight: + continue + self._in_flight.add(trigger.id) + asyncio.create_task(self._execute_trigger(trigger), name=f"trigger-{trigger.id}") + + async def _execute_trigger(self, trigger: TriggerRecord) -> None: + try: + fired_at = _utc_now() + instructions = self._format_instructions(trigger, fired_at) + logger.info( + "Dispatching trigger", + extra={ + "trigger_id": trigger.id, + "agent": trigger.agent_name, + "scheduled_for": trigger.next_trigger, + }, + ) + 
execution_manager = ExecutionBatchManager() + result = await execution_manager.execute_agent( + trigger.agent_name, + instructions, + ) + if result.success: + self._handle_success(trigger, fired_at) + else: + error_text = result.error or result.response + self._handle_failure(trigger, fired_at, error_text) + except Exception as exc: # pragma: no cover - defensive + self._handle_failure(trigger, _utc_now(), str(exc)) + logger.exception( + "Trigger execution failed unexpectedly", + extra={"trigger_id": trigger.id, "agent": trigger.agent_name}, + ) + finally: + self._in_flight.discard(trigger.id) + + def _handle_success(self, trigger: TriggerRecord, fired_at: datetime) -> None: + logger.info( + "Trigger completed", + extra={"trigger_id": trigger.id, "agent": trigger.agent_name}, + ) + self._service.schedule_next_occurrence(trigger, fired_at=fired_at) + + def _handle_failure(self, trigger: TriggerRecord, fired_at: datetime, error: str) -> None: + logger.warning( + "Trigger execution failed", + extra={ + "trigger_id": trigger.id, + "agent": trigger.agent_name, + "error": error, + }, + ) + self._service.record_failure(trigger, error) + if trigger.recurrence_rule: + self._service.schedule_next_occurrence(trigger, fired_at=fired_at) + else: + self._service.clear_next_fire(trigger.id, agent_name=trigger.agent_name) + + def _format_instructions(self, trigger: TriggerRecord, fired_at: datetime) -> str: + scheduled_for = trigger.next_trigger or _isoformat(fired_at) + metadata_lines = [f"Trigger ID: {trigger.id}"] + if trigger.recurrence_rule: + metadata_lines.append(f"Recurrence: {trigger.recurrence_rule}") + if trigger.timezone: + metadata_lines.append(f"Timezone: {trigger.timezone}") + if trigger.start_time: + metadata_lines.append(f"Start Time (UTC): {trigger.start_time}") + + metadata = "\n".join(f"- {line}" for line in metadata_lines) + return ( + f"Trigger fired at {_isoformat(fired_at)} (UTC).\n" + f"Scheduled occurrence time: {scheduled_for}.\n\n" + f"Metadata:\n{metadata}\n\n" + f"Payload:\n{trigger.payload}" + ) + + +_scheduler_instance: Optional[TriggerScheduler] = None + + +def get_trigger_scheduler() -> TriggerScheduler: + global _scheduler_instance + if _scheduler_instance is None: + _scheduler_instance = TriggerScheduler() + return _scheduler_instance + + +__all__ = ["TriggerScheduler", "get_trigger_scheduler"] diff --git a/server/services/triggers/__init__.py b/server/services/triggers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f61010badee8367a8ef80abf3fa0fb8090af8442 --- /dev/null +++ b/server/services/triggers/__init__.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from pathlib import Path + +from .models import TriggerRecord +from .service import TriggerService +from .store import TriggerStore + + +_DATA_DIR = Path(__file__).resolve().parent.parent.parent / "data" +_default_db_path = _DATA_DIR / "triggers.db" +_trigger_store = TriggerStore(_default_db_path) +_trigger_service = TriggerService(_trigger_store) + + +def get_trigger_service() -> TriggerService: + return _trigger_service + + +__all__ = [ + "TriggerRecord", + "TriggerService", + "get_trigger_service", +] diff --git a/server/services/triggers/models.py b/server/services/triggers/models.py new file mode 100644 index 0000000000000000000000000000000000000000..d4aa32118d259917b4ef0feea9a31024effa0cc9 --- /dev/null +++ b/server/services/triggers/models.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from typing import Optional + +from pydantic import BaseModel, 
ConfigDict + + +class TriggerRecord(BaseModel): + """Serialized trigger representation returned to callers.""" + + model_config = ConfigDict(from_attributes=True) + + id: int + agent_name: str + payload: str + start_time: Optional[str] = None + next_trigger: Optional[str] = None + recurrence_rule: Optional[str] = None + timezone: Optional[str] = None + status: str + last_error: Optional[str] = None + created_at: str + updated_at: str + + +__all__ = ["TriggerRecord"] diff --git a/server/services/triggers/service.py b/server/services/triggers/service.py new file mode 100644 index 0000000000000000000000000000000000000000..750429c935308c94ba62df7696a5c0825dac166e --- /dev/null +++ b/server/services/triggers/service.py @@ -0,0 +1,285 @@ +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any, Dict, List, Optional + +from zoneinfo import ZoneInfo + +from ...logging_config import logger +from .models import TriggerRecord +from .store import TriggerStore +from .utils import ( + build_recurrence, + coerce_start_datetime, + load_rrule, + normalize_status, + parse_iso, + resolve_timezone, + to_storage_timestamp, + utc_now, +) + + +MISSED_TRIGGER_GRACE_PERIOD = timedelta(minutes=5) + + +class TriggerService: + """High-level trigger management with recurrence awareness.""" + + def __init__(self, store: TriggerStore): + self._store = store + + def create_trigger( + self, + *, + agent_name: str, + payload: str, + recurrence_rule: Optional[str] = None, + start_time: Optional[str] = None, + timezone_name: Optional[str] = None, + status: Optional[str] = None, + ) -> TriggerRecord: + tz = resolve_timezone(timezone_name) + now = utc_now() + start_dt_local = coerce_start_datetime(start_time, tz, now) + stored_recurrence = build_recurrence(recurrence_rule, start_dt_local, tz) + next_fire = self._compute_next_fire( + stored_recurrence=stored_recurrence, + start_dt_local=start_dt_local, + tz=tz, + now=now, + ) + timestamp = to_storage_timestamp(now) + record: Dict[str, Any] = { + "agent_name": agent_name, + "payload": payload, + "start_time": to_storage_timestamp(start_dt_local), + "next_trigger": to_storage_timestamp(next_fire) if next_fire else None, + "recurrence_rule": stored_recurrence, + "timezone": getattr(tz, "key", "UTC"), + "status": normalize_status(status), + "last_error": None, + "created_at": timestamp, + "updated_at": timestamp, + } + trigger_id = self._store.insert(record) + created = self._store.fetch_one(trigger_id, agent_name) + if not created: # pragma: no cover - defensive + raise RuntimeError("Failed to load trigger after insert") + return created + + def update_trigger( + self, + trigger_id: int, + *, + agent_name: str, + payload: Optional[str] = None, + recurrence_rule: Optional[str] = None, + start_time: Optional[str] = None, + timezone_name: Optional[str] = None, + status: Optional[str] = None, + last_error: Optional[str] = None, + clear_error: bool = False, + ) -> Optional[TriggerRecord]: + existing = self._store.fetch_one(trigger_id, agent_name) + if existing is None: + return None + + tz = resolve_timezone(timezone_name or existing.timezone) + start_reference = ( + parse_iso(existing.start_time) + if existing.start_time + else utc_now() + ) + start_dt_local = coerce_start_datetime(start_time, tz, start_reference) + + fields: Dict[str, Any] = {} + if payload is not None: + fields["payload"] = payload + + normalized_status = None + status_changed_to_active = False + if status is not None: + normalized_status = normalize_status(status) + 
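
`create_trigger` above is keyword-only and persists everything as ISO-8601 UTC strings. A short usage sketch against the package-level singleton exported by `server/services/triggers/__init__.py`; the agent name, payload, and schedule are hypothetical:

```python
from server.services.triggers import get_trigger_service  # singleton from the package __init__

service = get_trigger_service()
record = service.create_trigger(
    agent_name="email-digest",                          # hypothetical execution agent name
    payload="Summarize unread email from today",        # included verbatim in the trigger instructions
    recurrence_rule="FREQ=DAILY;BYHOUR=8;BYMINUTE=0",   # bare rule text; build_recurrence prefixes RRULE: and adds DTSTART
    timezone_name="America/New_York",
)
print(record.id, record.status, record.next_trigger)    # next_trigger is a "Z"-suffixed UTC timestamp
```
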
fields["status"] = normalized_status + status_changed_to_active = ( + normalized_status == "active" and existing.status != "active" + ) + else: + normalized_status = existing.status + + if start_time is not None: + fields["start_time"] = to_storage_timestamp(start_dt_local.astimezone(tz)) + if timezone_name is not None: + fields["timezone"] = getattr(tz, "key", "UTC") + + schedule_inputs_changed = any( + value is not None for value in (recurrence_rule, start_time, timezone_name) + ) + + recurrence_source = ( + recurrence_rule if recurrence_rule is not None else existing.recurrence_rule + ) + if schedule_inputs_changed: + stored_recurrence = ( + build_recurrence(recurrence_source, start_dt_local, tz) + if recurrence_source + else None + ) + else: + stored_recurrence = recurrence_source + + next_trigger_dt = ( + parse_iso(existing.next_trigger) + if existing.next_trigger + else None + ) + now = utc_now() + should_recompute_schedule = schedule_inputs_changed + + if status_changed_to_active: + if next_trigger_dt is None: + should_recompute_schedule = True + else: + missed_duration = now - next_trigger_dt + if missed_duration > MISSED_TRIGGER_GRACE_PERIOD: + should_recompute_schedule = True + + if should_recompute_schedule: + next_fire = self._compute_next_fire( + stored_recurrence=stored_recurrence, + start_dt_local=start_dt_local, + tz=tz, + now=now, + ) + if ( + stored_recurrence is None + and recurrence_rule is None + and start_time is None + and status_changed_to_active + and next_fire is not None + and next_fire <= now + ): + next_fire = now + fields["next_trigger"] = ( + to_storage_timestamp(next_fire) if next_fire else None + ) + if schedule_inputs_changed: + fields["recurrence_rule"] = stored_recurrence + elif schedule_inputs_changed: + fields["recurrence_rule"] = stored_recurrence + + if clear_error: + fields["last_error"] = None + elif last_error is not None: + fields["last_error"] = last_error + + if not fields: + return existing + + updated = self._store.update(trigger_id, agent_name, fields) + return self._store.fetch_one(trigger_id, agent_name) if updated else existing + + def list_triggers(self, *, agent_name: str) -> List[TriggerRecord]: + return self._store.list_for_agent(agent_name) + + def get_due_triggers( + self, *, before: datetime, agent_name: Optional[str] = None + ) -> List[TriggerRecord]: + iso_cutoff = to_storage_timestamp(before) + return self._store.fetch_due(agent_name, iso_cutoff) + + def mark_as_completed(self, trigger_id: int, *, agent_name: str) -> None: + self._store.update( + trigger_id, + agent_name, + { + "status": "completed", + "next_trigger": None, + "last_error": None, + }, + ) + + def schedule_next_occurrence( + self, + trigger: TriggerRecord, + *, + fired_at: datetime, + ) -> Optional[TriggerRecord]: + if not trigger.recurrence_rule: + self.mark_as_completed(trigger.id, agent_name=trigger.agent_name) + return self._store.fetch_one(trigger.id, trigger.agent_name) + + tz = resolve_timezone(trigger.timezone) + next_fire = self._compute_next_after(trigger.recurrence_rule, fired_at, tz) + fields: Dict[str, Any] = { + "next_trigger": to_storage_timestamp(next_fire) if next_fire else None, + "last_error": None, + } + if next_fire is None: + fields["status"] = "completed" + self._store.update(trigger.id, trigger.agent_name, fields) + return self._store.fetch_one(trigger.id, trigger.agent_name) + + def record_failure(self, trigger: TriggerRecord, error: str) -> None: + self._store.update( + trigger.id, + trigger.agent_name, + { + "last_error": error, + }, + ) 
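
Pausing and resuming goes through the same `update_trigger` path shown above; a sketch of that flow (the trigger id and agent name are hypothetical) showing where `MISSED_TRIGGER_GRACE_PERIOD` comes into play:

```python
from server.services.triggers import get_trigger_service

service = get_trigger_service()

# Pause: only the status field changes; the stored schedule is left untouched.
service.update_trigger(7, agent_name="email-digest", status="paused")

# Resume: update_trigger recomputes next_trigger when the stored slot is missing
# or was missed by more than MISSED_TRIGGER_GRACE_PERIOD (five minutes).
record = service.update_trigger(7, agent_name="email-digest", status="active", clear_error=True)
if record is not None:
    print(record.status, record.next_trigger)
```
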
+ + def clear_next_fire(self, trigger_id: int, *, agent_name: str) -> Optional[TriggerRecord]: + self._store.update( + trigger_id, + agent_name, + { + "next_trigger": None, + }, + ) + return self._store.fetch_one(trigger_id, agent_name) + + def clear_all(self) -> None: + self._store.clear_all() + + def _compute_next_fire( + self, + *, + stored_recurrence: Optional[str], + start_dt_local: datetime, + tz: ZoneInfo, + now: datetime, + ) -> Optional[datetime]: + if stored_recurrence: + rule = load_rrule(stored_recurrence) + next_occurrence = rule.after(now.astimezone(tz), inc=True) + if next_occurrence is None: + return None + if next_occurrence.tzinfo is None: + next_occurrence = next_occurrence.replace(tzinfo=tz) + return next_occurrence.astimezone(tz) + + if start_dt_local < now.astimezone(tz): + logger.warning( + "start_time in the past; trigger will fire immediately", + extra={"start_time": start_dt_local.isoformat()}, + ) + return start_dt_local + + def _compute_next_after( + self, + stored_recurrence: str, + fired_at: datetime, + tz: ZoneInfo, + ) -> Optional[datetime]: + rule = load_rrule(stored_recurrence) + next_occurrence = rule.after(fired_at.astimezone(tz), inc=False) + if next_occurrence is None: + return None + if next_occurrence.tzinfo is None: + next_occurrence = next_occurrence.replace(tzinfo=tz) + return next_occurrence.astimezone(tz) + + +__all__ = ["TriggerService", "MISSED_TRIGGER_GRACE_PERIOD"] diff --git a/server/services/triggers/store.py b/server/services/triggers/store.py new file mode 100644 index 0000000000000000000000000000000000000000..a61598d738c6a254adf7d5e9fbadcdb3c6ba7493 --- /dev/null +++ b/server/services/triggers/store.py @@ -0,0 +1,129 @@ +from __future__ import annotations + +import sqlite3 +import threading +from pathlib import Path +from typing import Any, Dict, List, Optional + +from ...logging_config import logger +from .models import TriggerRecord +from .utils import to_storage_timestamp, utc_now + + +class TriggerStore: + """Low-level persistence for triggers backed by SQLite.""" + + def __init__(self, db_path: Path): + self._db_path = db_path + self._lock = threading.Lock() + self._ensure_directory() + self._ensure_schema() + + def _ensure_directory(self) -> None: + try: + self._db_path.parent.mkdir(parents=True, exist_ok=True) + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "trigger directory creation failed", + extra={"error": str(exc)}, + ) + + def _connect(self) -> sqlite3.Connection: + conn = sqlite3.connect(self._db_path, timeout=30, isolation_level=None) + conn.row_factory = sqlite3.Row + return conn + + def _ensure_schema(self) -> None: + schema_sql = """ + CREATE TABLE IF NOT EXISTS triggers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + agent_name TEXT NOT NULL, + payload TEXT NOT NULL, + start_time TEXT, + next_trigger TEXT, + recurrence_rule TEXT, + timezone TEXT, + status TEXT NOT NULL DEFAULT 'active', + last_error TEXT, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + ); + """ + index_sql = """ + CREATE INDEX IF NOT EXISTS idx_triggers_agent_next + ON triggers (agent_name, next_trigger); + """ + with self._lock, self._connect() as conn: + conn.execute("PRAGMA journal_mode=WAL;") + conn.execute(schema_sql) + conn.execute(index_sql) + + def insert(self, payload: Dict[str, Any]) -> int: + with self._lock, self._connect() as conn: + columns = ", ".join(payload.keys()) + placeholders = ", ".join([":" + key for key in payload.keys()]) + sql = f"INSERT INTO triggers ({columns}) VALUES ({placeholders})" + 
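
`_compute_next_fire` and `_compute_next_after` above delegate the recurrence math to dateutil. A standalone sketch of the same computation on a stored recurrence string of the shape `build_recurrence` emits further down; the dates and timezone are hypothetical:

```python
from datetime import datetime
from zoneinfo import ZoneInfo

from dateutil.rrule import rrulestr

# Stored text of the shape build_recurrence produces: a DTSTART line plus the RRULE.
stored = "DTSTART;TZID=America/New_York:20250101T090000\nRRULE:FREQ=DAILY"
rule = rrulestr(stored)

fired_at = datetime(2025, 1, 3, 14, 5, tzinfo=ZoneInfo("UTC"))  # 09:05 that morning in New York
next_occurrence = rule.after(fired_at.astimezone(ZoneInfo("America/New_York")), inc=False)
print(next_occurrence)  # 2025-01-04 09:00:00-05:00, persisted as 2025-01-04T14:00:00Z
```

The DTSTART line carries the timezone, so recurrences keep firing at the same local wall-clock time across DST transitions while storage stays in UTC.
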
conn.execute(sql, payload) + trigger_id = conn.execute("SELECT last_insert_rowid()").fetchone()[0] + return int(trigger_id) + + def fetch_one(self, trigger_id: int, agent_name: str) -> Optional[TriggerRecord]: + with self._lock, self._connect() as conn: + row = conn.execute( + "SELECT * FROM triggers WHERE id = ? AND agent_name = ?", + (trigger_id, agent_name), + ).fetchone() + return self._row_to_record(row) if row else None + + def update(self, trigger_id: int, agent_name: str, fields: Dict[str, Any]) -> bool: + if not fields: + return False + assignments = ", ".join(f"{key} = :{key}" for key in fields.keys()) + sql = ( + f"UPDATE triggers SET {assignments}, updated_at = :updated_at" + " WHERE id = :trigger_id AND agent_name = :agent_name" + ) + payload = { + **fields, + "updated_at": to_storage_timestamp(utc_now()), + "trigger_id": trigger_id, + "agent_name": agent_name, + } + with self._lock, self._connect() as conn: + cursor = conn.execute(sql, payload) + return cursor.rowcount > 0 + + def list_for_agent(self, agent_name: str) -> List[TriggerRecord]: + with self._lock, self._connect() as conn: + rows = conn.execute( + "SELECT * FROM triggers WHERE agent_name = ? ORDER BY next_trigger IS NULL, next_trigger", + (agent_name,), + ).fetchall() + return [self._row_to_record(row) for row in rows] + + def fetch_due( + self, agent_name: Optional[str], before_iso: str + ) -> List[TriggerRecord]: + sql = ( + "SELECT * FROM triggers WHERE status = 'active' AND next_trigger IS NOT NULL" + " AND next_trigger <= ?" + ) + params: List[Any] = [before_iso] + if agent_name: + sql += " AND agent_name = ?" + params.append(agent_name) + sql += " ORDER BY next_trigger, id" + with self._lock, self._connect() as conn: + rows = conn.execute(sql, params).fetchall() + return [self._row_to_record(row) for row in rows] + + def clear_all(self) -> None: + with self._lock, self._connect() as conn: + conn.execute("DELETE FROM triggers") + + def _row_to_record(self, row: sqlite3.Row) -> TriggerRecord: + data = dict(row) + return TriggerRecord.model_validate(data) + + +__all__ = ["TriggerStore"] diff --git a/server/services/triggers/utils.py b/server/services/triggers/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c9ea1ebe0a367b806e6664471664b8b6ff333369 --- /dev/null +++ b/server/services/triggers/utils.py @@ -0,0 +1,140 @@ +from __future__ import annotations + +from datetime import datetime, timedelta, timezone +from typing import Optional + +from dateutil import parser as date_parser +from dateutil.rrule import rrulestr +from zoneinfo import ZoneInfo + +from ...logging_config import logger + + +UTC = timezone.utc +DEFAULT_STATUS = "active" +VALID_STATUSES = {"active", "paused", "completed"} + + +def utc_now() -> datetime: + """Return the current time in UTC.""" + + return datetime.now(UTC) + + +def to_storage_timestamp(moment: datetime) -> str: + """Normalize timestamps before writing to SQLite.""" + + return moment.astimezone(UTC).isoformat(timespec="seconds").replace("+00:00", "Z") + + +def resolve_timezone(timezone_name: Optional[str]) -> ZoneInfo: + """Return a `ZoneInfo` instance, defaulting to UTC on errors.""" + + if timezone_name: + try: + return ZoneInfo(timezone_name) + except Exception: + logger.warning( + "unknown timezone provided; defaulting to UTC", + extra={"timezone": timezone_name}, + ) + return ZoneInfo("UTC") + + +def normalize_status(status: Optional[str]) -> str: + """Clamp trigger status to the known set.""" + + if not status: + return DEFAULT_STATUS + normalized 
= status.lower() + if normalized not in VALID_STATUSES: + logger.warning( + "invalid status supplied; defaulting to active", + extra={"status": status}, + ) + return DEFAULT_STATUS + return normalized + + +def parse_iso(timestamp: str) -> datetime: + """Parse an ISO timestamp, defaulting to UTC when timezone is absent.""" + + dt = date_parser.isoparse(timestamp) + if dt.tzinfo is None: + dt = dt.replace(tzinfo=UTC) + return dt + + +def parse_datetime(timestamp: str, tz: ZoneInfo) -> datetime: + """Parse a timestamp string into the provided timezone.""" + + dt = date_parser.isoparse(timestamp) + if dt.tzinfo is None: + dt = dt.replace(tzinfo=tz) + else: + dt = dt.astimezone(tz) + return dt + + +def coerce_start_datetime( + start_time: Optional[str], tz: ZoneInfo, fallback: datetime +) -> datetime: + """Return the desired start datetime in the agent's timezone.""" + + if start_time: + return parse_datetime(start_time, tz) + return fallback.astimezone(tz) + + +def build_recurrence( + recurrence_rule: Optional[str], + start_dt_local: datetime, + tz: ZoneInfo, +) -> Optional[str]: + """Embed DTSTART metadata into the supplied RRULE text.""" + + if not recurrence_rule: + return None + + if start_dt_local.tzinfo is None: + localized_start = start_dt_local.replace(tzinfo=tz) + else: + localized_start = start_dt_local.astimezone(tz) + + if localized_start.utcoffset() == timedelta(0): + dt_line = f"DTSTART:{localized_start.astimezone(UTC).strftime('%Y%m%dT%H%M%SZ')}" + else: + tz_name = getattr(tz, "key", "UTC") + dt_line = f"DTSTART;TZID={tz_name}:{localized_start.strftime('%Y%m%dT%H%M%S')}" + + lines = [segment.strip() for segment in recurrence_rule.strip().splitlines() if segment.strip()] + filtered = [segment for segment in lines if not segment.upper().startswith("DTSTART")] + if not filtered: + raise ValueError("recurrence_rule must contain an RRULE definition") + + if not filtered[0].upper().startswith("RRULE"): + filtered[0] = f"RRULE:{filtered[0]}" + + return "\n".join([dt_line, *filtered]) + + +def load_rrule(recurrence_text: str): + """Parse a stored recurrence string into a dateutil rule instance.""" + + return rrulestr(recurrence_text) + + +__all__ = [ + "UTC", + "DEFAULT_STATUS", + "VALID_STATUSES", + "build_recurrence", + "coerce_start_datetime", + "load_rrule", + "normalize_status", + "parse_datetime", + "parse_iso", + "resolve_timezone", + "to_storage_timestamp", + "utc_now", +] diff --git a/server/utils/__init__.py b/server/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1bfcccd20318a9d6cdfe6bf6066a9e3a80f45a15 --- /dev/null +++ b/server/utils/__init__.py @@ -0,0 +1,17 @@ +from .responses import error_response +from .timezones import ( + UTC, + convert_to_user_timezone, + get_user_timezone_name, + now_in_user_timezone, + resolve_user_timezone, +) + +__all__ = [ + "error_response", + "UTC", + "convert_to_user_timezone", + "get_user_timezone_name", + "now_in_user_timezone", + "resolve_user_timezone", +] diff --git a/server/utils/responses.py b/server/utils/responses.py new file mode 100644 index 0000000000000000000000000000000000000000..b25596b5b1e34d1360d2b3d08baef1ac85c94837 --- /dev/null +++ b/server/utils/responses.py @@ -0,0 +1,13 @@ +"""Response utilities.""" + +from typing import Optional + +from fastapi.responses import JSONResponse + + +def error_response(message: str, *, status_code: int, detail: Optional[str] = None) -> JSONResponse: + """Create a standardized error response.""" + payload = {"ok": False, "error": message} + if detail: 
+ payload["detail"] = detail + return JSONResponse(payload, status_code=status_code) \ No newline at end of file diff --git a/server/utils/timezones.py b/server/utils/timezones.py new file mode 100644 index 0000000000000000000000000000000000000000..3a7d6a700c0f030c15ecd922e1d970257614dae6 --- /dev/null +++ b/server/utils/timezones.py @@ -0,0 +1,81 @@ +"""Shared helpers for working with the user's preferred timezone.""" + +from __future__ import annotations + +from datetime import datetime, timezone +from typing import Optional + +from zoneinfo import ZoneInfo, ZoneInfoNotFoundError + +from ..logging_config import logger +from ..services.timezone_store import get_timezone_store + +UTC = timezone.utc + + +def get_user_timezone_name(default: str = "UTC") -> str: + """Return the stored timezone preference or a default.""" + + store = get_timezone_store() + return store.get_timezone(default) + + +def resolve_user_timezone(default: str = "UTC") -> ZoneInfo: + """Resolve the stored timezone to a ZoneInfo, falling back to default on error.""" + + tz_name = get_user_timezone_name(default) + try: + return ZoneInfo(tz_name) + except ZoneInfoNotFoundError: + logger.warning( + "unknown timezone; defaulting to %s", + default, + extra={"timezone": tz_name}, + ) + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "timezone resolution failed; defaulting to %s", + default, + extra={"error": str(exc)}, + ) + return ZoneInfo(default) + + +def now_in_user_timezone(fmt: Optional[str] = None, *, default: str = "UTC") -> datetime | str: + """Return the current time in the user's timezone. + + When *fmt* is provided, the result is formatted using ``datetime.strftime``; + otherwise the aware ``datetime`` object is returned. + """ + + current = datetime.now(resolve_user_timezone(default)) + if fmt is None: + return current + return current.strftime(fmt) + + +def convert_to_user_timezone(dt: datetime, *, default: str = "UTC") -> datetime: + """Convert *dt* into the user's timezone with UTC fallback.""" + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=UTC) + + tz = resolve_user_timezone(default) + try: + return dt.astimezone(tz) + except Exception as exc: # pragma: no cover - defensive + logger.warning( + "timezone conversion failed; defaulting to %s", + default, + extra={"error": str(exc)}, + ) + return dt.astimezone(ZoneInfo(default)) + + +__all__ = [ + "UTC", + "convert_to_user_timezone", + "get_user_timezone_name", + "now_in_user_timezone", + "resolve_user_timezone", +] diff --git a/web/app/api/chat/history/route.ts b/web/app/api/chat/history/route.ts new file mode 100644 index 0000000000000000000000000000000000000000..c5536fb04fce2cbe46bd8854cae3182970aeb930 --- /dev/null +++ b/web/app/api/chat/history/route.ts @@ -0,0 +1,30 @@ +const serverBase = process.env.PY_SERVER_URL || 'http://localhost:8001'; +const historyPath = `${serverBase.replace(/\/$/, '')}/api/v1/chat/history`; + +async function forward(method: 'GET' | 'DELETE') { + try { + const res = await fetch(historyPath, { + method, + headers: { Accept: 'application/json' }, + cache: 'no-store', + }); + + const bodyText = await res.text(); + const headers = new Headers({ 'Content-Type': 'application/json; charset=utf-8' }); + return new Response(bodyText || '{}', { status: res.status, headers }); + } catch (error: any) { + const message = error?.message || 'Failed to reach Python server'; + return new Response(JSON.stringify({ error: message }), { + status: 502, + headers: { 'Content-Type': 'application/json; charset=utf-8' }, + 
}); + } +} + +export async function GET() { + return forward('GET'); +} + +export async function DELETE() { + return forward('DELETE'); +} diff --git a/web/app/api/chat/route.ts b/web/app/api/chat/route.ts new file mode 100644 index 0000000000000000000000000000000000000000..04f9405bde60ee2c4408f36dd7c108d9d9d9ebda --- /dev/null +++ b/web/app/api/chat/route.ts @@ -0,0 +1,61 @@ +export const runtime = 'nodejs'; + +type UIMsgPart = { type: string; text?: string }; +type UIMessage = { role: string; parts?: UIMsgPart[]; content?: string }; + +function uiToOpenAIContent(messages: UIMessage[]): { role: string; content: string }[] { + const out: { role: string; content: string }[] = []; + for (const m of messages || []) { + const role = m?.role; + if (!role) continue; + let content = ''; + if (Array.isArray(m.parts)) { + content = m.parts.filter((p) => p?.type === 'text').map((p) => p.text || '').join(''); + } else if (typeof m.content === 'string') { + content = m.content; + } + out.push({ role, content }); + } + return out; +} + +export async function POST(req: Request) { + let body: any; + try { + body = await req.json(); + } catch (e) { + console.error('[chat-proxy] invalid json', e); + return new Response('Invalid JSON', { status: 400 }); + } + + const { messages } = body || {}; + if (!Array.isArray(messages) || messages.length === 0) { + return new Response('Missing messages', { status: 400 }); + } + + const serverBase = process.env.PY_SERVER_URL || 'http://localhost:8001'; + const serverPath = process.env.PY_CHAT_PATH || '/api/v1/chat/send'; + const url = `${serverBase.replace(/\/$/, '')}${serverPath}`; + + const payload = { + system: '', + messages: uiToOpenAIContent(messages), + stream: false, + }; + + try { + const upstream = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json', Accept: 'text/plain, */*' }, + body: JSON.stringify(payload), + }); + const text = await upstream.text(); + return new Response(text, { + status: upstream.status, + headers: { 'Content-Type': 'text/plain; charset=utf-8' }, + }); + } catch (e: any) { + console.error('[chat-proxy] upstream error', e); + return new Response(e?.message || 'Upstream error', { status: 502 }); + } +} diff --git a/web/app/api/gmail/connect/route.ts b/web/app/api/gmail/connect/route.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f7fd049e44768fe109a4650370e3d76eb3a79c6 --- /dev/null +++ b/web/app/api/gmail/connect/route.ts @@ -0,0 +1,31 @@ +export const runtime = 'nodejs'; + +export async function POST(req: Request) { + let body: any = {}; + try { + body = await req.json(); + } catch {} + const userId = body?.userId || ''; + const authConfigId = body?.authConfigId || ''; + + const serverBase = process.env.PY_SERVER_URL || 'http://localhost:8001'; + const url = `${serverBase.replace(/\/$/, '')}/api/v1/gmail/connect`; + + try { + const resp = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json', Accept: 'application/json' }, + body: JSON.stringify({ user_id: userId, auth_config_id: authConfigId }), + }); + const data = await resp.json().catch(() => ({})); + return new Response(JSON.stringify(data), { + status: resp.status, + headers: { 'Content-Type': 'application/json; charset=utf-8' }, + }); + } catch (e: any) { + return new Response( + JSON.stringify({ ok: false, error: 'Upstream error', detail: e?.message || String(e) }), + { status: 502, headers: { 'Content-Type': 'application/json; charset=utf-8' } } + ); + } +} diff --git 
a/web/app/api/gmail/disconnect/route.ts b/web/app/api/gmail/disconnect/route.ts new file mode 100644 index 0000000000000000000000000000000000000000..27e53aac09b05abfb57d5f8ea1c212cd9b0e453c --- /dev/null +++ b/web/app/api/gmail/disconnect/route.ts @@ -0,0 +1,37 @@ +export const runtime = 'nodejs'; + +export async function POST(req: Request) { + let body: any = {}; + try { + body = await req.json(); + } catch {} + + const userId = body?.userId || ''; + const connectionId = body?.connectionId || ''; + const connectionRequestId = body?.connectionRequestId || ''; + + const serverBase = process.env.PY_SERVER_URL || 'http://localhost:8001'; + const url = `${serverBase.replace(/\/$/, '')}/api/v1/gmail/disconnect`; + const payload: any = {}; + if (userId) payload.user_id = userId; + if (connectionId) payload.connection_id = connectionId; + if (connectionRequestId) payload.connection_request_id = connectionRequestId; + + try { + const resp = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json', Accept: 'application/json' }, + body: JSON.stringify(payload), + }); + const data = await resp.json().catch(() => ({})); + return new Response(JSON.stringify(data), { + status: resp.status, + headers: { 'Content-Type': 'application/json; charset=utf-8' }, + }); + } catch (e: any) { + return new Response( + JSON.stringify({ ok: false, error: 'Upstream error', detail: e?.message || String(e) }), + { status: 502, headers: { 'Content-Type': 'application/json; charset=utf-8' } } + ); + } +} diff --git a/web/app/api/gmail/status/route.ts b/web/app/api/gmail/status/route.ts new file mode 100644 index 0000000000000000000000000000000000000000..c517231dc92506fdf028779a3dccdc682068ed16 --- /dev/null +++ b/web/app/api/gmail/status/route.ts @@ -0,0 +1,34 @@ +export const runtime = 'nodejs'; + +export async function POST(req: Request) { + let body: any = {}; + try { + body = await req.json(); + } catch {} + const userId = body?.userId || ''; + const connectionRequestId = body?.connectionRequestId || ''; + + const serverBase = process.env.PY_SERVER_URL || 'http://localhost:8001'; + const url = `${serverBase.replace(/\/$/, '')}/api/v1/gmail/status`; + const payload: any = {}; + if (userId) payload.user_id = userId; + if (connectionRequestId) payload.connection_request_id = connectionRequestId; + + try { + const resp = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json', Accept: 'application/json' }, + body: JSON.stringify(payload), + }); + const data = await resp.json().catch(() => ({})); + return new Response(JSON.stringify(data), { + status: resp.status, + headers: { 'Content-Type': 'application/json; charset=utf-8' }, + }); + } catch (e: any) { + return new Response( + JSON.stringify({ ok: false, error: 'Upstream error', detail: e?.message || String(e) }), + { status: 502, headers: { 'Content-Type': 'application/json; charset=utf-8' } } + ); + } +} diff --git a/web/app/api/timezone/route.ts b/web/app/api/timezone/route.ts new file mode 100644 index 0000000000000000000000000000000000000000..ce2d9827851a804d6eaf27ea13314bf91d5a332c --- /dev/null +++ b/web/app/api/timezone/route.ts @@ -0,0 +1,39 @@ +export const runtime = 'nodejs'; + +export async function POST(req: Request) { + let body: any; + try { + body = await req.json(); + } catch (e) { + return new Response('Invalid JSON', { status: 400 }); + } + + const { timezone } = body || {}; + if (!timezone || typeof timezone !== 'string') { + return new Response('Missing or invalid timezone', { status: 400 }); + 
} + + const serverBase = process.env.PY_SERVER_URL || 'http://localhost:8001'; + const url = `${serverBase.replace(/\/$/, '')}/api/v1/meta/timezone`; + + try { + const upstream = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ timezone }), + }); + + if (!upstream.ok) { + const text = await upstream.text(); + return new Response(text || 'Failed to set timezone', { status: upstream.status }); + } + + const data = await upstream.json(); + return new Response(JSON.stringify(data), { + status: 200, + headers: { 'Content-Type': 'application/json' }, + }); + } catch (e: any) { + return new Response(e?.message || 'Server error', { status: 502 }); + } +} diff --git a/web/app/globals.css b/web/app/globals.css new file mode 100644 index 0000000000000000000000000000000000000000..fde8e61debb0c974e76d683fe9b4560b29e61550 --- /dev/null +++ b/web/app/globals.css @@ -0,0 +1,50 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +:root { + color-scheme: light; +} + +/* Custom styles */ +.btn { + @apply inline-flex items-center justify-center rounded-md border border-transparent bg-brand-600 px-4 py-2 text-sm font-medium text-white shadow-sm hover:bg-brand-700 focus:outline-none focus:ring-2 focus:ring-brand-500 focus:ring-offset-2 disabled:opacity-50 disabled:cursor-not-allowed; +} + +.input { + @apply w-full rounded-full border border-gray-300 bg-white px-4 py-2.5 text-sm shadow-sm placeholder:text-gray-400 focus:border-brand-500 focus:outline-none focus:ring-2 focus:ring-brand-500 disabled:opacity-60; +} + +.card { + @apply rounded-xl border border-gray-200 bg-white shadow-sm; +} + +.chip { + @apply inline-flex items-center rounded-full bg-gray-100 px-2 py-0.5 text-xs font-medium text-gray-700; +} + +/* iMessage-style chat enhancements */ +.chat-wrap { + @apply mx-auto w-full max-w-2xl; +} + +.chat-bg { + @apply bg-[#F5F5F7]; + background-image: radial-gradient(circle at 10% 10%, #ffffff 0%, #f5f5f7 60%); +} + +.bubble-out { + @apply inline-block max-w-[80%] rounded-2xl bg-brand-600 px-4 py-2.5 text-sm text-white shadow-sm; +} + +.bubble-in { + @apply inline-block max-w-[80%] rounded-2xl bg-gray-100 px-4 py-2.5 text-sm text-gray-900 shadow-sm border border-gray-200; +} + +.bubble-tail-out { + @apply rounded-br-md; +} + +.bubble-tail-in { + @apply rounded-bl-md; +} diff --git a/web/app/layout.tsx b/web/app/layout.tsx new file mode 100644 index 0000000000000000000000000000000000000000..f1f85637207c1652e57a0eb654bc787fb85d861e --- /dev/null +++ b/web/app/layout.tsx @@ -0,0 +1,18 @@ +import type { Metadata } from 'next'; +import './globals.css'; + +export const metadata: Metadata = { + title: 'OpenPoke Chat', + description: 'A beautiful AI chat powered by Vercel AI SDK', +}; + +export default function RootLayout({ children }: { children: React.ReactNode }) { + return ( + + + {children} + + + ); +} + diff --git a/web/app/page.tsx b/web/app/page.tsx new file mode 100644 index 0000000000000000000000000000000000000000..dd8216d8572a5558af670e56e90230b9ddfee1af --- /dev/null +++ b/web/app/page.tsx @@ -0,0 +1,260 @@ +'use client'; + +import { useCallback, useEffect, useState } from 'react'; +import SettingsModal, { useSettings } from '@/components/SettingsModal'; +import { ChatHeader } from '@/components/chat/ChatHeader'; +import { ChatInput } from '@/components/chat/ChatInput'; +import { ChatMessages } from '@/components/chat/ChatMessages'; +import { ErrorBanner } from '@/components/chat/ErrorBanner'; +import { useAutoScroll } from 
'@/components/chat/useAutoScroll'; +import type { ChatBubble } from '@/components/chat/types'; + +const POLL_INTERVAL_MS = 1500; + +const formatEscapeCharacters = (text: string): string => { + return text + .replace(/\\n/g, '\n') + .replace(/\\t/g, '\t') + .replace(/\\r/g, '\r') + .replace(/\\\\/g, '\\'); +}; + +const isRenderableMessage = (entry: any) => + typeof entry?.role === 'string' && + typeof entry?.content === 'string' && + entry.content.trim().length > 0; + +const toBubbles = (payload: any): ChatBubble[] => { + if (!Array.isArray(payload?.messages)) return []; + + return payload.messages + .filter(isRenderableMessage) + .map((message: any, index: number) => ({ + id: `history-${index}`, + role: message.role, + text: formatEscapeCharacters(message.content), + })); +}; + +export default function Page() { + const { settings, setSettings } = useSettings(); + const [open, setOpen] = useState(false); + const [input, setInput] = useState(''); + const [messages, setMessages] = useState([]); + const [error, setError] = useState(null); + const [isWaitingForResponse, setIsWaitingForResponse] = useState(false); + const { scrollContainerRef, handleScroll } = useAutoScroll({ + items: messages, + isWaiting: isWaitingForResponse, + }); + const openSettings = useCallback(() => setOpen(true), [setOpen]); + const closeSettings = useCallback(() => setOpen(false), [setOpen]); + + const loadHistory = useCallback(async () => { + try { + const res = await fetch('/api/chat/history', { cache: 'no-store' }); + if (!res.ok) return; + const data = await res.json(); + setMessages(toBubbles(data)); + } catch (err: any) { + if (err?.name === 'AbortError') return; + console.error('Failed to load chat history', err); + } + }, []); + + useEffect(() => { + void loadHistory(); + }, [loadHistory]); + + // Detect and store browser timezone on first load + useEffect(() => { + const detectAndStoreTimezone = async () => { + // Only run if timezone not already stored + if (settings.timezone) return; + + try { + const browserTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone; + + // Send to server + const response = await fetch('/api/timezone', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ timezone: browserTimezone }), + }); + + if (response.ok) { + // Update local settings + setSettings({ ...settings, timezone: browserTimezone }); + } + } catch (error) { + // Fail silently - timezone detection is not critical + console.debug('Timezone detection failed:', error); + } + }; + + void detectAndStoreTimezone(); + }, [settings, setSettings]); + + + useEffect(() => { + const intervalId = window.setInterval(() => { + void loadHistory(); + }, POLL_INTERVAL_MS); + + return () => window.clearInterval(intervalId); + }, [loadHistory]); + + const canSubmit = input.trim().length > 0; + const inputPlaceholder = 'Type a message…'; + + const sendMessage = useCallback( + async (text: string) => { + const trimmed = text.trim(); + if (!trimmed) return; + + setError(null); + setIsWaitingForResponse(true); + + // Optimistically add the user message immediately + const userMessage: ChatBubble = { + id: `user-${Date.now()}`, + role: 'user', + text: formatEscapeCharacters(trimmed), + }; + setMessages(prev => { + const newMessages = [...prev, userMessage]; + return newMessages; + }); + + try { + const res = await fetch('/api/chat', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + messages: [{ role: 'user', content: trimmed }], + }), + }); + + 
if (!(res.ok || res.status === 202)) { + const detail = await res.text(); + throw new Error(detail || `Request failed (${res.status})`); + } + } catch (err: any) { + console.error('Failed to send message', err); + setError(err?.message || 'Failed to send message'); + // Remove the optimistic message on error + setMessages(prev => prev.filter(msg => msg.id !== userMessage.id)); + setIsWaitingForResponse(false); + throw err instanceof Error ? err : new Error('Failed to send message'); + } finally { + // Poll until we get the assistant's response + let pollAttempts = 0; + const maxPollAttempts = 30; // Max 30 attempts (30 seconds) + + const pollForAssistantResponse = async () => { + pollAttempts++; + + try { + const res = await fetch('/api/chat/history', { cache: 'no-store' }); + if (res.ok) { + const data = await res.json(); + const currentMessages = toBubbles(data); + + // Check if the last message is from assistant and contains our user message + const lastMessage = currentMessages[currentMessages.length - 1]; + const hasUserMessage = currentMessages.some(msg => msg.text === trimmed && msg.role === 'user'); + const hasAssistantResponse = lastMessage?.role === 'assistant' && hasUserMessage; + + if (hasAssistantResponse) { + // We got the assistant response, update messages and stop loading + setMessages(currentMessages); + setIsWaitingForResponse(false); + return; + } + } + } catch (err) { + console.error('Error polling for response:', err); + } + + // Continue polling if we haven't exceeded max attempts + if (pollAttempts < maxPollAttempts) { + setTimeout(pollForAssistantResponse, 1000); // Poll every second + } else { + // Timeout - stop loading and update messages anyway + setIsWaitingForResponse(false); + await loadHistory(); + } + }; + + // Start polling after a brief delay + setTimeout(pollForAssistantResponse, 1000); + } + }, + [loadHistory], + ); + + const handleClearHistory = useCallback(async () => { + try { + const res = await fetch('/api/chat/history', { method: 'DELETE' }); + if (!res.ok) { + console.error('Failed to clear chat history', res.statusText); + return; + } + setMessages([]); + } catch (err) { + console.error('Failed to clear chat history', err); + } + }, [setMessages]); + + const triggerClearHistory = useCallback(() => { + void handleClearHistory(); + }, [handleClearHistory]); + + const handleSubmit = useCallback(async () => { + if (!canSubmit) return; + const value = input; + setInput(''); + try { + await sendMessage(value); + } catch { + setInput(value); + } + }, [canSubmit, input, sendMessage, setInput]); + + const handleInputChange = useCallback((value: string) => { + setInput(value); + }, [setInput]); + + const clearError = useCallback(() => setError(null), [setError]); + + return ( +
+    <main className="chat-bg">
+      <ChatHeader onOpenSettings={openSettings} onClearHistory={triggerClearHistory} />
+      <ChatMessages messages={messages} isWaitingForResponse={isWaitingForResponse} scrollContainerRef={scrollContainerRef} onScroll={handleScroll} />
+      {error && <ErrorBanner message={error} onDismiss={clearError} />}
+      <ChatInput value={input} canSubmit={canSubmit} placeholder={inputPlaceholder} onChange={handleInputChange} onSubmit={handleSubmit} />
+      <SettingsModal open={open} onClose={closeSettings} settings={settings} onSave={setSettings} />
+    </main>
+ ); +} diff --git a/web/components/SettingsModal.tsx b/web/components/SettingsModal.tsx new file mode 100644 index 0000000000000000000000000000000000000000..84f13859e2fd767ef81bd789d83d9e8e8678e8ba --- /dev/null +++ b/web/components/SettingsModal.tsx @@ -0,0 +1,487 @@ +"use client"; +import { useCallback, useEffect, useMemo, useState } from 'react'; + +export type Settings = { + timezone: string; +}; + +export function useSettings() { + const [settings, setSettings] = useState({ timezone: '' }); + + useEffect(() => { + try { + const timezone = localStorage.getItem('user_timezone') || ''; + setSettings({ timezone }); + } catch {} + }, []); + + const persist = useCallback((s: Settings) => { + setSettings(s); + try { + localStorage.setItem('user_timezone', s.timezone); + } catch {} + }, []); + + return { settings, setSettings: persist } as const; +} + +function coerceEmailFrom(value: unknown): string | null { + if (typeof value === 'string' && value.includes('@')) { + return value; + } + if (value && typeof value === 'object') { + const candidate = + (value as any).emailAddress ?? + (value as any).email ?? + (value as any).value ?? + (value as any).address; + if (typeof candidate === 'string' && candidate.includes('@')) { + return candidate; + } + } + return null; +} + +function deriveEmailFromPayload(payload: any): string { + if (!payload) return ''; + const profileSlice = payload?.profile; + const candidateObjects: any[] = []; + + if (profileSlice && typeof profileSlice === 'object') { + candidateObjects.push(profileSlice); + if ((profileSlice as any).response_data && typeof (profileSlice as any).response_data === 'object') { + candidateObjects.push((profileSlice as any).response_data); + } + if (Array.isArray((profileSlice as any).items)) { + for (const entry of (profileSlice as any).items as any[]) { + if (entry && typeof entry === 'object') { + if (typeof entry.data === 'object') candidateObjects.push(entry.data); + if (typeof entry.response_data === 'object') candidateObjects.push(entry.response_data); + if (typeof entry.profile === 'object') candidateObjects.push(entry.profile); + } + } + } + } + + const directCandidates = [payload?.email]; + + for (const obj of candidateObjects) { + if (!obj || typeof obj !== 'object') continue; + directCandidates.push( + obj?.email, + obj?.email_address, + obj?.emailAddress, + obj?.profile?.email, + obj?.profile?.emailAddress, + obj?.profile?.email_address, + obj?.user?.email, + obj?.user?.emailAddress, + obj?.user?.email_address, + obj?.data?.email, + obj?.data?.emailAddress, + obj?.data?.email_address, + ); + const emailAddresses = (obj as any).emailAddresses; + if (Array.isArray(emailAddresses)) { + for (const entry of emailAddresses) { + const email = coerceEmailFrom(entry) ?? 
coerceEmailFrom((entry as any)?.value); + if (email) return email; + } + } + } + + for (const candidate of directCandidates) { + const email = coerceEmailFrom(candidate); + if (email) return email; + } + + return ''; +} + +export default function SettingsModal({ + open, + onClose, + settings, + onSave, +}: { + open: boolean; + onClose: () => void; + settings: Settings; + onSave: (s: Settings) => void; +}) { + const [timezone, setTimezone] = useState(settings.timezone); + const [connectingGmail, setConnectingGmail] = useState(false); + const [isRefreshingGmail, setIsRefreshingGmail] = useState(false); + const [isDisconnecting, setIsDisconnecting] = useState(false); + const [gmailStatusMessage, setGmailStatusMessage] = useState(''); + const [gmailConnected, setGmailConnected] = useState(false); + const [gmailEmail, setGmailEmail] = useState(''); + const [gmailConnId, setGmailConnId] = useState(''); + const [gmailProfile, setGmailProfile] = useState | null>(null); + + const readStoredUserId = useCallback(() => { + if (typeof window === 'undefined') return ''; + try { + return localStorage.getItem('openpoke_user_id') || ''; + } catch { + return ''; + } + }, []); + + const ensureUserId = useCallback(() => { + if (typeof window === 'undefined') { + return `web-${Math.random().toString(36).slice(2)}`; + } + try { + const existing = localStorage.getItem('openpoke_user_id'); + if (existing) return existing; + const cryptoObj = (globalThis as { crypto?: Crypto }).crypto; + const randomPart = + cryptoObj && typeof cryptoObj.randomUUID === 'function' + ? cryptoObj.randomUUID().replace(/-/g, '') + : Math.random().toString(36).slice(2); + const generated = `web-${randomPart}`; + localStorage.setItem('openpoke_user_id', generated); + return generated; + } catch { + return `web-${Math.random().toString(36).slice(2)}`; + } + }, []); + + const readStoredConnectionRequestId = useCallback(() => { + if (gmailConnId) return gmailConnId; + if (typeof window === 'undefined') return ''; + try { + return localStorage.getItem('gmail_connection_request_id') || ''; + } catch { + return ''; + } + }, [gmailConnId]); + + useEffect(() => { + try { + const savedConnected = localStorage.getItem('gmail_connected') === 'true'; + const savedConnId = localStorage.getItem('gmail_connection_request_id') || ''; + const savedEmail = localStorage.getItem('gmail_email') || ''; + setGmailConnected(savedConnected); + setGmailConnId(savedConnId); + setGmailEmail(savedEmail); + if (savedConnected && savedEmail) { + setGmailStatusMessage(`Connected as ${savedEmail}`); + } + } catch {} + }, []); + + const gmailProfileDetails = useMemo(() => { + if (!gmailProfile) return [] as { label: string; value: string }[]; + const details: { label: string; value: string }[] = []; + const messagesTotal = (gmailProfile as any)?.messagesTotal; + if (typeof messagesTotal === 'number') { + details.push({ label: 'Messages', value: messagesTotal.toLocaleString() }); + } + const threadsTotal = (gmailProfile as any)?.threadsTotal; + if (typeof threadsTotal === 'number') { + details.push({ label: 'Threads', value: threadsTotal.toLocaleString() }); + } + const historyId = (gmailProfile as any)?.historyId ?? 
(gmailProfile as any)?.historyID; + if (historyId !== undefined && historyId !== null && historyId !== '') { + details.push({ label: 'History ID', value: String(historyId) }); + } + return details; + }, [gmailProfile]); + + const handleConnectGmail = useCallback(async () => { + try { + setConnectingGmail(true); + setGmailStatusMessage(''); + const userId = ensureUserId(); + const resp = await fetch('/api/gmail/connect', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ userId }), + }); + const data = await resp.json().catch(() => ({})); + if (!resp.ok || !data?.ok) { + const msg = data?.error || `Failed (${resp.status})`; + setGmailStatusMessage(msg); + return; + } + const url = data?.redirect_url; + const connId = data?.connection_request_id || ''; + if (connId) { + setGmailConnId(connId); + try { + localStorage.setItem('gmail_connection_request_id', connId); + } catch {} + } + setGmailConnected(false); + setGmailEmail(''); + setGmailProfile(null); + if (url) { + window.open(url, '_blank', 'noopener'); + setGmailStatusMessage('Gmail authorization opened in a new tab. Complete it, then press β€œRefresh status”.'); + } else { + setGmailStatusMessage('Connection initiated. Refresh status once authorization completes.'); + } + } catch (e: any) { + setGmailStatusMessage(e?.message || 'Failed to connect Gmail'); + } finally { + setConnectingGmail(false); + } + }, [ensureUserId]); + + const refreshGmailStatus = useCallback(async () => { + const userId = readStoredUserId(); + const connectionRequestId = readStoredConnectionRequestId(); + if (!userId && !connectionRequestId) { + setGmailConnected(false); + setGmailProfile(null); + setGmailEmail(''); + setGmailStatusMessage('Connect Gmail to get started.'); + return; + } + + try { + setIsRefreshingGmail(true); + setGmailStatusMessage('Refreshing Gmail status…'); + const resp = await fetch('/api/gmail/status', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ userId, connectionRequestId }), + }); + const data = await resp.json().catch(() => ({})); + + if (!resp.ok || !data?.ok) { + const message = data?.error || `Failed (${resp.status})`; + setGmailConnected(false); + setGmailProfile(null); + setGmailEmail(''); + setGmailStatusMessage(message); + return; + } + + if (!gmailConnId && connectionRequestId) { + setGmailConnId(connectionRequestId); + } + + const profileData = data?.profile && typeof data.profile === 'object' ? (data.profile as Record) : null; + setGmailProfile(profileData); + + const derivedEmail = deriveEmailFromPayload({ email: data?.email, profile: profileData }); + const email = derivedEmail || (typeof data?.email === 'string' ? data.email : ''); + const connected = Boolean(data?.connected); + + setGmailConnected(connected); + setGmailEmail(email); + + if (connected) { + const source = typeof data?.profile_source === 'string' ? data.profile_source : ''; + const sourceNote = source === 'fetched' ? 'Verified moments ago.' : source === 'cache' ? 'Loaded from cache.' : ''; + const message = email ? `Connected as ${email}` : 'Gmail connected.'; + setGmailStatusMessage(sourceNote ? 
`${message} ${sourceNote}` : message); + try { + localStorage.setItem('gmail_connected', 'true'); + if (email) localStorage.setItem('gmail_email', email); + if (typeof data?.user_id === 'string' && data.user_id) { + localStorage.setItem('openpoke_user_id', data.user_id); + } + } catch {} + } else { + const statusText = typeof data?.status === 'string' && data.status && data.status !== 'UNKNOWN' + ? `Status: ${data.status}` + : 'Not connected yet.'; + setGmailStatusMessage(statusText); + try { + localStorage.removeItem('gmail_connected'); + localStorage.removeItem('gmail_email'); + } catch {} + } + } catch (e: any) { + setGmailConnected(false); + setGmailProfile(null); + setGmailEmail(''); + setGmailStatusMessage(e?.message || 'Failed to check Gmail status'); + } finally { + setIsRefreshingGmail(false); + } + }, [gmailConnId, readStoredConnectionRequestId, readStoredUserId]); + + const handleDisconnectGmail = useCallback(async () => { + if (typeof window !== 'undefined') { + const proceed = window.confirm('Disconnect Gmail from OpenPoke?'); + if (!proceed) return; + } + + try { + setIsDisconnecting(true); + setGmailStatusMessage('Disconnecting Gmail…'); + const userId = readStoredUserId(); + const connectionRequestId = readStoredConnectionRequestId(); + const resp = await fetch('/api/gmail/disconnect', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ userId, connectionRequestId }), + }); + const data = await resp.json().catch(() => ({})); + + if (!resp.ok || !data?.ok) { + const message = data?.error || `Failed (${resp.status})`; + setGmailStatusMessage(message); + return; + } + + setGmailConnected(false); + setGmailEmail(''); + setGmailProfile(null); + setGmailConnId(''); + setGmailStatusMessage('Gmail disconnected.'); + try { + localStorage.removeItem('gmail_connected'); + localStorage.removeItem('gmail_email'); + localStorage.removeItem('gmail_connection_request_id'); + localStorage.removeItem('openpoke_user_id'); + } catch {} + } catch (e: any) { + setGmailStatusMessage(e?.message || 'Failed to disconnect Gmail'); + } finally { + setIsDisconnecting(false); + } + }, [readStoredConnectionRequestId, readStoredUserId]); + + useEffect(() => { + setTimezone(settings.timezone); + }, [settings]); + + useEffect(() => { + if (!open) return; + void refreshGmailStatus(); + }, [open, refreshGmailStatus]); + + if (!open) return null; + + const connectButtonLabel = connectingGmail ? 'Opening…' : gmailConnected ? 'Reconnect' : 'Connect Gmail'; + const refreshButtonLabel = isRefreshingGmail ? 'Refreshing…' : 'Refresh status'; + const disconnectButtonLabel = isDisconnecting ? 'Disconnecting…' : 'Disconnect'; + + return ( +
+
+
+

Settings

+ +
+
+
+ + setTimezone(e.target.value)} + placeholder="e.g. America/New_York, Europe/London" + readOnly={!timezone} + /> +

+ {timezone ? 'Auto-detected from browser. Edit to override.' : 'Will be auto-detected on next page load.'} +

+
+
+
Integrations
+
+
+
+
Gmail (via Composio)
+

+ Connect Gmail to unlock email search, drafting, and automations inside OpenPoke. +

+
+ + {gmailConnected ? 'Connected' : 'Not connected'} + +
+ + {gmailConnected ? ( +
+
+
Connected account
+
{gmailEmail || 'Email unavailable'}
+
+ {gmailProfileDetails.length > 0 && ( +
+ {gmailProfileDetails.map((item) => ( +
+
{item.label}
+
{item.value}
+
+ ))} +
+ )} + {gmailStatusMessage && ( +

{gmailStatusMessage}

+ )} +
+ ) : ( +
+ {gmailStatusMessage || 'Complete the connection to view your Gmail account details here.'} +
+ )} + +
+ + + {gmailConnected && ( + + )} +
+
+
+
+ +
+ + +
+
+
+ ); +} diff --git a/web/components/chat/ChatHeader.tsx b/web/components/chat/ChatHeader.tsx new file mode 100644 index 0000000000000000000000000000000000000000..199bc2f5b2336bae6951aee2e0e77c2b9af7d695 --- /dev/null +++ b/web/components/chat/ChatHeader.tsx @@ -0,0 +1,28 @@ +interface ChatHeaderProps { + onOpenSettings: () => void; + onClearHistory: () => void; +} + +export function ChatHeader({ onOpenSettings, onClearHistory }: ChatHeaderProps) { + return ( +
+
+

OpenPoke 🌴

+
+
+ + +
+
+ ); +} diff --git a/web/components/chat/ChatInput.tsx b/web/components/chat/ChatInput.tsx new file mode 100644 index 0000000000000000000000000000000000000000..b82373b8a517d37397f24c8730a802ced028f244 --- /dev/null +++ b/web/components/chat/ChatInput.tsx @@ -0,0 +1,27 @@ +import { FormEvent } from 'react'; + +interface ChatInputProps { + value: string; + canSubmit: boolean; + placeholder: string; + onChange: (value: string) => void; + onSubmit: () => Promise | void; +} + +export function ChatInput({ value, canSubmit, placeholder, onChange, onSubmit }: ChatInputProps) { + const handleSubmit = (event: FormEvent) => { + event.preventDefault(); + if (!canSubmit) return; + void onSubmit(); + }; + + return ( +
+ onChange(event.target.value)} placeholder={placeholder} /> + +
+ ); +} + diff --git a/web/components/chat/ChatMessages.tsx b/web/components/chat/ChatMessages.tsx new file mode 100644 index 0000000000000000000000000000000000000000..f7c2225ac2e0f844163fe67f615d13dab160f8a5 --- /dev/null +++ b/web/components/chat/ChatMessages.tsx @@ -0,0 +1,69 @@ +import clsx from 'clsx'; +import { RefObject } from 'react'; + +import type { ChatBubble } from './types'; + +interface ChatMessagesProps { + messages: ReadonlyArray; + isWaitingForResponse: boolean; + scrollContainerRef: RefObject; + onScroll: () => void; +} + +export function ChatMessages({ messages, isWaitingForResponse, scrollContainerRef, onScroll }: ChatMessagesProps) { + return ( +
+ {messages.length === 0 && } + + {messages.map((message, index) => { + const isUser = message.role === 'user'; + const isDraft = message.role === 'draft'; + const next = messages[index + 1]; + const tail = !next || next.role !== message.role; + + return ( +
+
+ {message.text} +
+
+ ); + })} + + {isWaitingForResponse && } +
+ ); +} + +function TypingIndicator() { + return ( +
+
+
+
+
+
+
+
+
+
+
+ ); +} + +function EmptyState() { + return ( +
+

Start a conversation

+

+ Your messages will appear here. Send something to get started. +

+
+ ); +} diff --git a/web/components/chat/ErrorBanner.tsx b/web/components/chat/ErrorBanner.tsx new file mode 100644 index 0000000000000000000000000000000000000000..698c688830dc324c2e78fee882ad19d522b7f2ec --- /dev/null +++ b/web/components/chat/ErrorBanner.tsx @@ -0,0 +1,19 @@ +interface ErrorBannerProps { + message: string; + onDismiss: () => void; +} + +export function ErrorBanner({ message, onDismiss }: ErrorBannerProps) { + return ( +
+
+ Something went wrong. + +
+
{message}
+
+ ); +} + diff --git a/web/components/chat/types.ts b/web/components/chat/types.ts new file mode 100644 index 0000000000000000000000000000000000000000..61ff062a0b9883bbc592ed5b52ad36f6d75b8556 --- /dev/null +++ b/web/components/chat/types.ts @@ -0,0 +1,6 @@ +export interface ChatBubble { + id: string; + role: string; + text: string; +} + diff --git a/web/components/chat/useAutoScroll.ts b/web/components/chat/useAutoScroll.ts new file mode 100644 index 0000000000000000000000000000000000000000..df15391b4dc5f88577df33b8a308a6d9c1e88af5 --- /dev/null +++ b/web/components/chat/useAutoScroll.ts @@ -0,0 +1,41 @@ +import { useCallback, useEffect, useRef } from 'react'; + +interface AutoScrollOptions { + items: ReadonlyArray<unknown>; + isWaiting: boolean; +} + +export const useAutoScroll = ({ items, isWaiting }: AutoScrollOptions) => { + const scrollContainerRef = useRef<HTMLDivElement>(null); + const isUserNearBottomRef = useRef(true); + + const handleScroll = useCallback(() => { + const container = scrollContainerRef.current; + if (!container) return; + + const threshold = 80; + const distanceFromBottom = container.scrollHeight - container.scrollTop - container.clientHeight; + isUserNearBottomRef.current = distanceFromBottom <= threshold; + }, []); + + const scrollToBottom = useCallback((behavior: ScrollBehavior = 'smooth') => { + const container = scrollContainerRef.current; + if (!container) return; + + container.scrollTo({ top: container.scrollHeight, behavior }); + }, []); + + useEffect(() => { + if (isUserNearBottomRef.current) { + const behavior = items.length === 0 ? 'auto' : 'smooth'; + scrollToBottom(behavior); + } + }, [items, isWaiting, scrollToBottom]); + + return { + scrollContainerRef, + handleScroll, + scrollToBottom, + }; +}; + diff --git a/web/next-env.d.ts b/web/next-env.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..40c3d68096c270ef976f3db4e9eb42b05c7067bb --- /dev/null +++ b/web/next-env.d.ts @@ -0,0 +1,5 @@ +/// <reference types="next" /> +/// <reference types="next/image-types/global" /> + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information. 
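For orientation, the chat primitives added above (`ChatBubble`, `useAutoScroll`, `ChatMessages`, `ChatInput`) are defined in this diff but never shown wired together. The following is a minimal sketch of how a page component might compose them; the `ChatPanel` name, the relative import paths, the `'use client'` directive, and the simulated echo reply are assumptions for illustration and are not part of this change.

```tsx
'use client'; // assumed: needed if this sketch lives under the Next.js app router

import { useState } from 'react';

import { ChatInput } from '../components/chat/ChatInput';
import { ChatMessages } from '../components/chat/ChatMessages';
import type { ChatBubble } from '../components/chat/types';
import { useAutoScroll } from '../components/chat/useAutoScroll';

// Hypothetical container component, not part of the diff above.
export function ChatPanel() {
  const [messages, setMessages] = useState<ChatBubble[]>([]);
  const [draft, setDraft] = useState('');
  const [isWaitingForResponse, setIsWaitingForResponse] = useState(false);

  // useAutoScroll owns the scroll container ref and only auto-scrolls
  // when the user is already near the bottom of the message list.
  const { scrollContainerRef, handleScroll } = useAutoScroll({
    items: messages,
    isWaiting: isWaitingForResponse,
  });

  const handleSend = async () => {
    const text = draft.trim();
    if (!text) return;
    setDraft('');
    setMessages((prev) => [...prev, { id: crypto.randomUUID(), role: 'user', text }]);
    setIsWaitingForResponse(true);
    // A real implementation would call the chat API here; this sketch just echoes the input.
    setMessages((prev) => [...prev, { id: crypto.randomUUID(), role: 'agent', text: `Echo: ${text}` }]);
    setIsWaitingForResponse(false);
  };

  return (
    <>
      <ChatMessages
        messages={messages}
        isWaitingForResponse={isWaitingForResponse}
        scrollContainerRef={scrollContainerRef}
        onScroll={handleScroll}
      />
      <ChatInput
        value={draft}
        canSubmit={draft.trim().length > 0 && !isWaitingForResponse}
        placeholder="Message OpenPoke…"
        onChange={setDraft}
        onSubmit={handleSend}
      />
    </>
  );
}
```

`ChatMessages` derives its `isUser` and `isDraft` flags from `message.role`, so any other role string (here `'agent'`) is treated as a non-user message.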
diff --git a/web/next.config.mjs b/web/next.config.mjs new file mode 100644 index 0000000000000000000000000000000000000000..6405c5f4e28be35899859e6d38a1c5cd9c459f8b --- /dev/null +++ b/web/next.config.mjs @@ -0,0 +1,16 @@ +import envPackage from '@next/env'; +import { dirname, resolve } from 'path'; +import { fileURLToPath } from 'url'; + +// Ensure the Next.js runtime loads environment variables declared in the repo root .env +const __dirname = dirname(fileURLToPath(import.meta.url)); +const repoRoot = resolve(__dirname, '..'); +const isDevelopment = process.env.NODE_ENV !== 'production'; + +const { loadEnvConfig } = envPackage; +loadEnvConfig?.(repoRoot, isDevelopment); + +/** @type {import('next').NextConfig} */ +const nextConfig = {}; + +export default nextConfig; diff --git a/web/package-lock.json b/web/package-lock.json new file mode 100644 index 0000000000000000000000000000000000000000..02abae89749efc81bf669fd87b8072b492a0f86b --- /dev/null +++ b/web/package-lock.json @@ -0,0 +1,6115 @@ +{ + "name": "openpoke-web", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "openpoke-web", + "version": "0.1.0", + "dependencies": { + "clsx": "^2.1.1", + "next": "^14.2.7", + "react": "^18.3.1", + "react-dom": "^18.3.1" + }, + "devDependencies": { + "@types/node": "24.5.1", + "@types/react": "19.1.13", + "autoprefixer": "^10.4.20", + "eslint": "^8.57.0", + "eslint-config-next": "^14.2.7", + "postcss": "^8.4.47", + "tailwindcss": "^3.4.10", + "typescript": "^5.6.2" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@emnapi/core": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.5.0.tgz", + "integrity": "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.5.0.tgz", + "integrity": "sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" + } + }, + "node_modules/@next/env": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.32.tgz", + "integrity": "sha512-n9mQdigI6iZ/DF6pCTwMKeWgF2e8lg7qgt5M7HXMLtyhZYMnf/u905M18sSpPmHL9MKp9JHo56C6jrD2EvWxng==", + "license": "MIT" + }, + "node_modules/@next/eslint-plugin-next": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.2.32.tgz", + "integrity": "sha512-tyZMX8g4cWg/uPW4NxiJK13t62Pab47SKGJGVZJa6YtFwtfrXovH4j1n9tdpRdXW03PGQBugYEVGM7OhWfytdA==", + "dev": true, + "license": "MIT", + "dependencies": { + "glob": "10.3.10" + } + }, + "node_modules/@next/swc-darwin-arm64": { + "version": "14.2.32", + "resolved": 
"https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.32.tgz", + "integrity": "sha512-osHXveM70zC+ilfuFa/2W6a1XQxJTvEhzEycnjUaVE8kpUS09lDpiDDX2YLdyFCzoUbvbo5r0X1Kp4MllIOShw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-darwin-x64": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.32.tgz", + "integrity": "sha512-P9NpCAJuOiaHHpqtrCNncjqtSBi1f6QUdHK/+dNabBIXB2RUFWL19TY1Hkhu74OvyNQEYEzzMJCMQk5agjw1Qg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-gnu": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.32.tgz", + "integrity": "sha512-v7JaO0oXXt6d+cFjrrKqYnR2ubrD+JYP7nQVRZgeo5uNE5hkCpWnHmXm9vy3g6foMO8SPwL0P3MPw1c+BjbAzA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.32.tgz", + "integrity": "sha512-tA6sIKShXtSJBTH88i0DRd6I9n3ZTirmwpwAqH5zdJoQF7/wlJXR8DkPmKwYl5mFWhEKr5IIa3LfpMW9RRwKmQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-gnu": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.32.tgz", + "integrity": "sha512-7S1GY4TdnlGVIdeXXKQdDkfDysoIVFMD0lJuVVMeb3eoVjrknQ0JNN7wFlhCvea0hEk0Sd4D1hedVChDKfV2jw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.32.tgz", + "integrity": "sha512-OHHC81P4tirVa6Awk6eCQ6RBfWl8HpFsZtfEkMpJ5GjPsJ3nhPe6wKAJUZ/piC8sszUkAgv3fLflgzPStIwfWg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.32.tgz", + "integrity": "sha512-rORQjXsAFeX6TLYJrCG5yoIDj+NKq31Rqwn8Wpn/bkPNy5rTHvOXkW8mLFonItS7QC6M+1JIIcLe+vOCTOYpvg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-ia32-msvc": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.32.tgz", + "integrity": "sha512-jHUeDPVHrgFltqoAqDB6g6OStNnFxnc7Aks3p0KE0FbwAvRg6qWKYF5mSTdCTxA3axoSAUwxYdILzXJfUwlHhA==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.32.tgz", + "integrity": "sha512-2N0lSoU4GjfLSO50wvKpMQgKd4HdI2UHEhQPPPnlgfBJlOgJxkjpkYBqzk08f1gItBB6xF/n+ykso2hgxuydsA==", + "cpu": [ + 
"x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nolyfill/is-core-module": { + "version": "1.0.39", + "resolved": "https://registry.npmjs.org/@nolyfill/is-core-module/-/is-core-module-1.0.39.tgz", + "integrity": "sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.4.0" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", + "optional": true, + "peer": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rushstack/eslint-patch": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.12.0.tgz", + "integrity": "sha512-5EwMtOqvJMMa3HbmxLlF74e+3/HhwBTMcvt3nqVJgGCozO6hzIPOBlwm8mGVNR9SN2IJpxSnlxczyDjcn7qIyw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "license": "Apache-2.0" + }, + "node_modules/@swc/helpers": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.5.tgz", + "integrity": "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==", + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3", + "tslib": 
"^2.4.0" + } + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.5.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.5.1.tgz", + "integrity": "sha512-/SQdmUP2xa+1rdx7VwB9yPq8PaKej8TD5cQ+XfKDPWWC+VDJU4rvVVagXqKUzhKjtFoNA8rXDJAkCxQPAe00+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.12.0" + } + }, + "node_modules/@types/react": { + "version": "19.1.13", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.13.tgz", + "integrity": "sha512-hHkbU/eoO3EG5/MZkuFSKmYqPbSVk5byPFa3e7y/8TybHiLMACgI8seVYlicwk7H5K/rI2px9xrQp/C+AUDTiQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "csstype": "^3.0.2" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.44.0.tgz", + "integrity": "sha512-EGDAOGX+uwwekcS0iyxVDmRV9HX6FLSM5kzrAToLTsr9OWCIKG/y3lQheCq18yZ5Xh78rRKJiEpP0ZaCs4ryOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.44.0", + "@typescript-eslint/type-utils": "8.44.0", + "@typescript-eslint/utils": "8.44.0", + "@typescript-eslint/visitor-keys": "8.44.0", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.44.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.44.0.tgz", + "integrity": "sha512-VGMpFQGUQWYT9LfnPcX8ouFojyrZ/2w3K5BucvxL/spdNehccKhB4jUyB1yBCXpr2XFm0jkECxgrpXBW2ipoAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.44.0", + "@typescript-eslint/types": "8.44.0", + "@typescript-eslint/typescript-estree": "8.44.0", + "@typescript-eslint/visitor-keys": "8.44.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": 
"8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.44.0.tgz", + "integrity": "sha512-ZeaGNraRsq10GuEohKTo4295Z/SuGcSq2LzfGlqiuEvfArzo/VRrT0ZaJsVPuKZ55lVbNk8U6FcL+ZMH8CoyVA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.44.0", + "@typescript-eslint/types": "^8.44.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.44.0.tgz", + "integrity": "sha512-87Jv3E+al8wpD+rIdVJm/ItDBe/Im09zXIjFoipOjr5gHUhJmTzfFLuTJ/nPTMc2Srsroy4IBXwcTCHyRR7KzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.44.0", + "@typescript-eslint/visitor-keys": "8.44.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.44.0.tgz", + "integrity": "sha512-x5Y0+AuEPqAInc6yd0n5DAcvtoQ/vyaGwuX5HE9n6qAefk1GaedqrLQF8kQGylLUb9pnZyLf+iEiL9fr8APDtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.44.0.tgz", + "integrity": "sha512-9cwsoSxJ8Sak67Be/hD2RNt/fsqmWnNE1iHohG8lxqLSNY8xNfyY7wloo5zpW3Nu9hxVgURevqfcH6vvKCt6yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.44.0", + "@typescript-eslint/typescript-estree": "8.44.0", + "@typescript-eslint/utils": "8.44.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.0.tgz", + "integrity": "sha512-ZSl2efn44VsYM0MfDQe68RKzBz75NPgLQXuGypmym6QVOWL5kegTZuZ02xRAT9T+onqvM6T8CdQk0OwYMB6ZvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.44.0.tgz", + "integrity": "sha512-lqNj6SgnGcQZwL4/SBJ3xdPEfcBuhCG8zdcwCPgYcmiPLgokiNDKlbPzCwEwu7m279J/lBYWtDYL+87OEfn8Jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.44.0", + "@typescript-eslint/tsconfig-utils": "8.44.0", + 
"@typescript-eslint/types": "8.44.0", + "@typescript-eslint/visitor-keys": "8.44.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.44.0.tgz", + "integrity": "sha512-nktOlVcg3ALo0mYlV+L7sWUD58KG4CMj1rb2HUVOO4aL3K/6wcD+NERqd0rrA5Vg06b42YhF6cFxeixsp9Riqg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.44.0", + "@typescript-eslint/types": "8.44.0", + "@typescript-eslint/typescript-estree": "8.44.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.44.0.tgz", + "integrity": "sha512-zaz9u8EJ4GBmnehlrpoKvj/E3dNbuQ7q0ucyZImm3cLqJ8INTc970B1qEqDX/Rzq65r3TvVTN7kHWPBoyW7DWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.44.0", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" 
+ }, + "node_modules/@unrs/resolver-binding-android-arm-eabi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", + "integrity": "sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-android-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm64/-/resolver-binding-android-arm64-1.11.1.tgz", + "integrity": "sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-arm64/-/resolver-binding-darwin-arm64-1.11.1.tgz", + "integrity": "sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-x64/-/resolver-binding-darwin-x64-1.11.1.tgz", + "integrity": "sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-freebsd-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-freebsd-x64/-/resolver-binding-freebsd-x64-1.11.1.tgz", + "integrity": "sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-gnueabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-gnueabihf/-/resolver-binding-linux-arm-gnueabihf-1.11.1.tgz", + "integrity": "sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-musleabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-musleabihf/-/resolver-binding-linux-arm-musleabihf-1.11.1.tgz", + "integrity": "sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-gnu/-/resolver-binding-linux-arm64-gnu-1.11.1.tgz", + "integrity": "sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-musl": { + "version": "1.11.1", 
+ "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-musl/-/resolver-binding-linux-arm64-musl-1.11.1.tgz", + "integrity": "sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-ppc64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-ppc64-gnu/-/resolver-binding-linux-ppc64-gnu-1.11.1.tgz", + "integrity": "sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-gnu/-/resolver-binding-linux-riscv64-gnu-1.11.1.tgz", + "integrity": "sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-musl/-/resolver-binding-linux-riscv64-musl-1.11.1.tgz", + "integrity": "sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-s390x-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-s390x-gnu/-/resolver-binding-linux-s390x-gnu-1.11.1.tgz", + "integrity": "sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-gnu/-/resolver-binding-linux-x64-gnu-1.11.1.tgz", + "integrity": "sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-musl/-/resolver-binding-linux-x64-musl-1.11.1.tgz", + "integrity": "sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-wasm32-wasi/-/resolver-binding-wasm32-wasi-1.11.1.tgz", + "integrity": "sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.11" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/resolver-binding-win32-arm64-msvc": { + "version": 
"1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-arm64-msvc/-/resolver-binding-win32-arm64-msvc-1.11.1.tgz", + "integrity": "sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-ia32-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-ia32-msvc/-/resolver-binding-win32-ia32-msvc-1.11.1.tgz", + "integrity": "sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-x64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-x64-msvc/-/resolver-binding-win32-x64-msvc-1.11.1.tgz", + "integrity": "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + 
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/aria-query": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", + "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-includes": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz", + "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.24.0", + "es-object-atoms": "^1.1.1", + "get-intrinsic": "^1.3.0", + "is-string": "^1.1.1", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz", + "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + 
"es-shim-unscopables": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", + "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", + "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", + "integrity": "sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ast-types-flow": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.8.tgz", + "integrity": "sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.21", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz", + "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": 
"https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.24.4", + "caniuse-lite": "^1.0.30001702", + "fraction.js": "^4.3.7", + "normalize-range": "^0.1.2", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axe-core": { + "version": "4.10.3", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.10.3.tgz", + "integrity": "sha512-Xm7bpRXnDSX2YE2YFfBk2FnF0ep6tmG7xPh8iHee8MIcrgq762Nkce856dYtJYLkuIoYZvGfTs/PbZhideTcEg==", + "dev": true, + "license": "MPL-2.0", + "engines": { + "node": ">=4" + } + }, + "node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.8.4", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.4.tgz", + "integrity": "sha512-L+YvJwGAgwJBV1p6ffpSTa2KRc69EeeYGYjRVWKs0GKrK+LON0GC0gV+rKSNtALEDvMDqkvCFq9r1r94/Gjwxw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + 
"version": "4.26.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.26.2.tgz", + "integrity": "sha512-ECFzp6uFOSB+dcZ5BK/IBaGWssbSYBHvuMeMt3MMFyhI0Z8SqGgEkBLARgpRH3hutIgPVsALcMwbDrJqPxQ65A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.8.3", + "caniuse-lite": "^1.0.30001741", + "electron-to-chromium": "^1.5.218", + "node-releases": "^2.0.21", + "update-browserslist-db": "^1.1.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001743", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001743.tgz", + "integrity": 
"sha512-e6Ojr7RV14Un7dz6ASD0aZDmQPT/A+eZU+nuTNfjqmRrmkmQlnTNWH0SKmqagx9PeW87UVqapSurtAXifmtdmw==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/client-only": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", + "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==", + "license": "MIT" + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "dev": true, + "license": "MIT" + }, + "node_modules/damerau-levenshtein": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", + "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/inspect-js" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + 
"optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.220", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.220.tgz", + "integrity": "sha512-TWXijEwR1ggr4BdAKrb1nMNqYLTx1/4aD1fkeZU+FVJGTKu53/T7UyHKXlqEX3Ub02csyHePbHmkvnrjcaYzMA==", + "dev": true, + "license": "ISC" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": 
"sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-abstract": { + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", + "object-keys": "^1.1.1", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-iterator-helpers": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.2.1.tgz", + "integrity": "sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.6", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + 
"has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "iterator.prototype": "^1.1.4", + "safe-array-concat": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", + "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-to-primitive": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. 
Please see https://eslint.org/version-support for other options.", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-next": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.2.32.tgz", + "integrity": "sha512-mP/NmYtDBsKlKIOBnH+CW+pYeyR3wBhE+26DAqQ0/aRtEBeTEjgY2wAFUugUELkTLmrX6PpuMSSTpOhz7j9kdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@next/eslint-plugin-next": "14.2.32", + "@rushstack/eslint-patch": "^1.3.3", + "@typescript-eslint/eslint-plugin": "^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0", + "@typescript-eslint/parser": "^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-import-resolver-typescript": "^3.5.2", + "eslint-plugin-import": "^2.28.1", + "eslint-plugin-jsx-a11y": "^6.7.1", + "eslint-plugin-react": "^7.33.2", + "eslint-plugin-react-hooks": "^4.5.0 || 5.0.0-canary-7118f5dd7-20230705" + }, + "peerDependencies": { + "eslint": "^7.23.0 || ^8.0.0", + "typescript": ">=3.3.1" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-import-resolver-typescript": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.10.1.tgz", + "integrity": "sha512-A1rHYb06zjMGAxdLSkN2fXPBwuSaQ0iO5M/hdyS0Ajj1VBaRp0sPD3dn1FhME3c/JluGFbwSxyCfqdSbtQLAHQ==", + "dev": true, + "license": 
"ISC", + "dependencies": { + "@nolyfill/is-core-module": "1.0.39", + "debug": "^4.4.0", + "get-tsconfig": "^4.10.0", + "is-bun-module": "^2.0.0", + "stable-hash": "^0.0.5", + "tinyglobby": "^0.2.13", + "unrs-resolver": "^1.6.2" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-import-resolver-typescript" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*", + "eslint-plugin-import-x": "*" + }, + "peerDependenciesMeta": { + "eslint-plugin-import": { + "optional": true + }, + "eslint-plugin-import-x": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz", + "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.32.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz", + "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.9", + "array.prototype.findlastindex": "^1.2.6", + "array.prototype.flat": "^1.3.3", + "array.prototype.flatmap": "^1.3.3", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.12.1", + "hasown": "^2.0.2", + "is-core-module": "^2.16.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.1", + "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.9", + "tsconfig-paths": "^3.15.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-import/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": 
{ + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-plugin-jsx-a11y": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.10.2.tgz", + "integrity": "sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "aria-query": "^5.3.2", + "array-includes": "^3.1.8", + "array.prototype.flatmap": "^1.3.2", + "ast-types-flow": "^0.0.8", + "axe-core": "^4.10.0", + "axobject-query": "^4.1.0", + "damerau-levenshtein": "^1.0.8", + "emoji-regex": "^9.2.2", + "hasown": "^2.0.2", + "jsx-ast-utils": "^3.3.5", + "language-tags": "^1.0.9", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.8", + "safe-regex-test": "^1.0.3", + "string.prototype.includes": "^2.0.1" + }, + "engines": { + "node": ">=4.0" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9" + } + }, + "node_modules/eslint-plugin-react": { + "version": "7.37.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.5.tgz", + "integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.8", + "array.prototype.findlast": "^1.2.5", + "array.prototype.flatmap": "^1.3.3", + "array.prototype.tosorted": "^1.1.4", + "doctrine": "^2.1.0", + "es-iterator-helpers": "^1.2.1", + "estraverse": "^5.3.0", + "hasown": "^2.0.2", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.9", + "object.fromentries": "^2.0.8", + "object.values": "^1.2.1", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.12", + "string.prototype.repeat": "^1.0.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "5.0.0-canary-7118f5dd7-20230705", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.0.0-canary-7118f5dd7-20230705.tgz", + "integrity": "sha512-AZYbMo/NW9chdL7vk6HQzQhT+PvTAEVqWk9ziruUoW2kAOcN5qNyelv70e0F1VNQAbvutOC9oc+xfWycI9FxDw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/eslint-plugin-react/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-plugin-react/node_modules/semver": { + "version": "6.3.1", 
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": 
"https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + 
"integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-symbol-description": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-tsconfig": { + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.1.tgz", + "integrity": "sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "10.3.10", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", + "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.5", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "9.0.5", + "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-bigints": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": 
"sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-bigints": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-boolean-object": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bun-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-bun-module/-/is-bun-module-2.0.0.tgz", + "integrity": 
"sha512-gNCGbnnnnFAUGKeZ9PdbyeGYJqewpmc2aKHUEMO5nQPWU9lOmv7jcmQIv+qHD8fXW6W7qfuCwX4rY9LNRjXrkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.7.1" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-function": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", + "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "get-proto": 
"^1.0.0", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": 
"sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-string": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true, + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, 
+ "node_modules/iterator.prototype": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", + "integrity": "sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "get-proto": "^1.0.0", + "has-symbols": "^1.1.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/jackspeak": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", + "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/jsx-ast-utils": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", + "integrity": 
"sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "object.assign": "^4.1.4", + "object.values": "^1.1.6" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/language-subtag-registry": { + "version": "0.3.23", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.23.tgz", + "integrity": "sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/language-tags": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.9.tgz", + "integrity": "sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==", + "dev": true, + "license": "MIT", + "dependencies": { + "language-subtag-registry": "^0.3.20" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, 
+ "node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/napi-postinstall": { + "version": "0.3.3", + "resolved": 
"https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.3.tgz", + "integrity": "sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==", + "dev": true, + "license": "MIT", + "bin": { + "napi-postinstall": "lib/cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/napi-postinstall" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/next": { + "version": "14.2.32", + "resolved": "https://registry.npmjs.org/next/-/next-14.2.32.tgz", + "integrity": "sha512-fg5g0GZ7/nFc09X8wLe6pNSU8cLWbLRG3TZzPJ1BJvi2s9m7eF991se67wliM9kR5yLHRkyGKU49MMx58s3LJg==", + "license": "MIT", + "dependencies": { + "@next/env": "14.2.32", + "@swc/helpers": "0.5.5", + "busboy": "1.6.0", + "caniuse-lite": "^1.0.30001579", + "graceful-fs": "^4.2.11", + "postcss": "8.4.31", + "styled-jsx": "5.1.1" + }, + "bin": { + "next": "dist/bin/next" + }, + "engines": { + "node": ">=18.17.0" + }, + "optionalDependencies": { + "@next/swc-darwin-arm64": "14.2.32", + "@next/swc-darwin-x64": "14.2.32", + "@next/swc-linux-arm64-gnu": "14.2.32", + "@next/swc-linux-arm64-musl": "14.2.32", + "@next/swc-linux-x64-gnu": "14.2.32", + "@next/swc-linux-x64-musl": "14.2.32", + "@next/swc-win32-arm64-msvc": "14.2.32", + "@next/swc-win32-ia32-msvc": "14.2.32", + "@next/swc-win32-x64-msvc": "14.2.32" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0", + "@playwright/test": "^1.41.2", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "sass": "^1.3.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "@playwright/test": { + "optional": true + }, + "sass": { + "optional": true + } + } + }, + "node_modules/next/node_modules/postcss": { + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/node-releases": { + "version": "2.0.21", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.21.tgz", + "integrity": "sha512-5b0pgg78U3hwXkCM8Z9b2FJdPZlr9Psr9V2gQPESdGHqbntyFJKFW4r5TeWGFzafGY3hzs1JC62VEQMbl1JFkw==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": 
"sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.entries": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.9.tgz", + "integrity": "sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", 
+ "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.values": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz", + "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + 
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + 
], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "dev": true, + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": 
"sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "get-proto": 
"^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.3", + 
"resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": 
"^1.3.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": 
"sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stable-hash": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.5.tgz", + "integrity": "sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA==", + "dev": true, + "license": "MIT" + }, + "node_modules/stop-iteration-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/string.prototype.includes": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/string.prototype.includes/-/string.prototype.includes-2.0.1.tgz", + "integrity": "sha512-o7+c9bW6zpAdJHTtujeePODAhkuicdAryFsfVKwA+wGw89wJ4GTY484WTucM9hLtDEOpOvI+aHnzqnC5lHp4Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/string.prototype.matchall": { + "version": "4.0.12", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.12.tgz", + "integrity": "sha512-6CC9uyBL+/48dYizRf7H7VAYCMCNTBeM78x/VTUe9bFEaxBepPJDa1Ow99LqI/1yF7kuy7Q3cQsYMrcjGUcskA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "regexp.prototype.flags": "^1.5.3", + "set-function-name": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.repeat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-1.0.0.tgz", + "integrity": "sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/styled-jsx": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.1.tgz", + "integrity": "sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==", + "license": "MIT", + "dependencies": { + "client-only": "0.0.1" + }, + "engines": { + "node": ">= 12.0.0" + }, + "peerDependencies": { + "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/sucrase": { + "version": "3.35.0", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz", + "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "glob": "^10.3.10", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.17", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.17.tgz", + "integrity": 
"sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.6", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tailwindcss/node_modules/postcss-load-config": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz", + "integrity": "sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.0.0", + "yaml": "^2.3.4" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true, + "license": "MIT" + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + 
"optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/tsconfig-paths": { + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0", + "reflect.getprototypeof": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", + "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/unbox-primitive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-bigints": "^1.0.2", + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/undici-types": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.12.0.tgz", + "integrity": "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/unrs-resolver": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/unrs-resolver/-/unrs-resolver-1.11.1.tgz", + "integrity": "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "napi-postinstall": "^0.3.0" + }, + "funding": { + "url": "https://opencollective.com/unrs-resolver" + }, + "optionalDependencies": { + "@unrs/resolver-binding-android-arm-eabi": "1.11.1", + 
"@unrs/resolver-binding-android-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-x64": "1.11.1", + "@unrs/resolver-binding-freebsd-x64": "1.11.1", + "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", + "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", + "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-musl": "1.11.1", + "@unrs/resolver-binding-wasm32-wasi": "1.11.1", + "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", + "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", + "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": 
"sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + 
"version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/web/package.json b/web/package.json new file mode 100644 index 0000000000000000000000000000000000000000..37d5ae2d833c87eee6d95e0dbda03e9c779adca4 --- /dev/null +++ b/web/package.json @@ -0,0 +1,27 @@ +{ + "name": "openpoke-web", + "private": true, + "version": "0.1.0", + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint" + }, + "dependencies": { + "clsx": "^2.1.1", + "next": "^14.2.7", + "react": "^18.3.1", + "react-dom": "^18.3.1" + }, + "devDependencies": { + "@types/node": "24.5.1", + "@types/react": "19.1.13", + "autoprefixer": "^10.4.20", + "eslint": "^8.57.0", + "eslint-config-next": "^14.2.7", + "postcss": "^8.4.47", + "tailwindcss": "^3.4.10", + "typescript": "^5.6.2" + } +} diff --git a/web/postcss.config.js 
b/web/postcss.config.js new file mode 100644 index 0000000000000000000000000000000000000000..c21c0763565b546f63508cc915438b3e2b68a2de --- /dev/null +++ b/web/postcss.config.js @@ -0,0 +1,7 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; + diff --git a/web/tailwind.config.ts b/web/tailwind.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..138e086fd21ad9914b89cf21310ea933ef191733 --- /dev/null +++ b/web/tailwind.config.ts @@ -0,0 +1,30 @@ +import type { Config } from 'tailwindcss'; + +const config: Config = { + content: [ + './app/**/*.{ts,tsx}', + './components/**/*.{ts,tsx}', + ], + theme: { + extend: { + colors: { + brand: { + 50: '#f0f7ff', + 100: '#e1effe', + 200: '#c3dffe', + 300: '#a5cffe', + 400: '#87bffe', + 500: '#69affd', + 600: '#4b9ffd', + 700: '#2d8ffd', + 800: '#0f7ffd', + 900: '#0a66cc' + } + } + }, + }, + darkMode: 'class' +}; + +export default config; + diff --git a/web/tsconfig.json b/web/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..c38c68f99b259cd4663e0fb6aa145a2ffb2a7576 --- /dev/null +++ b/web/tsconfig.json @@ -0,0 +1,44 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], + "allowJs": false, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "baseUrl": ".", + "paths": { + "@/components/*": [ + "components/*" + ], + "@/lib/*": [ + "lib/*" + ] + }, + "plugins": [ + { + "name": "next" + } + ] + }, + "include": [ + "next-env.d.ts", + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts" + ], + "exclude": [ + "node_modules" + ] +}
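The Tailwind and TypeScript configuration above defines a custom `brand` color scale, class-based dark mode, and the `@/components/*` / `@/lib/*` path aliases. The following sketch shows how a component under `web/components/` could exercise those settings; the file `components/BrandButton.tsx`, its props, and the specific class choices are illustrative assumptions and are not part of this diff.

// components/BrandButton.tsx (hypothetical example; not part of this diff)
// Uses the `brand` palette and class-based dark mode declared in web/tailwind.config.ts.
import clsx from 'clsx';
import type { ReactNode } from 'react';

type BrandButtonProps = {
  children: ReactNode;
  onClick?: () => void;
  disabled?: boolean;
};

export default function BrandButton({ children, onClick, disabled }: BrandButtonProps) {
  return (
    <button
      type="button"
      onClick={onClick}
      disabled={disabled}
      className={clsx(
        // bg-brand-* utilities are generated from the colors.brand scale in tailwind.config.ts
        'rounded-md px-4 py-2 text-white bg-brand-600 hover:bg-brand-700',
        // dark: variants apply when a `dark` class is present on <html>, per darkMode: 'class'
        'dark:bg-brand-800 dark:hover:bg-brand-900',
        disabled && 'cursor-not-allowed opacity-50'
      )}
    >
      {children}
    </button>
  );
}

A page or layout would import it as `import BrandButton from '@/components/BrandButton';`, which resolves through the `baseUrl`/`paths` mapping in web/tsconfig.json, and the `./components/**/*.{ts,tsx}` content glob in web/tailwind.config.ts ensures the `brand` classes used here are picked up at build time.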