# Spaces:
# Sleeping
# Sleeping
"""
🇬🇧 Module: chat.py
Purpose: General chat interface for user questions about investments or portfolios.

🇷🇺 Модуль: chat.py
Назначение: общий чат-помощник для ответов на вопросы об инвестициях и портфелях.
"""
from typing import Generator

from services.llm_client import llm_service
from prompts.system_prompts import GENERAL_CONTEXT
class ChatAssistant:
    """General-purpose chat assistant backed by an LLM.

    Wraps an LLM client and streams answers to free-form user questions,
    prepending the shared GENERAL_CONTEXT system prompt to every
    conversation.
    """

    def __init__(self, llm=llm_service, model_name: str = "meta-llama/Meta-Llama-3.1-8B-Instruct"):
        # llm: client exposing stream_chat(messages=..., model=...);
        #      defaults to the project-wide llm_service instance.
        # model_name: identifier of the chat model to query.
        self.llm = llm
        self.model_name = model_name

    def run(self, user_input: str) -> Generator[str, None, None]:
        """Yield the assistant's reply as a progressively growing string.

        Every yielded value is the full text accumulated so far (not just
        the latest delta), which suits UI widgets that re-render the whole
        message on each update. On any failure a single error string is
        yielded instead of raising, so the caller's UI never crashes.
        """
        conversation = [
            {"role": "system", "content": GENERAL_CONTEXT},
            {"role": "user", "content": user_input},
        ]
        try:
            answer_so_far = ""
            # NOTE(review): assumes stream_chat yields incremental text
            # chunks — confirm against services.llm_client.
            for chunk in self.llm.stream_chat(messages=conversation, model=self.model_name):
                answer_so_far += chunk
                yield answer_so_far
        except Exception as e:
            # Surface the failure to the user rather than propagating it.
            yield f"❌ Ошибка при генерации ответа: {e}"