"""
🇬🇧 Module: chat.py
Purpose: General chat interface for user questions about investments or portfolios.

🇷🇺 Модуль: chat.py
Назначение: общий чат-помощник для ответов на вопросы об инвестициях и портфелях.
"""

from typing import Generator
from services.llm_client import llm_service
from prompts.system_prompts import GENERAL_CONTEXT


class ChatAssistant:
    """Handles general user dialogue via LLM."""

    def __init__(self, llm=llm_service, model_name: str = "meta-llama/Meta-Llama-3.1-8B-Instruct"):
        self.llm = llm
        self.model_name = model_name

    def run(self, user_input: str) -> Generator[str, None, None]:
        """Stream chat responses."""
        messages = [
            {"role": "system", "content": GENERAL_CONTEXT},
            {"role": "user", "content": user_input},
        ]

        try:
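            # Accumulate streamed chunks and yield the full text so far on each update,
            # so callers can re-render the growing response as it arrives.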
            partial = ""
            for delta in self.llm.stream_chat(messages=messages, model=self.model_name):
                partial += delta
                yield partial

        except Exception as e:
            yield f"❌ Ошибка при генерации ответа: {e}"