import asyncio

import httpx
from openai import OpenAI

from fetch import extract_portfolio_id
from prompts import ONE_PROMPT, TEST1


async def get_portfolio_metrics(portfolio_id: str) -> dict | None:
    """Fetch extended metrics for a TradeLink portfolio and keep only the numeric fields."""
    url = f"https://api.tradelink.pro/portfolio/get?portfolioId={portfolio_id}&extended=1"
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.get(url)
            response.raise_for_status()
            data = response.json().get("data", {}).get("extended", {})
        # Drop non-numeric fields so the metrics can be flattened into a compact prompt.
        return {k: v for k, v in data.items() if isinstance(v, (int, float))}
    except Exception as e:
        print(f"[API Error]: {e}")
        return None
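
# Hedged usage sketch (not from the original file): the id below is a placeholder, not a
# real TradeLink portfolio. Run as a one-off script, the helper above could be exercised as:
#
#     metrics = asyncio.run(get_portfolio_metrics("YOUR_PORTFOLIO_ID"))
#     print(metrics)  # numeric fields only, or None if the request failed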


def analyze_portfolio_streaming(text: str, client: OpenAI):
    """Stream an LLM analysis of a TradeLink portfolio, yielding progressively longer text."""
    portfolio_id = extract_portfolio_id(text)
    if not portfolio_id:
        yield "❗ Укажите корректный portfolioId или ссылку."  # "Provide a valid portfolioId or link."
        return

    yield "⏳ Загружаю метрики портфеля..."  # "Loading portfolio metrics..."
    metrics = asyncio.run(get_portfolio_metrics(portfolio_id))
    if not metrics:
        yield "❗ Не удалось получить метрики портфеля."  # "Failed to fetch portfolio metrics."
        return

    # Serialize the numeric metrics into a single line for the user prompt.
    metrics_text = ", ".join(f"{k}: {v}" for k, v in metrics.items())
    prompt = (
        f"Вот метрики портфеля: {metrics_text}. "
        "Проанализируй их. Дай общий отчёт на русском языке."
    )  # "Here are the portfolio metrics: ... Analyze them. Give a general report in Russian."

    try:
        response_llm = client.chat.completions.create(
            model="meta-llama/Meta-Llama-3.1-8B-Instruct",
            messages=[
                {"role": "system", "content": TEST1},
                {"role": "user", "content": prompt},
            ],
            stream=True,
        )
        # Accumulate streamed tokens and re-yield the growing text so a UI can live-update.
        partial = ""
        for chunk in response_llm:
            delta = chunk.choices[0].delta.content
            if delta:
                partial += delta
                yield partial
    except Exception as e:
        yield f"❌ Ошибка при генерации ответа: {e}"  # "Error while generating the response."