# NOTE: scraped page metadata (Hugging Face Space listing: status, file size,
# commit hashes, line-number gutter) was fused into this file during extraction
# and has been removed — it was not part of the module and broke parsing.
"""
🇬🇧 Module: llm_client.py
Purpose: Adapter for Featherless.ai (OpenAI-compatible API).
🇷🇺 Модуль: llm_client.py
Назначение: адаптер для LLM-инференса через Featherless.ai (совместимо с OpenAI API).
"""
import os
from typing import Dict, Generator, List, Optional

from openai import OpenAI

from config import FEATHERLESS_API_KEY, FEATHERLESS_MODEL
class FeatherlessLLM:
    """Wrapper for LLM inference via Featherless.ai (OpenAI-compatible API)."""

    def __init__(self, api_key: str = FEATHERLESS_API_KEY, model: str = FEATHERLESS_MODEL):
        """Create a client bound to the Featherless.ai endpoint.

        Args:
            api_key: Featherless.ai API key; defaults to the value loaded in config.
            model: Default model identifier used when stream_chat gets no override.

        Raises:
            RuntimeError: If no API key is available (fail fast at construction
                rather than on the first request).
        """
        if not api_key:
            raise RuntimeError("❌ Environment variable 'featherless' (API key) is missing.")
        self.client = OpenAI(base_url="https://api.featherless.ai/v1", api_key=api_key)
        self.model = model

    def stream_chat(self, *, messages: List[Dict], model: Optional[str] = None) -> Generator[str, None, None]:
        """Stream chat-completion content deltas from Featherless.ai.

        Args:
            messages: OpenAI-style chat messages (dicts with "role"/"content").
            model: Optional model override; falls back to the instance default.

        Yields:
            Non-empty content fragments as they arrive from the stream.
        """
        used_model = model or self.model
        response = self.client.chat.completions.create(
            model=used_model,
            messages=messages,
            stream=True,
        )
        for chunk in response:
            # Streaming chunks may arrive with an empty `choices` list
            # (e.g. trailing usage/metadata chunks) — skip them instead of
            # raising IndexError.
            if not chunk.choices:
                continue
            delta = chunk.choices[0].delta.content
            # Role-only and final chunks carry delta.content == None; yield
            # only actual text.
            if delta:
                yield delta
# === Global singleton instance ===
# NOTE: instantiated at import time — importing this module raises RuntimeError
# if FEATHERLESS_API_KEY is unset (see FeatherlessLLM.__init__).
llm_service = FeatherlessLLM()