import openai

from .base_model import BaseLLMModel
from .. import shared
from ..config import retrieve_proxy


class OpenAI_Instruct_Client(BaseLLMModel):
    def __init__(self, model_name, api_key, user_name="") -> None:
        super().__init__(model_name=model_name, user=user_name)
        self.api_key = api_key

    def _get_instruct_style_input(self):
        # Flatten the chat history into a single plain-text prompt,
        # separating each message's content with a blank line.
        return "\n\n".join([item["content"] for item in self.history])

    def get_answer_at_once(self):
        prompt = self._get_instruct_style_input()
        # Route the request through the configured proxy, if any.
        with retrieve_proxy():
            response = openai.Completion.create(
                api_key=self.api_key,
                api_base=shared.state.openai_api_base,
                model=self.model_name,
                prompt=prompt,
                temperature=self.temperature,
                top_p=self.top_p,
            )
        # Return the completion text and the total token count reported by the API.
        return response.choices[0].text.strip(), response.usage["total_tokens"]
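

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of how this client might be driven, assuming BaseLLMModel
# exposes `history`, `temperature`, and `top_p`, and that the host app has set
# `shared.state.openai_api_base`. The model name "gpt-3.5-turbo-instruct" is
# only an example of a completion-style (instruct) model.
#
#   client = OpenAI_Instruct_Client(
#       model_name="gpt-3.5-turbo-instruct",
#       api_key="sk-...",
#   )
#   client.history.append({"role": "user", "content": "Say hello."})
#   answer, total_tokens = client.get_answer_at_once()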