Update app.py
app.py CHANGED
@@ -1,4 +1,3 @@
-
 import discord
 import logging
 import os
@@ -13,7 +12,6 @@ intents = discord.Intents.default()
 intents.messages = True
 
 # Set up the inference API client
-#hf_client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=os.getenv("HF_TOKEN"))
 hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus", token=os.getenv("HF_TOKEN"))
 
 # Variable to store the conversation history
@@ -68,7 +66,8 @@ async def generate_response(user_input):
     # Logic to handle the streaming response
     full_response = ""
     for part in response:
-
+        logging.debug(f'Part received from stream: {part}')  # Log each part of the streaming response
+        if part.choices and part.choices[0].delta and part.choices[0].delta.content:
         full_response += part.choices[0].delta.content.strip()
 
     conversation_history.append({"role": "assistant", "content": full_response})
@@ -79,4 +78,3 @@ async def generate_response(user_input):
 # Create and run the Discord bot instance
 discord_client = MyClient(intents=intents)
 discord_client.run(os.getenv('DISCORD_TOKEN'))
-
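
For context, a minimal sketch of how the patched streaming loop might sit inside generate_response. It assumes the stream comes from InferenceClient.chat_completion(..., stream=True) and that conversation_history is a list of {"role": ..., "content": ...} dicts; neither the call nor the surrounding setup appears in these hunks, so everything other than the lines taken from the diff is illustrative. Note also that, for the new guard to have any effect, the existing full_response += ... line presumably needs to be indented one level deeper, under the if.

import logging
import os

from huggingface_hub import InferenceClient

logging.basicConfig(level=logging.DEBUG)

hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus", token=os.getenv("HF_TOKEN"))
conversation_history = []

async def generate_response(user_input):
    conversation_history.append({"role": "user", "content": user_input})

    # Hypothetical call: the real app.py may pass a system prompt or different parameters.
    response = hf_client.chat_completion(
        messages=conversation_history, max_tokens=1000, stream=True
    )

    # Accumulate the streamed answer, skipping chunks whose delta carries no text.
    full_response = ""
    for part in response:
        logging.debug(f'Part received from stream: {part}')  # Log each part of the streaming response
        if part.choices and part.choices[0].delta and part.choices[0].delta.content:
            full_response += part.choices[0].delta.content.strip()

    conversation_history.append({"role": "assistant", "content": full_response})
    return full_response

The point of the added guard is to skip stream chunks whose delta carries no content (for example role-only or finish chunks), which would otherwise make the concatenation fail on None.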