Spaces: Sleeping

Commit fed3387 · Parent(s): 579c644

Update app.py

app.py CHANGED
@@ -13,7 +13,7 @@ def respond(message, history, system_message, max_tokens, temperature, top_p, se
     # Create an InferenceClient for the selected model
     client = InferenceClient(model_list.get(selected_model, "HuggingFaceH4/zephyr-7b-beta"))
 
-    # Build conversation messages for the client
+    # Build conversation messages for the client
     messages = [{"role": "system", "content": system_message}]
     for user_msg, assistant_msg in history:
         if user_msg:  # Only add non-empty messages
@@ -147,35 +147,11 @@ with gr.Blocks(css=css) as demo:
 
     # Main area: Chat interface
     with gr.Column(scale=3):
-        # Using a custom chatbot renderer to add emojis
         chatbot = gr.Chatbot(
             label="Conversation",
             show_label=True,
-            height=400,
-            render=False  # We'll render the messages manually
+            height=400
         )
-
-        # Custom renderer for chat messages
-        def render_chat(history):
-            result = []
-            for user_msg, bot_msg in history:
-                if user_msg is None:
-                    user_msg = ""
-                if bot_msg is None:
-                    bot_msg = ""
-
-                # Add emojis to non-empty messages
-                if user_msg and not user_msg.startswith("👤"):
-                    user_msg = f"👤 {user_msg}"
-                if bot_msg and not bot_msg.startswith("🛡️"):
-                    bot_msg = f"🛡️ {bot_msg}"
-
-                result.append((user_msg, bot_msg))
-            return result
-
-        # Custom chatbot with emoji rendering
-        chatbot.render = render_chat
-
         with gr.Row():
             user_input = gr.Textbox(
                 placeholder="Type your message here...",
@@ -194,8 +170,9 @@ with gr.Blocks(css=css) as demo:
 
     # Define functions for chatbot interactions
     def user(user_message, history):
-        #
-
+        # Add emoji to user message
+        user_message_with_emoji = f"👤 {user_message}"
+        return "", history + [[user_message_with_emoji, None]]
 
     def bot(history, system_message, max_tokens, temperature, top_p, selected_model):
         # Ensure there's history
@@ -204,11 +181,23 @@ with gr.Blocks(css=css) as demo:
 
         # Get the last user message from history
        user_message = history[-1][0]
+        # Remove emoji for processing if present
+        if user_message.startswith("👤 "):
+            user_message = user_message[2:].strip()
+
+        # Process previous history to clean emojis
+        clean_history = []
+        for h_user, h_bot in history[:-1]:
+            if h_user and h_user.startswith("👤 "):
+                h_user = h_user[2:].strip()
+            if h_bot and h_bot.startswith("🛡️ "):
+                h_bot = h_bot[2:].strip()
+            clean_history.append([h_user, h_bot])
 
         # Call respond function with the message
         response_generator = respond(
             user_message,
-
+            clean_history,  # Pass clean history
             system_message,
             max_tokens,
             temperature,
@@ -216,12 +205,12 @@ with gr.Blocks(css=css) as demo:
             selected_model
         )
 
-        # Update history as responses come in
+        # Update history as responses come in, adding emoji
         for response in response_generator:
-            history[-1][1] = response
+            history[-1][1] = f"🛡️ {response}"
             yield history
 
-    # Wire up the event chain
+    # Wire up the event chain
     user_input.submit(
         user,
         [user_input, chatbot],
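
The first hunk only touches the top of respond(), which builds `messages` from the system prompt and the history and then streams the reply back to the caller. The rest of the function is not part of this diff; for orientation only, a typical streaming body for this kind of Space might look like the sketch below. The chat_completion call, its parameters, and the model_list contents here are assumptions based on the standard huggingface_hub client pattern, not code from this app.

# Hedged sketch: one plausible continuation of respond(), not shown in the diff.
from huggingface_hub import InferenceClient

def respond(message, history, system_message, max_tokens, temperature, top_p, selected_model):
    model_list = {"Zephyr 7B": "HuggingFaceH4/zephyr-7b-beta"}  # illustrative mapping
    client = InferenceClient(model_list.get(selected_model, "HuggingFaceH4/zephyr-7b-beta"))

    # Build conversation messages for the client
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:  # Only add non-empty messages
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Stream partial completions back to the caller (assumed API usage)
    response = ""
    for chunk in client.chat_completion(
        messages, max_tokens=max_tokens, stream=True,
        temperature=temperature, top_p=top_p,
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response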
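
The bulk of the change drops the old render=False / chatbot.render trick in favour of a simpler round trip: user() prefixes the displayed message with 👤, and bot() strips the 👤/🛡️ prefixes before the text reaches respond(), then re-adds 🛡️ to each streamed reply. A small standalone sketch of that round trip, using a made-up history value, for quick verification:

# Standalone sketch of the emoji round trip; the sample history is made up.
USER_PREFIX = "👤 "
BOT_PREFIX = "🛡️ "

def add_user_emoji(message):
    # Mirrors the new user() handler: prefix the displayed message.
    return f"{USER_PREFIX}{message}"

def strip_prefix(text, prefix):
    # Mirrors the cleanup in bot(): drop the emoji before calling respond().
    if text and text.startswith(prefix):
        return text[len(prefix):].strip()
    return text

history = [
    ["👤 Hello", "🛡️ Hi, how can I help?"],
    [add_user_emoji("What does this commit change?"), None],
]

clean_history = [[strip_prefix(u, USER_PREFIX), strip_prefix(b, BOT_PREFIX)]
                 for u, b in history[:-1]]
user_message = strip_prefix(history[-1][0], USER_PREFIX)

print(clean_history)  # [['Hello', 'Hi, how can I help?']]
print(user_message)   # What does this commit change?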
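
The diff is cut off in the middle of the user_input.submit(...) call, so the rest of the event chain is not visible here. For orientation only, handlers shaped like user() and bot() are usually chained in Gradio as a two-step submit/then sequence; the sketch below is a self-contained, simplified stand-in, not the app's actual wiring, and its bot() fakes the streaming instead of calling respond().

# Hypothetical wiring sketch (simplified): the real app passes more inputs to bot().
import gradio as gr

def user(user_message, history):
    # Append the user turn with the 👤 prefix and clear the textbox.
    return "", history + [[f"👤 {user_message}", None]]

def bot(history):
    # Stand-in for the app's bot(): stream a fake reply with the 🛡️ prefix.
    reply = "example streamed reply"
    for i in range(1, len(reply) + 1):
        history[-1][1] = f"🛡️ {reply[:i]}"
        yield history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Conversation", height=400)
    user_input = gr.Textbox(placeholder="Type your message here...")
    # Two-step chain: append the user turn, then stream the bot turn.
    user_input.submit(user, [user_input, chatbot], [user_input, chatbot]).then(
        bot, chatbot, chatbot
    )

if __name__ == "__main__":
    demo.queue().launch()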