Update app.py
app.py CHANGED
@@ -203,26 +203,30 @@ MODEL_TITLE = """
     justify-content: center;
     display: flex;
 ">
-    <div class="image" >
-        <img src="file/seal_logo.png" style="
-            max-width: 10em;
-            max-height: 5%;
-            height: 3em;
-            width: 3em;
-            float: left;
-            margin-left: auto;
-        ">
-    </div>
-    <div class="text" style="
-        padding-left: 20px;
-        padding-top: 1%;
-        float: left;
-    ">
-        <h1 style="font-size: xx-large">SeaLLMs - Large Language Models for Southeast Asia</h1>
-    </div>
+
+    <div class="text" style="
+        padding-left: 20px;
+        padding-top: 1%;
+        float: left;
+    ">
+        <h1 style="font-size: xx-large">SeaLLMs - Large Language Models for Southeast Asia</h1>
+    </div>
 </div>
 """
 
+"""
+Somehow cannot add image here
+<div class="image" >
+    <img src="file/seal_logo.png" style="
+        max-width: 10em;
+        max-height: 5%;
+        height: 3em;
+        width: 3em;
+        float: left;
+        margin-left: auto;
+    ">
+</div>
+"""
 
 MODEL_DESC = f"""
 <div style='display:flex; gap: 0.25rem; '>
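This hunk drops the logo `<div>` from `MODEL_TITLE` and parks it in a separate, unused string, with the note that the image cannot be added. As a hedged workaround sketch (not part of this commit), the logo could be inlined as a base64 data URI so the `<img>` tag no longer depends on Gradio serving `file/seal_logo.png`; the file path and variable names below are assumptions.

```python
# Illustrative sketch only: embed the logo as a data URI instead of a file URL.
# Assumes seal_logo.png sits next to app.py.
import base64
from pathlib import Path

logo_b64 = base64.b64encode(Path("seal_logo.png").read_bytes()).decode("ascii")

MODEL_TITLE = f"""
<div style="justify-content: center; display: flex;">
    <div class="image">
        <img src="data:image/png;base64,{logo_b64}"
             style="height: 3em; width: 3em; float: left;">
    </div>
    <div class="text" style="padding-left: 20px; padding-top: 1%; float: left;">
        <h1 style="font-size: xx-large">SeaLLMs - Large Language Models for Southeast Asia</h1>
    </div>
</div>
"""
```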
@@ -1012,7 +1016,7 @@ class CustomTabbedInterface(gr.Blocks):
             tab_names = [f"Tab {i}" for i in range(len(interface_list))]
         with self:
             if title:
-                gr.
+                gr.HTML(
                     f"<h1 style='text-align: center; margin-bottom: 1rem'>{title}</h1>"
                 )
             if description:
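The title is now rendered with `gr.HTML`, which outputs the string as raw HTML inside the `Blocks` context, so the inline style on the `<h1>` is applied as-is. A minimal usage sketch, with a placeholder title value:

```python
# Minimal sketch of the gr.HTML title rendering shown above.
import gradio as gr

title = "SeaLLMs Demo"  # placeholder value, not taken from the commit

with gr.Blocks() as demo:
    if title:
        # gr.HTML renders raw HTML rather than Markdown.
        gr.HTML(f"<h1 style='text-align: center; margin-bottom: 1rem'>{title}</h1>")

demo.launch()
```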
@@ -1270,7 +1274,7 @@ def chat_response_stream_multiturn(
     # message, history, sys_prompt=system_prompt
     # )
     full_prompt = chatml_format(message.strip(), history=history, system_prompt=system_prompt)
-    print(full_prompt)
+    # print(full_prompt)
 
     if len(tokenizer.encode(full_prompt, add_special_tokens=False)) >= 4050:
         raise gr.Error(f"Conversation or prompt is too long, please clear the chatbox or try shorter input.")
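Here the `print(full_prompt)` debug line is commented out, while the token-length guard that follows is unchanged. A minimal sketch of that guard, assuming a Hugging Face tokenizer; the helper name is illustrative and the 4050-token threshold is the value used in the diff.

```python
# Illustrative helper mirroring the length check above; not part of the commit.
import gradio as gr

def ensure_prompt_fits(tokenizer, full_prompt: str, max_tokens: int = 4050) -> None:
    # Count prompt tokens without special tokens, as in the diff.
    n_tokens = len(tokenizer.encode(full_prompt, add_special_tokens=False))
    if n_tokens >= max_tokens:
        raise gr.Error("Conversation or prompt is too long, please clear the chatbox or try shorter input.")
```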