NGUYEN, Xuan Phi committed
Commit · cb6719e · 1 Parent(s): 390b3ef
update time zone
multipurpose_chatbot/configs.py
CHANGED
@@ -183,7 +183,15 @@ CHUNK_SIZE = int(os.environ.get("CHUNK_SIZE", "1024"))
 CHUNK_OVERLAP = int(os.environ.get("CHUNK_SIZE", "50"))
 
 
-
+DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful, honest and safe AI assistant."""
+DEFAULT_SYSTEM_PROMPT = """You are SeaLLM, you are a helpful, respectful and honest AI assistant. Based on your internal clock, the current date time: {cur_datetime}.
+
+Your knowledge base was last updated on August 2023. Thus, you should answers questions about events prior to and after August 2023 the way a highly informed individual in August 2023 would if they were talking to someone from the above date, and can let the human know this when relevant.
+
+You should give concise responses to very simple questions, but provide thorough responses to more complex and open-ended questions. You should provide thorough help with writing, analysis, question answering, math, coding, and all sorts of other tasks. It uses markdown for coding."""
+
+
+SYSTEM_PROMPT = os.environ.get("SYSTEM_PROMPT", DEFAULT_SYSTEM_PROMPT)
 
 MAX_TOKENS = int(os.environ.get("MAX_TOKENS", "2048"))
 TEMPERATURE = float(os.environ.get("TEMPERATURE", "0.1"))
@@ -219,3 +227,13 @@ IMAGE_TOKEN = os.environ.get("IMAGE_TOKEN", "[IMAGE]<|image|>[/IMAGE]")
 IMAGE_TOKEN_INTERACTIVE = bool(int(os.environ.get("IMAGE_TOKEN_INTERACTIVE", "0")))
 IMAGE_TOKEN_LENGTH = int(os.environ.get("IMAGE_TOKEN_LENGTH", "576"))
 MAX_PACHES = int(os.environ.get("MAX_PACHES", "1"))
+
+
+"""
+# claude style
+You are SeaLLM, you are a helpful, respectful and honest AI assistant. Based on your internal clock, the current date time: {cur_datetime}.
+
+Your knowledge base was last updated on August 2023. Thus, you should answers questions about events prior to and after August 2023 the way a highly informed individual in August 2023 would if they were talking to someone from the above date, and can let the human know this when relevant.
+
+You should give concise responses to very simple questions, but provide thorough responses to more complex and open-ended questions. You should provide thorough help with writing, analysis, question answering, math, coding, and all sorts of other tasks. It uses markdown for coding.
+"""
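Note: the new DEFAULT_SYSTEM_PROMPT carries a {cur_datetime} placeholder, but this diff does not show where it is filled in. Below is a minimal sketch of how it could be rendered at request time, assuming plain str.format substitution; the helper name render_system_prompt is hypothetical, and get_datetime_string from chat_interface.py (further down) would supply the timestamp.

import os

# Shortened copy of the new default prompt, with the same placeholder.
DEFAULT_SYSTEM_PROMPT = (
    "You are SeaLLM, you are a helpful, respectful and honest AI assistant. "
    "Based on your internal clock, the current date time: {cur_datetime}."
)
SYSTEM_PROMPT = os.environ.get("SYSTEM_PROMPT", DEFAULT_SYSTEM_PROMPT)

def render_system_prompt(cur_datetime: str) -> str:
    # str.format fills the placeholder; any literal braces in a custom
    # SYSTEM_PROMPT would need to be escaped as {{ and }}.
    return SYSTEM_PROMPT.format(cur_datetime=cur_datetime)

print(render_system_prompt("February 28, 2024, 10:15 GMT+08:00"))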
multipurpose_chatbot/demos/chat_interface.py
CHANGED
@@ -63,6 +63,7 @@ from ..configs import (
     MODEL_NAME,
     MAX_TOKENS,
     TEMPERATURE,
+    CHATBOT_HEIGHT,
 )
 
 from ..globals import MODEL_ENGINE
@@ -70,7 +71,7 @@ from ..globals import MODEL_ENGINE
 CHAT_EXAMPLES = [
     ["Explain general relativity."],
 ]
-DATETIME_FORMAT = "
+DATETIME_FORMAT = "{cur_datetime}"
 
 
 def gradio_history_to_openai_conversations(message=None, history=None, system_prompt=None):
@@ -104,9 +105,11 @@ def gradio_history_to_conversation_prompt(message=None, history=None, system_pro
 
 def get_datetime_string():
     from datetime import datetime
-    now = datetime.now()
+    # now = datetime.now()
     # dd/mm/YY H:M:S
-    dt_string = now.strftime("%B %d, %Y, %H:%M:%S")
+    # tz_string = datetime.now().astimezone()
+    # dt_string = now.strftime("%B %d, %Y, %H:%M:%S")
+    dt_string = datetime.now().astimezone().strftime("%B %d, %Y, %H:%M GMT%Z")
     return dt_string
 
 
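Note: the rewritten get_datetime_string formats an aware local datetime. The %Z directive on the result of datetime.now().astimezone() yields a platform- and zone-dependent name (often a fixed UTC offset), so the text after "GMT" varies by host. A standalone sketch, not part of the repository:

from datetime import datetime, timedelta, timezone

# Same expression as the new line in get_datetime_string(); the exact
# suffix after "GMT" depends on the host's local timezone.
print(datetime.now().astimezone().strftime("%B %d, %Y, %H:%M GMT%Z"))

# With an explicit fixed-offset zone the output is deterministic:
tz_plus8 = timezone(timedelta(hours=8), "+08")
print(datetime(2024, 2, 28, 10, 15, tzinfo=tz_plus8).strftime("%B %d, %Y, %H:%M GMT%Z"))
# prints: February 28, 2024, 10:15 GMT+08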
@@ -690,6 +693,7 @@ class ChatInterfaceDemo(BaseDemo):
             { "left": "$$", "right": "$$", "display": True},
         ],
         show_copy_button=True,
+        height=CHATBOT_HEIGHT,
     ),
     textbox=gr.Textbox(placeholder='Type message', lines=1, max_lines=128, min_width=200, scale=8),
     submit_btn=gr.Button(value='Submit', variant="primary", scale=0),