Use Google-style docstrings and reorganize constants declaration
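For readers unfamiliar with the two conventions, here is a minimal sketch contrasting the old reST-style field lists with the Google-style sections this commit adopts. The `add_rest`/`add_google` functions below are hypothetical illustrations, not code from app.py.

# Hypothetical functions for illustration only; not part of app.py.

def add_rest(a: int, b: int) -> int:
    """
    Add two numbers (reST / Sphinx style).

    :param a: The first number.
    :param b: The second number.
    :return: The sum of `a` and `b`.
    """
    return a + b


def add_google(a: int, b: int) -> int:
    """
    Add two numbers (Google style).

    Args:
        a: The first number.
        b: The second number.

    Returns:
        The sum of `a` and `b`.
    """
    return a + b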
app.py CHANGED
@@ -27,12 +27,27 @@ from slidedeckai.helpers import chat_helper
 
 
 load_dotenv()
+logger = logging.getLogger(__name__)
+
+
+RUN_IN_OFFLINE_MODE = os.getenv('RUN_IN_OFFLINE_MODE', 'False').lower() == 'true'
+
+# Session variables
+CHAT_MESSAGES = 'chat_messages'
+DOWNLOAD_FILE_KEY = 'download_file_name'
+IS_IT_REFINEMENT = 'is_it_refinement'
+ADDITIONAL_INFO = 'additional_info'
+PDF_FILE_KEY = 'pdf_file'
+
+TEXTS = list(GlobalConfig.PPTX_TEMPLATE_FILES.keys())
+CAPTIONS = [GlobalConfig.PPTX_TEMPLATE_FILES[x]['caption'] for x in TEXTS]
 
 
 class StreamlitChatMessageHistory:
     """Chat message history stored in Streamlit session state."""
 
     def __init__(self, key: str):
+        """Initialize the chat message history."""
         self.key = key
         if key not in st.session_state:
             st.session_state[key] = []
@@ -51,14 +66,13 @@ class StreamlitChatMessageHistory:
         st.session_state[self.key].append(AIMessage(content))
 
 
-RUN_IN_OFFLINE_MODE = os.getenv('RUN_IN_OFFLINE_MODE', 'False').lower() == 'true'
-
-
 @st.cache_data
 def _load_strings() -> dict:
     """
     Load various strings to be displayed in the app.
-    :return: The dictionary of strings.
+
+    Returns:
+        The dictionary of strings.
     """
     with open(GlobalConfig.APP_STRINGS_FILE, 'r', encoding='utf-8') as in_file:
         return json5.loads(in_file.read())
@@ -69,10 +83,12 @@ def _get_prompt_template(is_refinement: bool) -> str:
     """
     Return a prompt template.
 
-    :param is_refinement: Whether this is the initial or refinement prompt.
-    :return: The prompt template as f-string.
-    """
+    Args:
+        is_refinement: Whether this is the initial or refinement prompt.
 
+    Returns:
+        The prompt template as f-string.
+    """
     if is_refinement:
         with open(GlobalConfig.REFINEMENT_PROMPT_TEMPLATE, 'r', encoding='utf-8') as in_file:
             template = in_file.read()
@@ -95,16 +111,18 @@ def are_all_inputs_valid(
     """
     Validate user input and LLM selection.
 
-    :param user_prompt: The prompt.
-    :param provider: The LLM provider.
-    :param selected_model: Name of the model.
-    :param user_key: User-provided API key.
-    :param azure_deployment_url: Azure OpenAI deployment URL.
-    :param azure_endpoint_name: Azure OpenAI model endpoint.
-    :param azure_api_version: Azure OpenAI API version.
-    :return: `True` if all inputs "look" OK; `False` otherwise.
+    Args:
+        user_prompt: The prompt.
+        provider: The LLM provider.
+        selected_model: Name of the model.
+        user_key: User-provided API key.
+        azure_deployment_url: Azure OpenAI deployment URL.
+        azure_endpoint_name: Azure OpenAI model endpoint.
+        azure_api_version: Azure OpenAI API version.
+
+    Returns:
+        `True` if all inputs "look" OK; `False` otherwise.
     """
-
     if not text_helper.is_valid_prompt(user_prompt):
         handle_error(
             'Not enough information provided!'
@@ -139,10 +157,10 @@ def handle_error(error_msg: str, should_log: bool):
     """
     Display an error message in the app.
 
-    :param error_msg: The error message to be displayed.
-    :param should_log: If `True`, log the message.
+    Args:
+        error_msg: The error message to be displayed.
+        should_log: If `True`, log the message.
     """
-
     if should_log:
         logger.error(error_msg)
 
@@ -153,7 +171,6 @@ def reset_api_key():
     """
     Clear API key input when a different LLM is selected from the dropdown list.
     """
-
     st.session_state.api_key_input = ''
 
 
@@ -177,18 +194,8 @@ def reset_chat_history():
 
 APP_TEXT = _load_strings()
 
-# Session variables
-CHAT_MESSAGES = 'chat_messages'
-DOWNLOAD_FILE_KEY = 'download_file_name'
-IS_IT_REFINEMENT = 'is_it_refinement'
-ADDITIONAL_INFO = 'additional_info'
-PDF_FILE_KEY = 'pdf_file'
 
-
-logger = logging.getLogger(__name__)
-
-texts = list(GlobalConfig.PPTX_TEMPLATE_FILES.keys())
-captions = [GlobalConfig.PPTX_TEMPLATE_FILES[x]['caption'] for x in texts]
+# -----= UI display begins here =-----
 
 
 with st.sidebar:
@@ -201,8 +208,8 @@ with st.sidebar:
     # The PPT templates
     pptx_template = st.sidebar.radio(
         '1: Select a presentation template:',
-        texts,
-        captions=captions,
+        TEXTS,
+        captions=CAPTIONS,
         horizontal=True
     )
 
@@ -303,7 +310,6 @@ def build_ui():
    """
     Display the input elements for content generation.
     """
-
    st.title(APP_TEXT['app_name'])
    st.subheader(APP_TEXT['caption'])
    st.markdown(
@@ -342,11 +348,11 @@ def set_up_chat_ui():
    st.chat_message('ai').write(random.choice(APP_TEXT['ai_greetings']))
 
    history = StreamlitChatMessageHistory(key=CHAT_MESSAGES)
-    prompt_template = chat_helper.ChatPromptTemplate.from_template(
-        _get_prompt_template(
-            is_refinement=_is_it_refinement()
-        )
-    )
+    # prompt_template = chat_helper.ChatPromptTemplate.from_template(
+    #     _get_prompt_template(
+    #         is_refinement=_is_it_refinement()
+    #     )
+    # )
 
    # Since Streamlit app reloads at every interaction, display the chat history
    # from the save session state
@@ -449,9 +455,9 @@ def set_up_chat_ui():
            )
        except ollama.ResponseError:
            handle_error(
-
-
-
+                'The model is unavailable with Ollama on your system.'
+                ' Make sure that you have provided the correct LLM name or pull it.'
+                ' View LLMs available locally by running `ollama list`.',
                True
            )
        except Exception as ex:
@@ -484,9 +490,9 @@ def _is_it_refinement() -> bool:
    """
    Whether it is the initial prompt or a refinement.
 
-    :return: True if it is the initial prompt; False otherwise.
+    Returns:
+        True if it is the initial prompt; False otherwise.
    """
-
    if IS_IT_REFINEMENT in st.session_state:
        return True
 
@@ -502,7 +508,8 @@ def _get_user_messages() -> list[str]:
    """
    Get a list of user messages submitted until now from the session state.
 
-    :return: The list of user messages.
+    Returns:
+        The list of user messages.
    """
    return [
        msg.content for msg in st.session_state[CHAT_MESSAGES]
@@ -514,7 +521,8 @@ def _display_download_button(file_path: pathlib.Path):
    """
    Display a download button to download a slide deck.
 
-    :param file_path: The path of the .pptx file.
+    Args:
+        file_path: The path of the .pptx file.
    """
    with open(file_path, 'rb') as download_file:
        st.download_button(
@@ -529,7 +537,6 @@ def main():
    """
    Trigger application run.
    """
-
    build_ui()
 
 