google-labs-jules[bot] committed
Commit f243b4c · 1 Parent(s): 2535210
fix: Address PR comments
- Fix PDF file option not working.
- app.py +1 -0
- src/slidedeckai/core.py +5 -3
app.py CHANGED

@@ -399,6 +399,7 @@ def set_up_chat_ui():
                 topic=prompt_text,
                 api_key=api_key_token.strip(),
                 template_idx=list(GlobalConfig.PPTX_TEMPLATE_FILES.keys()).index(pptx_template),
+                additional_info=st.session_state.get(ADDITIONAL_INFO, ''),
             )
 
             progress_bar = st.progress(0, 'Preparing to call LLM...')
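For context, the new keyword argument reads whatever was stored under the ADDITIONAL_INFO session-state key earlier in app.py. That population step is not part of this commit; the sketch below only illustrates, under assumptions, how the key might be filled from an uploaded PDF. The uploader widget, the pypdf-based extraction, and the ADDITIONAL_INFO value are hypothetical here, not taken from the repository.

# Hypothetical sketch (not from this commit): filling the ADDITIONAL_INFO
# session-state key from an uploaded PDF before SlideDeckAI is constructed.
import streamlit as st
from pypdf import PdfReader

ADDITIONAL_INFO = 'additional_info'  # assumed session-state key name

uploaded_pdf = st.file_uploader('PDF file (optional)', type=['pdf'])
if uploaded_pdf is not None:
    reader = PdfReader(uploaded_pdf)
    # Concatenate the text of all pages; fall back to '' for pages with no text.
    st.session_state[ADDITIONAL_INFO] = '\n'.join(
        page.extract_text() or '' for page in reader.pages
    )
else:
    st.session_state.setdefault(ADDITIONAL_INFO, '')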
src/slidedeckai/core.py CHANGED

@@ -26,7 +26,7 @@ class SlideDeckAI:
     The main class for generating slide decks.
     """
 
-    def __init__(self, model, topic, api_key=None, pdf_file_path=None, pdf_page_range=None, template_idx=0):
+    def __init__(self, model, topic, api_key=None, pdf_file_path=None, pdf_page_range=None, template_idx=0, additional_info=''):
         """
         Initializes the SlideDeckAI object.
 
@@ -36,6 +36,7 @@ class SlideDeckAI:
         :param pdf_file_path: The path to a PDF file to use as a source for the slide deck.
         :param pdf_page_range: A tuple representing the page range to use from the PDF file.
         :param template_idx: The index of the PowerPoint template to use.
+        :param additional_info: Additional information to be sent to the LLM, such as text from a PDF.
         """
         self.model = model
         self.topic = topic
@@ -43,6 +44,7 @@ class SlideDeckAI:
         self.pdf_file_path = pdf_file_path
         self.pdf_page_range = pdf_page_range
         self.template_idx = template_idx
+        self.additional_info = additional_info
         self.chat_history = ChatMessageHistory()
         self.last_response = None
 
@@ -68,7 +70,7 @@ class SlideDeckAI:
         """
         self.chat_history.add_user_message(self.topic)
         prompt_template = self._get_prompt_template(is_refinement=False)
-        formatted_template = prompt_template.format(question=self.topic, additional_info=
+        formatted_template = prompt_template.format(question=self.topic, additional_info=self.additional_info)
 
         provider, llm_name = llm_helper.get_provider_model(self.model, use_ollama=RUN_IN_OFFLINE_MODE)
 
@@ -119,7 +121,7 @@ class SlideDeckAI:
         formatted_template = prompt_template.format(
             instructions='\n'.join(list_of_msgs),
             previous_content=self.last_response,
-            additional_info=
+            additional_info=self.additional_info,
         )
 
         provider, llm_name = llm_helper.get_provider_model(self.model, use_ollama=RUN_IN_OFFLINE_MODE)
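Taken together, the two files thread the PDF-derived text from the Streamlit UI into both the initial generation prompt and the refinement prompt. A minimal usage sketch of the updated constructor follows; only the __init__ signature and the additional_info keyword come from this diff, while the model string, the file handling, and the generate() call are assumptions about the surrounding library.

# Hedged usage sketch of the new additional_info parameter. Everything except
# the constructor signature shown in the diff is an assumption.
from slidedeckai.core import SlideDeckAI

with open('extracted_pdf_text.txt', encoding='utf-8') as f:
    pdf_text = f.read()  # stand-in for text extracted from a source PDF

deck = SlideDeckAI(
    model='gpt-4o-mini',             # hypothetical model identifier
    topic='Quarterly sales review',
    api_key='YOUR_API_KEY',
    template_idx=0,
    additional_info=pdf_text,        # new in this commit
)

# deck.generate() is an assumed entry point; the diff shows only that the
# initial and refinement prompt templates now receive additional_info.
# deck.generate()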