Spaces:
Running
Running
Aditya Bakshi
committed on
add reset button just below the chat input
Browse files- .DS_Store +0 -0
- app.py +30 -2
- requirements.txt +1 -0
.DS_Store
ADDED
|
Binary file (8.2 kB). View file
|
|
|
app.py
CHANGED
|
@@ -16,6 +16,7 @@ import ollama
|
|
| 16 |
import requests
|
| 17 |
import streamlit as st
|
| 18 |
from dotenv import load_dotenv
|
|
|
|
| 19 |
|
| 20 |
import global_config as gcfg
|
| 21 |
import helpers.file_manager as filem
|
|
@@ -131,6 +132,23 @@ def reset_api_key():
|
|
| 131 |
st.session_state.api_key_input = ''
|
| 132 |
|
| 133 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 134 |
APP_TEXT = _load_strings()
|
| 135 |
|
| 136 |
# Session variables
|
|
@@ -304,12 +322,22 @@ def set_up_chat_ui():
|
|
| 304 |
for msg in history.messages:
|
| 305 |
st.chat_message(msg.type).code(msg.content, language='json')
|
| 306 |
|
| 307 |
-
|
|
|
|
| 308 |
placeholder=APP_TEXT['chat_placeholder'],
|
| 309 |
max_chars=GlobalConfig.LLM_MODEL_MAX_INPUT_LENGTH,
|
| 310 |
accept_file=True,
|
| 311 |
file_type=['pdf', ],
|
| 312 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 313 |
prompt_text = prompt.text or ''
|
| 314 |
if prompt['files']:
|
| 315 |
# Store uploaded pdf in session state
|
|
|
|
| 16 |
import requests
|
| 17 |
import streamlit as st
|
| 18 |
from dotenv import load_dotenv
|
| 19 |
+
from streamlit_extras.bottom_container import bottom
|
| 20 |
|
| 21 |
import global_config as gcfg
|
| 22 |
import helpers.file_manager as filem
|
|
|
|
| 132 |
st.session_state.api_key_input = ''
|
| 133 |
|
| 134 |
|
| 135 |
+
def reset_chat_history():
|
| 136 |
+
"""
|
| 137 |
+
Clear the chat history and related session state variables.
|
| 138 |
+
"""
|
| 139 |
+
if CHAT_MESSAGES in st.session_state:
|
| 140 |
+
del st.session_state[CHAT_MESSAGES]
|
| 141 |
+
if IS_IT_REFINEMENT in st.session_state:
|
| 142 |
+
del st.session_state[IS_IT_REFINEMENT]
|
| 143 |
+
if ADDITIONAL_INFO in st.session_state:
|
| 144 |
+
del st.session_state[ADDITIONAL_INFO]
|
| 145 |
+
if 'pdf_file' in st.session_state:
|
| 146 |
+
del st.session_state['pdf_file']
|
| 147 |
+
if DOWNLOAD_FILE_KEY in st.session_state:
|
| 148 |
+
del st.session_state[DOWNLOAD_FILE_KEY]
|
| 149 |
+
st.rerun()
|
| 150 |
+
|
| 151 |
+
|
| 152 |
APP_TEXT = _load_strings()
|
| 153 |
|
| 154 |
# Session variables
|
|
|
|
| 322 |
for msg in history.messages:
|
| 323 |
st.chat_message(msg.type).code(msg.content, language='json')
|
| 324 |
|
| 325 |
+
# Chat input at the bottom
|
| 326 |
+
prompt = st.chat_input(
|
| 327 |
placeholder=APP_TEXT['chat_placeholder'],
|
| 328 |
max_chars=GlobalConfig.LLM_MODEL_MAX_INPUT_LENGTH,
|
| 329 |
accept_file=True,
|
| 330 |
file_type=['pdf', ],
|
| 331 |
+
)
|
| 332 |
+
|
| 333 |
+
# Reset button below the chat input using bottom container
|
| 334 |
+
with bottom():
|
| 335 |
+
col1, col2, col3 = st.columns([1, 1, 1])
|
| 336 |
+
with col2:
|
| 337 |
+
if st.button("🔄 Reset Chat", help="Clear chat history and start a new conversation", use_container_width=True):
|
| 338 |
+
reset_chat_history()
|
| 339 |
+
|
| 340 |
+
if prompt:
|
| 341 |
prompt_text = prompt.text or ''
|
| 342 |
if prompt['files']:
|
| 343 |
# Store uploaded pdf in session state
|
requirements.txt
CHANGED
|
@@ -10,6 +10,7 @@ pydantic==2.9.1
|
|
| 10 |
litellm>=1.55.0
|
| 11 |
google-generativeai # ~=0.8.3
|
| 12 |
streamlit==1.44.1
|
|
|
|
| 13 |
|
| 14 |
python-pptx~=1.0.2
|
| 15 |
json5~=0.9.14
|
|
|
|
| 10 |
litellm>=1.55.0
|
| 11 |
google-generativeai # ~=0.8.3
|
| 12 |
streamlit==1.44.1
|
| 13 |
+
streamlit-extras>=0.3.0
|
| 14 |
|
| 15 |
python-pptx~=1.0.2
|
| 16 |
json5~=0.9.14
|