barunsaha committed on
Commit
dfcf0f2
·
1 Parent(s): a0d261f

Remove unused code and modules

Browse files
Files changed (1) hide show
  1. app.py +5 -16
app.py CHANGED
@@ -9,7 +9,6 @@ import random
9
  import sys
10
 
11
  import httpx
12
- import huggingface_hub
13
  import json5
14
  import ollama
15
  import requests
@@ -39,6 +38,7 @@ DOWNLOAD_FILE_KEY = 'download_file_name'
39
  IS_IT_REFINEMENT = 'is_it_refinement'
40
  ADDITIONAL_INFO = 'additional_info'
41
  PDF_FILE_KEY = 'pdf_file'
 
42
 
43
  TEXTS = list(GlobalConfig.PPTX_TEMPLATE_FILES.keys())
44
  CAPTIONS = [GlobalConfig.PPTX_TEMPLATE_FILES[x]['caption'] for x in TEXTS]
@@ -264,15 +264,15 @@ with st.sidebar:
264
  default_api_key = os.getenv(env_key_name, "") if env_key_name else ""
265
 
266
  # Always sync session state to env value if needed (autofill on provider change)
267
- if default_api_key and st.session_state.get('api_key_input', None) != default_api_key:
268
- st.session_state['api_key_input'] = default_api_key
269
 
270
  api_key_token = st.text_input(
271
  label=(
272
  '3: Paste your API key/access token:\n\n'
273
  '*Mandatory* for all providers.'
274
  ),
275
- key='api_key_input',
276
  type='password',
277
  disabled=bool(default_api_key),
278
  )
@@ -350,11 +350,6 @@ def set_up_chat_ui():
350
  st.chat_message('ai').write(random.choice(APP_TEXT['ai_greetings']))
351
 
352
  history = StreamlitChatMessageHistory(key=CHAT_MESSAGES)
353
- # prompt_template = chat_helper.ChatPromptTemplate.from_template(
354
- # _get_prompt_template(
355
- # is_refinement=_is_it_refinement()
356
- # )
357
- # )
358
 
359
  # Since Streamlit app reloads at every interaction, display the chat history
360
  # from the save session state
@@ -445,7 +440,7 @@ def set_up_chat_ui():
445
  st.chat_message('ai').code(slide_generator.last_response, language='json')
446
  _display_download_button(path)
447
  else:
448
- handle_error("Failed to generate slide deck.", True)
449
 
450
  except (httpx.ConnectError, requests.exceptions.ConnectionError):
451
  handle_error(
@@ -455,12 +450,6 @@ def set_up_chat_ui():
455
  ' using Ollama, make sure that Ollama is already running on your system.',
456
  True
457
  )
458
- except huggingface_hub.errors.ValidationError as ve:
459
- handle_error(
460
- f'An error occurred while trying to generate the content: {ve}'
461
- '\nPlease try again with a significantly shorter input text.',
462
- True
463
- )
464
  except ollama.ResponseError:
465
  handle_error(
466
  'The model is unavailable with Ollama on your system.'
 
9
  import sys
10
 
11
  import httpx
 
12
  import json5
13
  import ollama
14
  import requests
 
38
  IS_IT_REFINEMENT = 'is_it_refinement'
39
  ADDITIONAL_INFO = 'additional_info'
40
  PDF_FILE_KEY = 'pdf_file'
41
+ API_INPUT_KEY = 'api_key_input'
42
 
43
  TEXTS = list(GlobalConfig.PPTX_TEMPLATE_FILES.keys())
44
  CAPTIONS = [GlobalConfig.PPTX_TEMPLATE_FILES[x]['caption'] for x in TEXTS]
 
264
  default_api_key = os.getenv(env_key_name, "") if env_key_name else ""
265
 
266
  # Always sync session state to env value if needed (autofill on provider change)
267
+ if default_api_key and st.session_state.get(API_INPUT_KEY, None) != default_api_key:
268
+ st.session_state[API_INPUT_KEY] = default_api_key
269
 
270
  api_key_token = st.text_input(
271
  label=(
272
  '3: Paste your API key/access token:\n\n'
273
  '*Mandatory* for all providers.'
274
  ),
275
+ key=API_INPUT_KEY,
276
  type='password',
277
  disabled=bool(default_api_key),
278
  )
 
350
  st.chat_message('ai').write(random.choice(APP_TEXT['ai_greetings']))
351
 
352
  history = StreamlitChatMessageHistory(key=CHAT_MESSAGES)
 
 
 
 
 
353
 
354
  # Since Streamlit app reloads at every interaction, display the chat history
355
  # from the save session state
 
440
  st.chat_message('ai').code(slide_generator.last_response, language='json')
441
  _display_download_button(path)
442
  else:
443
+ handle_error('Failed to generate slide deck.', True)
444
 
445
  except (httpx.ConnectError, requests.exceptions.ConnectionError):
446
  handle_error(
 
450
  ' using Ollama, make sure that Ollama is already running on your system.',
451
  True
452
  )
 
 
 
 
 
 
453
  except ollama.ResponseError:
454
  handle_error(
455
  'The model is unavailable with Ollama on your system.'