Spaces:
Running
Running
darkisz
committed on
Update backendv1.py
Browse files- backendv1.py +3 -2
backendv1.py
CHANGED
|
@@ -299,7 +299,7 @@ def initialize_backend():
|
|
| 299 |
custom_words = ["dunaelektronika", "kft", "outsourcing", "dell", "lenovo", "nis2", "szerver", "kliens", "hálózati", "hpe"]
|
| 300 |
spell_checker.word_frequency.load_words(custom_words)
|
| 301 |
except Exception as e:
|
| 302 |
-
print(f"{RED}Helyesírás-
|
| 303 |
|
| 304 |
try:
|
| 305 |
print(f"{CYAN}Elasticsearch kliens inicializálása...{RESET}")
|
|
@@ -323,6 +323,7 @@ def initialize_backend():
|
|
| 323 |
return backend_objects
|
| 324 |
except Exception as e:
|
| 325 |
print(f"{RED}Hiba a backend inicializálása során: {e}{RESET}")
|
|
|
|
| 326 |
return None
|
| 327 |
|
| 328 |
def process_query(user_question, chat_history, backend, confidence_threshold, fallback_message):
|
|
@@ -359,4 +360,4 @@ KONTEXTUS:
|
|
| 359 |
|
| 360 |
answer = generate_answer_with_history(backend["llm_client"], CONFIG["TOGETHER_MODEL_NAME"], messages_for_llm, CONFIG["GENERATION_TEMPERATURE"])
|
| 361 |
|
| 362 |
-
return {"answer": answer, "sources": sources, "corrected_question": corrected_question, "confidence_score": confidence_score}
|
|
|
|
| 299 |
custom_words = ["dunaelektronika", "kft", "outsourcing", "dell", "lenovo", "nis2", "szerver", "kliens", "hálózati", "hpe"]
|
| 300 |
spell_checker.word_frequency.load_words(custom_words)
|
| 301 |
except Exception as e:
|
| 302 |
+
print(f"{RED}Helyesírás-ellenőrző hiba: {e}{RESET}")
|
| 303 |
|
| 304 |
try:
|
| 305 |
print(f"{CYAN}Elasticsearch kliens inicializálása...{RESET}")
|
|
|
|
| 323 |
return backend_objects
|
| 324 |
except Exception as e:
|
| 325 |
print(f"{RED}Hiba a backend inicializálása során: {e}{RESET}")
|
| 326 |
+
traceback.print_exc()
|
| 327 |
return None
|
| 328 |
|
| 329 |
def process_query(user_question, chat_history, backend, confidence_threshold, fallback_message):
|
|
|
|
| 360 |
|
| 361 |
answer = generate_answer_with_history(backend["llm_client"], CONFIG["TOGETHER_MODEL_NAME"], messages_for_llm, CONFIG["GENERATION_TEMPERATURE"])
|
| 362 |
|
| 363 |
+
return {"answer": answer, "sources": sources, "corrected_question": corrected_question, "confidence_score": confidence_score}
|