Update app.py
app.py CHANGED
@@ -570,7 +570,7 @@ def handle_query(user_question, chatbot, audio=None):
         return "A query is already being processed. Please stop it before starting a new one."
     print(user_question)
     # Start the processing in a new thread
-    future = executor.submit(
+    future = executor.submit(answer_question_test, user_question, chatbot)
     print(future)

     # Check if the process is done or cancelled
@@ -578,10 +578,10 @@ def handle_query(user_question, chatbot, audio=None):
     if future.cancelled():
         return "Processing was cancelled."
     try:
-
-
-
-
+        user_question1, response_text1 = future.result()  # Get the result of the completed future
+        chatbot.append((user_question1, response_text1))
+        return gr.update(value=chatbot)
+
     except Exception as e:
         return f"Error occurred: {e}"
     else:
@@ -599,9 +599,122 @@ def stop_processing():
         return "Sorry, there is some issue with the query. Please try after some time."
     return "No ongoing processing to stop."

+def answer_question_test(user_question, chatbot, audio=None):
+
+    global iterations
+    iterations = 0
+    # Ensure the temporary chart directory exists
+    # ensure_temp_chart_dir()
+    # Clean the /tmp/gradio/ directory
+    # clean_gradio_tmp_dir()
+    # Handle audio input if provided
+    if audio is not None:
+        sample_rate, audio_data = audio
+        audio_segment = AudioSegment(
+            audio_data.tobytes(),
+            frame_rate=sample_rate,
+            sample_width=audio_data.dtype.itemsize,
+            channels=1
+        )
+        with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as temp_audio_file:
+            audio_segment.export(temp_audio_file.name, format="wav")
+            temp_audio_file_path = temp_audio_file.name

-
+        recognizer = sr.Recognizer()
+        with sr.AudioFile(temp_audio_file_path) as source:
+            audio_content = recognizer.record(source)
+            try:
+                user_question = recognizer.recognize_google(audio_content)
+            except sr.UnknownValueError:
+                user_question = "Sorry, I could not understand the audio."
+            except sr.RequestError:
+                user_question = "Could not request results from Google Speech Recognition service."
+
+    while iterations < max_iterations:
+
+        """if "send email to" in user_question:
+            email_match = re.search(r"send email to ([\w\.-]+@[\w\.-]+)", user_question)
+            if email_match:
+                user_email = email_match.group(1).strip()
+                user_question = user_question.replace(f"send email to {user_email}", "").strip()
+                user_question = f"{user_question}:{user_email}"
+        """
+
+        response = agent_executor.invoke({"input": user_question}, config={"callbacks": [langfuse_handler]})
+
+        if isinstance(response, dict):
+            response_text = response.get("output", "")
+        else:
+            response_text = response
+        if "invalid" not in response_text.lower():
+            break
+        iterations += 1
+
+    if iterations == max_iterations:
+        return "The agent could not generate a valid response within the iteration limit."
+
+    if os.getenv("IMAGE_PATH") in response_text:
+        # Open the image file
+        img = Image.open(os.getenv("IMAGE_PATH"))
+
+        # Convert the PIL Image to a base64 encoded string
+        buffered = BytesIO()
+        img.save(buffered, format="PNG")
+        img_str = base64.b64encode(buffered.getvalue()).decode("utf-8")
+
+        img = f'<img src="data:image/png;base64,{img_str}" style="width:450px; height:400px;">'
+        # image = gr.Image(value=img_str)
+        #chatbot.append((user_question, img))

+        email_pattern = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}'
+        match = re.search(email_pattern, user_question)
+        if match:
+            user_email = match.group()  # Return the matched email
+
+            # email send
+            if len(user_email) > 0:
+                # Send email with the chart image attached
+                send_email_with_attachment_mailjet(
+                    recipient_email=user_email,
+                    subject="Warehouse Inventory Report",
+                    body=response.get("output", "").split(".")[0],
+                    # attachment_path=chart_path
+                    attachment_path=img_str)
+
+                # Send email with the chart image attached
+                """send_email_with_attachment(
+                    recipient_email=user_email,
+                    subject="Warehouse Inventory Report",
+                    body=response.get("output", "").split(":")[0],
+                    # attachment_path=chart_path
+                    attachment_path=os.getenv("IMAGE_PATH")
+                )"""
+
+        if "send email to" in user_question:
+            try:
+                os.remove(img)  # Clean up the temporary image file
+            except Exception as e:
+                print(f"Error cleaning up image file: {e}")
+        except Exception as e:
+            print(f"Error loading image file: {e}")
+            response_text = "Chart generation failed. Please try again."
+            #chatbot.append((user_question, "Chart generation failed. Please try again."))
+            #return gr.update(value=chatbot)
+        return user_question, response_text
+
+
+        # return [(user_question,gr.Image("/home/user/app/exports/charts/temp_chart.png"))]
+        # return "/home/user/app/exports/charts/temp_chart.png"
+    else:
+        #chatbot.append((user_question, response_text))
+        #return gr.update(value=chatbot)
+        return user_question, response_text
+        # response_text = response_text.replace('\n', ' ').replace('  ', ' ').strip()
+        # return response_text
+
+
+
+def answer_question(user_question, chatbot, audio=None):

     global iterations
     iterations = 0
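For orientation, the change boils down to a hand-off between two functions: handle_query submits the blocking agent call to a ThreadPoolExecutor, waits on the returned future, and appends the resulting (question, answer) pair to the Gradio chat history, while the new answer_question_test does the actual work and returns that pair. The sketch below is a minimal, self-contained reconstruction of that flow, not the app's actual code: run_agent is a hypothetical stand-in for agent_executor.invoke(...), the audio, retry, chart, and e-mail branches are omitted, and the plain list return stands in for gr.update(value=chatbot).

from concurrent.futures import ThreadPoolExecutor

# Hypothetical stand-in for agent_executor.invoke(...) in app.py.
def run_agent(user_question: str) -> str:
    return f"(answer to: {user_question})"

executor = ThreadPoolExecutor(max_workers=1)

def answer_question_test(user_question, chatbot):
    # Does the slow work off the UI thread and hands back a (question, answer) pair.
    response_text = run_agent(user_question)
    return user_question, response_text

def handle_query(user_question, chatbot):
    # Run the work on the executor, block on the result, and append it to the
    # chat history; the real handler returns gr.update(value=chatbot) instead.
    future = executor.submit(answer_question_test, user_question, chatbot)
    try:
        question, answer = future.result()  # blocks until answer_question_test returns
        chatbot.append((question, answer))
        return chatbot
    except Exception as e:
        return f"Error occurred: {e}"

if __name__ == "__main__":
    history = []
    print(handle_query("How many pallets are in stock?", history))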