simplify ui and remove unnecessary features
- removed status indicators (api health, response time, tokens) for cleaner interface
- removed export format options, keeping only markdown export
- simplified code by removing metrics tracking
- maintained keyboard shortcuts for better usability (see the sketch after this list):
* ctrl+enter: send message
* ctrl+l: clear chat
* ctrl+e: export conversation
these changes focus on core functionality and reduce visual clutter while keeping the most useful improvements.
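
The shortcuts themselves are unchanged by this commit and their wiring is not part of the diff. As a point of reference, here is a minimal, hypothetical sketch of one way to bind Ctrl+Enter / Ctrl+L / Ctrl+E in a Gradio Blocks app; the `js` load hook and the `submit-btn` / `clear-btn` elem_ids are assumptions, only `export-btn` actually appears in the diff below.

import gradio as gr

# Hypothetical sketch, not the Space's actual code: bind Ctrl+Enter / Ctrl+L / Ctrl+E
# to buttons identified by elem_id. Only "export-btn" is confirmed by the diff;
# "submit-btn" and "clear-btn" are assumed names.
shortcut_js = """
() => {
    document.addEventListener('keydown', (e) => {
        if (!e.ctrlKey) return;
        const click = (id) => document.getElementById(id)?.click();
        if (e.key === 'Enter') { e.preventDefault(); click('submit-btn'); }
        if (e.key.toLowerCase() === 'l') { e.preventDefault(); click('clear-btn'); }
        if (e.key.toLowerCase() === 'e') { e.preventDefault(); click('export-btn'); }
    });
}
"""

with gr.Blocks(js=shortcut_js) as demo:
    msg = gr.Textbox(label="Message")
    submit_btn = gr.Button("Send", elem_id="submit-btn")
    clear_btn = gr.Button("Clear", elem_id="clear-btn")
    export_btn = gr.Button("Export", elem_id="export-btn")

demo.launch()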
app.py
CHANGED
@@ -364,67 +364,14 @@ Model: {MODEL}
     return markdown_content
 
 
-def export_conversation_to_json(history: List[Dict[str, str]]) -> str:
-    """Export conversation history to JSON"""
-    if not history:
-        return json.dumps({"error": "No conversation to export"})
-
-    export_data = {
-        "metadata": {
-            "generated_on": datetime.now().isoformat(),
-            "space_name": SPACE_NAME,
-            "model": MODEL,
-            "language": LANGUAGE,
-            "message_count": len([m for m in history if m.get('role') == 'user'])
-        },
-        "conversation": history
-    }
-
-    return json.dumps(export_data, indent=2, ensure_ascii=False)
-
-
-def export_conversation_to_pdf(history: List[Dict[str, str]]) -> bytes:
-    """Export conversation history to PDF (simple text-based PDF)"""
-    try:
-        # Create a simple text-based PDF representation
-        # For a proper PDF, you'd need reportlab or similar library
-        pdf_content = f"CONVERSATION EXPORT\n"
-        pdf_content += f"{'='*50}\n\n"
-        pdf_content += f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
-        pdf_content += f"Space: {SPACE_NAME}\n"
-        pdf_content += f"Model: {MODEL}\n"
-        pdf_content += f"Language: {LANGUAGE}\n\n"
-        pdf_content += f"{'='*50}\n\n"
-
-        message_count = 0
-        for message in history:
-            if isinstance(message, dict):
-                role = message.get('role', 'unknown')
-                content = message.get('content', '')
-
-                if role == 'user':
-                    message_count += 1
-                    pdf_content += f"USER MESSAGE {message_count}:\n{content}\n\n"
-                elif role == 'assistant':
-                    pdf_content += f"ASSISTANT RESPONSE {message_count}:\n{content}\n\n"
-                    pdf_content += f"{'-'*50}\n\n"
-
-        # Convert to bytes for PDF mime type
-        return pdf_content.encode('utf-8')
-    except Exception as e:
-        return f"Error generating PDF: {str(e)}".encode('utf-8')
 
 
-def generate_response(message: str, history: List[Dict[str, str]], files: Optional[List] = None) ->
-    """Generate response using OpenRouter API with file support
-
-    start_time = datetime.now()
-    metrics = {"response_time": 0, "tokens_used": 0, "api_healthy": True}
+def generate_response(message: str, history: List[Dict[str, str]], files: Optional[List] = None) -> str:
+    """Generate response using OpenRouter API with file support"""
 
     # API key validation
     if not API_KEY:
-
-        return (f"""🔑 **API Key Required**
+        return f"""🔑 **API Key Required**
 
 Please configure your OpenRouter API key:
 1. Go to Settings (⚙️) in your HuggingFace Space
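This hunk drops the JSON and PDF exporters, leaving markdown as the only export path. The markdown exporter itself is not part of the diff (only its closing `return markdown_content` shows up as context), so the following is a hypothetical sketch of what such a function could look like, assuming the role/content history shape used elsewhere in app.py:

from datetime import datetime
from typing import Dict, List

def export_conversation_to_markdown(history: List[Dict[str, str]]) -> str:
    """Hypothetical sketch only: render a role/content history as markdown."""
    if not history:
        return "No conversation to export."
    lines = ["# Conversation Export", f"Generated: {datetime.now().isoformat()}", ""]
    for message in history:
        if not isinstance(message, dict):
            continue
        role = message.get('role', 'unknown')
        lines.append(f"## {'User' if role == 'user' else 'Assistant'}")
        lines.append(message.get('content', ''))
        lines.append("")
    markdown_content = "\n".join(lines)
    return markdown_content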
@@ -432,7 +379,7 @@ Please configure your OpenRouter API key:
 3. Add secret: **{API_KEY_VAR}**
 4. Value: Your OpenRouter API key (starts with `sk-or-`)
 
-Get your API key at: https://openrouter.ai/keys""", metrics)
+Get your API key at: https://openrouter.ai/keys"""
 
     # Process files if provided
     file_context = ""
@@ -534,35 +481,22 @@ Get your API key at: https://openrouter.ai/keys""", metrics)
             result = response.json()
             ai_response = result['choices'][0]['message']['content']
 
-            # Calculate metrics
-            end_time = datetime.now()
-            metrics["response_time"] = int((end_time - start_time).total_seconds() * 1000)
-
-            # Try to get token usage from response
-            usage = result.get('usage', {})
-            metrics["tokens_used"] = usage.get('total_tokens', 0)
-            metrics["api_healthy"] = True
-
             # Add file notification if files were uploaded
             if file_notification:
                 ai_response += file_notification
 
-            return ai_response, metrics
+            return ai_response
         else:
             error_data = response.json()
             error_message = error_data.get('error', {}).get('message', 'Unknown error')
-
-            return f"❌ API Error ({response.status_code}): {error_message}", metrics
+            return f"❌ API Error ({response.status_code}): {error_message}"
 
     except requests.exceptions.Timeout:
-
-        return "⏰ Request timeout (30s limit). Try a shorter message or different model.", metrics
+        return "⏰ Request timeout (30s limit). Try a shorter message or different model."
    except requests.exceptions.ConnectionError:
-
-        return "🌐 Connection error. Check your internet connection and try again.", metrics
+        return "🌐 Connection error. Check your internet connection and try again."
    except Exception as e:
-
-        return f"❌ Error: {str(e)}", metrics
+        return f"❌ Error: {str(e)}"
 
 
 # Chat history for export
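For context, the request that this simplified error handling wraps looks roughly like the sketch below. The OpenRouter endpoint URL and payload shape are not shown in the diff, so treat them as assumptions rather than the Space's exact code.

import requests

# Sketch of the request pattern the simplified error handling wraps.
# `api_key` and `model` stand in for the module-level API_KEY and MODEL used in app.py.
def call_openrouter(messages, api_key: str, model: str) -> str:
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",  # assumed endpoint
            headers={"Authorization": f"Bearer {api_key}"},
            json={"model": model, "messages": messages},
            timeout=30,  # matches the "30s limit" message above
        )
        if response.status_code == 200:
            result = response.json()
            return result['choices'][0]['message']['content']
        error_data = response.json()
        error_message = error_data.get('error', {}).get('message', 'Unknown error')
        return f"❌ API Error ({response.status_code}): {error_message}"
    except requests.exceptions.Timeout:
        return "⏰ Request timeout (30s limit). Try a shorter message or different model."
    except requests.exceptions.ConnectionError:
        return "🌐 Connection error. Check your internet connection and try again."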
@@ -608,22 +542,9 @@ def create_interface():
     # Access control check
     has_access = ACCESS_CODE is None  # No access code required
 
-    with gr.Blocks(title=SPACE_NAME, theme=theme
-    .status-indicator {
-        padding: 8px 12px;
-        border-radius: 4px;
-        font-size: 0.9em;
-        display: inline-block;
-        margin: 2px;
-    }
-    .status-ok { background-color: #d4edda; color: #155724; }
-    .status-error { background-color: #f8d7da; color: #721c24; }
-    .status-warning { background-color: #fff3cd; color: #856404; }
-    """) as demo:
+    with gr.Blocks(title=SPACE_NAME, theme=theme) as demo:
         # State for access control
         access_granted = gr.State(has_access)
-        # State for API metrics
-        api_metrics = gr.State({"response_time": 0, "tokens_used": 0, "api_healthy": True})
 
         # Header - always visible
         gr.Markdown(f"# {SPACE_NAME}")
@@ -650,21 +571,6 @@ def create_interface():
         with gr.Tabs() as tabs:
             # Chat Tab
             with gr.Tab("💬 Chat"):
-                # Status indicators row
-                with gr.Row():
-                    api_status = gr.HTML(
-                        '<span class="status-indicator status-ok">🟢 API: Healthy</span>',
-                        elem_id="api-status"
-                    )
-                    response_time_status = gr.HTML(
-                        '<span class="status-indicator status-ok">⏱️ Response Time: 0ms</span>',
-                        elem_id="response-time"
-                    )
-                    tokens_status = gr.HTML(
-                        '<span class="status-indicator status-ok">🎯 Tokens: 0</span>',
-                        elem_id="tokens-used"
-                    )
-
                 # Get examples
                 examples = config.get('examples', [])
                 if isinstance(examples, str):
@@ -698,13 +604,6 @@ def create_interface():
                         size="sm",
                         elem_id="export-btn"
                     )
-                    # Export format dropdown
-                    export_format = gr.Dropdown(
-                        choices=["Markdown", "JSON", "PDF"],
-                        value="Markdown",
-                        label="Format",
-                        scale=1
-                    )
                     # Hidden file component for actual download
                     export_file = gr.File(
                         visible=False,
@@ -712,32 +611,22 @@ def create_interface():
                     )
 
                     # Export handler
-                    def prepare_export(chat_history
+                    def prepare_export(chat_history):
                         if not chat_history:
                             gr.Warning("No conversation history to export.")
                             return None
 
                         try:
+                            content = export_conversation_to_markdown(chat_history)
+
+                            # Create filename
                             space_name_safe = re.sub(r'[^a-zA-Z0-9]+', '_', SPACE_NAME).lower()
                             timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+                            filename = f"{space_name_safe}_conversation_{timestamp}.md"
 
-
-
-
-                            temp_path = Path(tempfile.gettempdir()) / filename
-                            temp_path.write_text(content, encoding='utf-8')
-
-                            elif format_choice == "JSON":
-                                content = export_conversation_to_json(chat_history)
-                                filename = f"{space_name_safe}_conversation_{timestamp}.json"
-                                temp_path = Path(tempfile.gettempdir()) / filename
-                                temp_path.write_text(content, encoding='utf-8')
-
-                            elif format_choice == "PDF":
-                                content = export_conversation_to_pdf(chat_history)
-                                filename = f"{space_name_safe}_conversation_{timestamp}.pdf"
-                                temp_path = Path(tempfile.gettempdir()) / filename
-                                temp_path.write_bytes(content)
+                            # Save to temp file
+                            temp_path = Path(tempfile.gettempdir()) / filename
+                            temp_path.write_text(content, encoding='utf-8')
 
                             # Return the file path for download
                             return gr.File(visible=True, value=str(temp_path))
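Design note: the handler writes the markdown to a temp file and returns an updated, now-visible gr.File, which is what triggers the browser download. A self-contained sketch of that pattern (component names and layout are assumptions, not the Space's exact code):

import tempfile
from datetime import datetime
from pathlib import Path

import gradio as gr

def prepare_export(chat_history):
    # Sketch of the markdown-only export path: build content, write a temp file,
    # and reveal the hidden File component so the browser offers the download.
    if not chat_history:
        gr.Warning("No conversation history to export.")
        return None
    content = "\n\n".join(
        f"{m.get('role', '')}: {m.get('content', '')}" for m in chat_history
    )
    filename = f"conversation_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md"
    temp_path = Path(tempfile.gettempdir()) / filename
    temp_path.write_text(content, encoding='utf-8')
    return gr.File(visible=True, value=str(temp_path))

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    export_btn = gr.Button("Export")
    export_file = gr.File(visible=False, label="Download")
    export_btn.click(prepare_export, inputs=[chatbot], outputs=[export_file])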
@@ -747,7 +636,7 @@ def create_interface():
 
                     export_trigger_btn.click(
                         prepare_export,
-                        inputs=[chatbot
+                        inputs=[chatbot],
                         outputs=[export_file]
                     )
 
@@ -758,10 +647,10 @@ def create_interface():
         # Chat functionality
         def respond(message, chat_history, files_state, is_granted):
             if not is_granted:
-                return chat_history, "", is_granted
+                return chat_history, "", is_granted
 
             if not message:
-                return chat_history, "", is_granted
+                return chat_history, "", is_granted
 
             # Format history for the generate_response function
             formatted_history = []
@@ -769,8 +658,8 @@ def create_interface():
                 if isinstance(h, dict):
                     formatted_history.append(h)
 
-            # Get response
-            response
+            # Get response
+            response = generate_response(message, formatted_history, files_state)
 
             # Update chat history
             chat_history = chat_history + [
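The respond handler keeps gr.Chatbot's role/content ("messages") history format; a minimal, hypothetical sketch of the append-and-return pattern it uses after this change:

def respond_sketch(message, chat_history, response):
    # Append the new exchange in the role/content format used throughout app.py.
    chat_history = chat_history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": response},
    ]
    # Matches the simplified outputs: updated history, cleared textbox, access flag.
    return chat_history, "", True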
@@ -782,21 +671,11 @@ def create_interface():
             global chat_history_store
             chat_history_store = chat_history
 
-
-            api_class = "status-ok" if metrics["api_healthy"] else "status-error"
-            api_icon = "🟢" if metrics["api_healthy"] else "🔴"
-
-            time_class = "status-ok" if metrics["response_time"] < 2000 else "status-warning" if metrics["response_time"] < 5000 else "status-error"
-
-            api_status_html = f'<span class="status-indicator {api_class}">{api_icon} API: {"Healthy" if metrics["api_healthy"] else "Error"}</span>'
-            response_time_html = f'<span class="status-indicator {time_class}">⏱️ Response Time: {metrics["response_time"]}ms</span>'
-            tokens_html = f'<span class="status-indicator status-ok">🎯 Tokens: {metrics["tokens_used"]}</span>'
-
-            return chat_history, "", is_granted, api_status_html, response_time_html, tokens_html
+            return chat_history, "", is_granted
 
         # Wire up the interface
-        msg.submit(respond, [msg, chatbot, uploaded_files, access_granted], [chatbot, msg, access_granted
-        submit_btn.click(respond, [msg, chatbot, uploaded_files, access_granted], [chatbot, msg, access_granted
+        msg.submit(respond, [msg, chatbot, uploaded_files, access_granted], [chatbot, msg, access_granted])
+        submit_btn.click(respond, [msg, chatbot, uploaded_files, access_granted], [chatbot, msg, access_granted])
 
         def clear_chat():
             global chat_history_store