Update app.py
app.py CHANGED
@@ -671,7 +671,8 @@ def create_demo():
         with gr.Column(elem_classes="markdown-style"):
             gr.Markdown("""
             # 🤖 OnDevice AI RAG
-            #### 📊 RAG:
+            #### 📊 RAG: Upload and Analyze Files (TXT, CSV, PDF, Parquet files)
+            Upload your files for data analysis and learning
             """)

         chatbot = gr.Chatbot(
@@ -695,7 +696,7 @@ def create_demo():
             with gr.Column(scale=3):
                 msg = gr.Textbox(
                     show_label=False,
-                    placeholder="
+                    placeholder="Type your message here... 💭",
                     container=False,
                     elem_classes="input-textbox",
                     scale=1
@@ -703,7 +704,7 @@ def create_demo():

             with gr.Column(scale=1, min_width=70):
                 send = gr.Button(
-                    "
+                    "Send",
                     elem_classes="send-button custom-button",
                     scale=1
                 )
@@ -715,48 +716,47 @@ def create_demo():
                     scale=1
                 )

-        with gr.Accordion("🎮
+        with gr.Accordion("🎮 Advanced Settings", open=False):
             with gr.Row():
                 with gr.Column(scale=1):
                     temperature = gr.Slider(
                         minimum=0, maximum=1, step=0.1, value=0.8,
-                        label="
+                        label="Creativity Level 🎨"
                     )
                     max_new_tokens = gr.Slider(
                         minimum=128, maximum=8000, step=1, value=4000,
-                        label="
+                        label="Maximum Token Count 📝"
                     )
                 with gr.Column(scale=1):
                     top_p = gr.Slider(
                         minimum=0.0, maximum=1.0, step=0.1, value=0.8,
-                        label="
+                        label="Diversity Control 🎯"
                     )
                     top_k = gr.Slider(
                         minimum=1, maximum=20, step=1, value=20,
-                        label="
+                        label="Selection Range 📊"
                     )
                     penalty = gr.Slider(
                         minimum=0.0, maximum=2.0, step=0.1, value=1.0,
-                        label="
+                        label="Repetition Penalty 🔄"
                     )

         gr.Examples(
             examples=[
-                ["
-                ["
-                ["
-                ["
+                ["Please analyze this code and suggest improvements:\ndef fibonacci(n):\n    if n <= 1: return n\n    return fibonacci(n-1) + fibonacci(n-2)"],
+                ["Please analyze this data and provide insights:\nAnnual Revenue (Million)\n2019: 1200\n2020: 980\n2021: 1450\n2022: 2100\n2023: 1890"],
+                ["Please solve this math problem step by step: 'When a circle's area is twice that of its inscribed square, find the relationship between the circle's radius and the square's side length.'"],
+                ["Please analyze this marketing campaign's ROI and suggest improvements:\nTotal Cost: $50,000\nReach: 1M users\nClick Rate: 2.3%\nConversion Rate: 0.8%\nAverage Purchase: $35"],
             ],
             inputs=msg
         )

-        # Define the Clear function
         def clear_conversation():
             global current_file_context
             current_file_context = None
-            return [], None, "
+            return [], None, "Start a new conversation..."

-        #
+        # Event bindings
         msg.submit(
             stream_chat,
             inputs=[msg, chatbot, file_upload, temperature, max_new_tokens, top_p, top_k, penalty],
@@ -780,7 +780,7 @@ def create_demo():
             queue=True
         )

-        # Clear
+        # Clear button event binding
         clear.click(
             fn=clear_conversation,
             outputs=[chatbot, file_upload, msg],
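The fourth hunk also shows how the new sliders feed the existing chat handler: `msg.submit` passes the message, chat history, uploaded file, and the five slider values to `stream_chat`. The body of `stream_chat`, the `outputs` list of `msg.submit`, and the Send button's binding are not part of this diff, so the snippet below is only a minimal, self-contained sketch of that binding pattern, with a hypothetical non-streaming stub handler and assumed outputs of `[msg, chatbot]`.

```python
# Minimal sketch of the binding pattern in this diff, assuming Gradio Blocks.
# The stub handler below is hypothetical; the real stream_chat in app.py
# (streaming output, RAG file context) is not shown in this commit.
import gradio as gr

def stream_chat(message, history, file, temperature, max_new_tokens, top_p, top_k, penalty):
    # Stub: echo the message together with the sampling settings supplied by the sliders.
    reply = f"(temp={temperature}, top_p={top_p}, top_k={top_k}, rep={penalty}) You said: {message}"
    # History uses the (user, assistant) tuple format of classic gr.Chatbot.
    return "", history + [(message, reply)]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    file_upload = gr.File(label="Upload a file (TXT, CSV, PDF, Parquet)")
    msg = gr.Textbox(show_label=False, placeholder="Type your message here... 💭")
    send = gr.Button("Send")
    with gr.Accordion("🎮 Advanced Settings", open=False):
        temperature = gr.Slider(minimum=0, maximum=1, step=0.1, value=0.8, label="Creativity Level 🎨")
        max_new_tokens = gr.Slider(minimum=128, maximum=8000, step=1, value=4000, label="Maximum Token Count 📝")
        top_p = gr.Slider(minimum=0.0, maximum=1.0, step=0.1, value=0.8, label="Diversity Control 🎯")
        top_k = gr.Slider(minimum=1, maximum=20, step=1, value=20, label="Selection Range 📊")
        penalty = gr.Slider(minimum=0.0, maximum=2.0, step=0.1, value=1.0, label="Repetition Penalty 🔄")

    # Enter in the Textbox and the Send button call the same handler with the slider values;
    # the outputs shown here are assumed, since the diff cuts off before the outputs list.
    chat_inputs = [msg, chatbot, file_upload, temperature, max_new_tokens, top_p, top_k, penalty]
    msg.submit(stream_chat, inputs=chat_inputs, outputs=[msg, chatbot], queue=True)
    send.click(stream_chat, inputs=chat_inputs, outputs=[msg, chatbot], queue=True)

if __name__ == "__main__":
    demo.launch()
```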
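The `clear_conversation` change pairs with the `clear.click` binding in the final hunk: the three return values are assigned positionally to `outputs=[chatbot, file_upload, msg]`, so `[]` empties the chat history, `None` clears the file widget, and the string becomes the Textbox's new value (a returned string sets a Textbox's value, not its placeholder). A short self-contained sketch of that mapping, reusing the component names from the diff; the Clear button and the rest of the layout live elsewhere in app.py.

```python
# Sketch of the clear wiring, assuming the component names from the diff.
import gradio as gr

current_file_context = None  # module-level RAG file context, as implied by the `global` statement

def clear_conversation():
    global current_file_context
    current_file_context = None  # drop any uploaded-file context
    # Mapped positionally onto outputs=[chatbot, file_upload, msg]:
    # [] -> empty chat history, None -> cleared file widget,
    # "Start a new conversation..." -> new Textbox value (not its placeholder).
    return [], None, "Start a new conversation..."

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    file_upload = gr.File()
    msg = gr.Textbox(show_label=False, placeholder="Type your message here... 💭")
    clear = gr.Button("Clear")
    clear.click(fn=clear_conversation, outputs=[chatbot, file_upload, msg])

if __name__ == "__main__":
    demo.launch()
```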