Update app.py

app.py CHANGED
@@ -406,13 +406,13 @@ def create_ui() -> gr.Blocks:
         # Progress bar for batch analysis
         with gr.Row():
             analysis_progress = gr.Progress()
-            progress_display = gr.Textbox(
-                label="π Batch Analysis Progress",
-                interactive=False,
-                lines=2,
-                visible=False,
-                info="Shows progress when analyzing all repositories"
-            )
+            # progress_display = gr.Textbox(
+            # label="π Batch Analysis Progress",
+            # interactive=False,
+            # lines=2,
+            # visible=False,
+            # info="Shows progress when analyzing all repositories"
+            # )

         with gr.Row(equal_height=True):
             # with gr.Column():
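Review note: `gr.Progress()` is not a renderable component, so the `analysis_progress` created inside this `gr.Row()` never shows anything; Gradio drives progress through a handler argument whose default is a `gr.Progress` instance, and the bar is overlaid on the event's output component while the handler runs. A minimal sketch of that pattern (component and function names are illustrative, not from this app):

```python
import time

import gradio as gr

def slow_task(n_steps: int, progress=gr.Progress()):
    # Gradio replaces the default with a live tracker when the event fires.
    progress(0, desc="Starting...")
    for i in range(int(n_steps)):
        time.sleep(0.1)  # stand-in for real work
        progress((i + 1) / n_steps, desc=f"Step {i + 1}/{int(n_steps)}")
    return f"Finished {int(n_steps)} steps"

with gr.Blocks() as demo:
    steps = gr.Slider(1, 20, value=5, step=1, label="Steps")
    result = gr.Textbox(label="Result")
    gr.Button("Run").click(fn=slow_task, inputs=[steps], outputs=[result])

if __name__ == "__main__":
    demo.launch()
```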
@@ -422,13 +422,14 @@ def create_ui() -> gr.Blocks:
             # show_copy_button=True,
             # info="Raw content extracted from the repository"
             # )
-            with gr.Column():
-                summary_output = gr.Textbox(
-                    label="π― AI Analysis Summary",
-                    lines=20,
-                    show_copy_button=True,
-                    info="Detailed analysis and insights from AI"
-                )
+            # with gr.Column():
+            # summary_output = gr.Textbox(
+            # label="π― AI Analysis Summary",
+            # lines=20,
+            # show_copy_button=True,
+            # info="Detailed analysis and insights from AI"
+            # )
+            pass

         gr.Markdown("### π Results Dashboard")
         gr.Markdown("π‘ **Tip:** Click on any repository name to explore it in detail!")
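Review note: with every child of this block commented out, the added `pass` is what keeps the `with` statement syntactically valid. If the summary panel is meant to come back later, an alternative (a sketch of the general Gradio pattern, not what this commit does) is to keep the component but hide it and toggle visibility from a handler:

```python
import gradio as gr

with gr.Blocks() as demo:
    # Hidden until an analysis finishes; names here are hypothetical.
    summary_output = gr.Textbox(label="AI Analysis Summary", lines=20, visible=False)
    show_btn = gr.Button("Show example summary")

    def show_summary():
        # Reveal the box and set its value in a single update.
        return gr.update(visible=True, value="Example summary text")

    show_btn.click(fn=show_summary, outputs=[summary_output])

if __name__ == "__main__":
    demo.launch()
```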
@@ -559,12 +560,12 @@ def create_ui() -> gr.Blocks:
             requirements = "\n".join([f"- {msg}" for msg in user_messages if msg.strip()])
             return requirements

-        def handle_analyze_next(repo_ids: List[str], current_idx: int, user_requirements: str) -> Tuple[
+        def handle_analyze_next(repo_ids: List[str], current_idx: int, user_requirements: str) -> Tuple[pd.DataFrame, int, str]:
             """Analyzes the next repository in the list."""
             if not repo_ids:
-                return
+                return pd.DataFrame(), 0, "Status: No repositories to analyze. Please submit repo IDs first."
             if current_idx >= len(repo_ids):
-                return
+                return read_csv_to_dataframe(), current_idx, "Status: All repositories have been analyzed."

             repo_id_to_analyze = repo_ids[current_idx]
             status = f"Status: Analyzing repository {current_idx + 1}/{len(repo_ids)}: {repo_id_to_analyze}"
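Review note: the widened return type only pays off because the button wiring (the `@@ -836` hunk further down) lists matching components, in the same order, as `outputs` — Gradio routes each element of the returned tuple to the corresponding output component. A simplified, self-contained sketch of that contract; the handler body is illustrative, not the app's real logic:

```python
import gradio as gr
import pandas as pd

with gr.Blocks() as demo:
    df_output = gr.Dataframe(label="Results")
    current_repo_idx_state = gr.State(0)
    status_box_analysis = gr.Textbox(label="Status")
    analyze_next_btn = gr.Button("Analyze Next")

    def handle_analyze_next(current_idx: int):
        # Tuple order must match the `outputs` list below:
        # (DataFrame, next index, status string).
        df = pd.DataFrame({"repo": ["demo/space"], "score": [0.9]})
        return df, current_idx + 1, f"Status: analyzed item {current_idx + 1}"

    analyze_next_btn.click(
        fn=handle_analyze_next,
        inputs=[current_repo_idx_state],
        outputs=[df_output, current_repo_idx_state, status_box_analysis],
    )

if __name__ == "__main__":
    demo.launch()
```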
@@ -577,7 +578,7 @@ def create_ui() -> gr.Blocks:
             if next_idx >= len(repo_ids):
                 status += "\n\nFinished all analyses."

-            return
+            return df, next_idx, status

         def handle_user_message(user_message: str, history: List[Dict[str, str]]) -> Tuple[List[Dict[str, str]], str]:
             """Appends the user's message to the history, preparing for the bot's response."""
@@ -675,19 +676,17 @@ def create_ui() -> gr.Blocks:

             return "", gr.update(visible=False), gr.update()

-        def handle_analyze_all_repos(repo_ids: List[str], user_requirements: str, progress=gr.Progress()) -> Tuple[pd.DataFrame, str
+        def handle_analyze_all_repos(repo_ids: List[str], user_requirements: str, progress=gr.Progress()) -> Tuple[pd.DataFrame, str]:
             """Analyzes all repositories in the CSV file with progress tracking."""
             if not repo_ids:
-                return pd.DataFrame(), "Status: No repositories to analyze. Please submit repo IDs first."
+                return pd.DataFrame(), "Status: No repositories to analyze. Please submit repo IDs first."

             total_repos = len(repo_ids)
-            progress_text = f"Starting batch analysis of {total_repos} repositories..."

             try:
                 # Start the progress tracking
                 progress(0, desc="Initializing batch analysis...")

-                all_summaries = []
                 successful_analyses = 0
                 failed_analyses = 0
                 csv_update_failures = 0
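Review note: with `progress_text` and `all_summaries` dropped, the injected `progress=gr.Progress()` argument is the main progress channel left. Besides calling `progress(fraction, desc=...)` directly, the tracker can wrap the loop itself via `progress.tqdm(...)`. A sketch of that variant, assuming the same per-repo loop; `analyze_repo` is a stand-in for this app's analysis helper, while `read_csv_to_dataframe` and `logger` are the app's own names:

```python
def handle_analyze_all_repos(repo_ids, user_requirements, progress=gr.Progress()):
    successful, failed = 0, 0
    # progress.tqdm() yields the items while advancing the progress bar automatically.
    for repo_id in progress.tqdm(repo_ids, desc="Analyzing repositories"):
        try:
            analyze_repo(repo_id, user_requirements)  # stand-in for the real analysis call
            successful += 1
        except Exception as exc:
            logger.error(f"Error analyzing {repo_id}: {exc}")
            failed += 1
    return read_csv_to_dataframe(), f"Batch finished: {successful} succeeded, {failed} failed"
```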
@@ -718,7 +717,6 @@ def create_ui() -> gr.Blocks:
                             break

                     if repo_updated:
-                        all_summaries.append(f"β {repo_id}: Analysis completed & CSV updated")
                         successful_analyses += 1
                     else:
                         # CSV update failed - try once more
@@ -747,10 +745,8 @@ def create_ui() -> gr.Blocks:
                                 break

                         if retry_success:
-                            all_summaries.append(f"β {repo_id}: Analysis completed & CSV updated (retry)")
                             successful_analyses += 1
                         else:
-                            all_summaries.append(f"β οΈ {repo_id}: Analysis completed but CSV update failed")
                             csv_update_failures += 1

                     # Longer delay to prevent file conflicts
@@ -758,7 +754,6 @@ def create_ui() -> gr.Blocks:

                 except Exception as e:
                     logger.error(f"Error analyzing {repo_id}: {e}")
-                    all_summaries.append(f"β {repo_id}: Error - {str(e)[:100]}...")
                     failed_analyses += 1
                     # Still wait to prevent rapid failures
                     time.sleep(0.2)
@@ -771,34 +766,23 @@ def create_ui() -> gr.Blocks:
                 if csv_update_failures > 0:
                     final_status += f"\nβ οΈ CSV Update Issues: {csv_update_failures}/{total_repos}"

-                # Create progress summary
-                progress_summary = "\n".join(all_summaries[-15:]) # Show last 15 entries
-                if len(all_summaries) > 15:
-                    progress_summary = f"... (showing last 15 of {len(all_summaries)} repositories)\n" + progress_summary
-
                 # Get final updated dataframe
                 updated_df = read_csv_to_dataframe()

                 logger.info(f"Batch analysis completed: {successful_analyses} successful, {failed_analyses} failed, {csv_update_failures} CSV update issues")
-                return updated_df, final_status
+                return updated_df, final_status

             except Exception as e:
                 logger.error(f"Error in batch analysis: {e}")
                 error_status = f"β Batch analysis failed: {e}"
-                return read_csv_to_dataframe(), error_status
+                return read_csv_to_dataframe(), error_status

         def handle_visit_repo(repo_id: str) -> Tuple[Any, str]:
             """Handle visiting the Hugging Face Space for the repository."""
             if repo_id and repo_id.strip():
                 hf_url = f"https://huggingface.co/spaces/{repo_id.strip()}"
                 logger.info(f"User chose to visit: {hf_url}")
-
-                js_code = f"""
-                <script>
-                    window.open('{hf_url}', '_blank');
-                </script>
-                """
-                return gr.update(visible=False), f"π Opening: {hf_url}"
+                return gr.update(visible=False), hf_url
             return gr.update(visible=False), ""

         def handle_explore_repo(repo_id: str) -> Tuple[Any, Any, str]:
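Review note on the `handle_visit_repo` change: a `<script>` string returned as a component value is rendered as inert text, so the old `js_code` never actually opened a tab; the rewrite returns the bare URL and leaves `window.open` to the click event's JavaScript (see the last hunk). If an in-page fallback is also wanted, an ordinary Markdown link avoids script injection entirely. A small sketch with hypothetical names:

```python
import gradio as gr

with gr.Blocks() as demo:
    repo_box = gr.Textbox(label="Repo ID", value="user/space")
    link_out = gr.Markdown()

    def make_link(repo_id: str):
        # Render a normal link instead of returning <script> markup.
        url = f"https://huggingface.co/spaces/{repo_id.strip()}"
        return f"[Open {repo_id.strip()} on Hugging Face]({url})"

    gr.Button("Get link").click(fn=make_link, inputs=[repo_box], outputs=[link_out])

if __name__ == "__main__":
    demo.launch()
```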
@@ -836,18 +820,15 @@ def create_ui() -> gr.Blocks:
         analyze_next_btn.click(
             fn=handle_analyze_next,
             inputs=[repo_ids_state, current_repo_idx_state, user_requirements_state],
-            outputs=[
+            outputs=[df_output, current_repo_idx_state, status_box_analysis]
         )
         analyze_all_btn.click(
-            fn=lambda:
-            outputs=[
+            fn=lambda: None, # No need to show progress display since it's commented out
+            outputs=[]
         ).then(
             fn=handle_analyze_all_repos,
             inputs=[repo_ids_state, user_requirements_state],
-            outputs=[df_output, status_box_analysis
-        ).then(
-            fn=lambda: gr.update(visible=True), # Keep progress display visible with results
-            outputs=[progress_display]
+            outputs=[df_output, status_box_analysis]
         )

         # Chatbot Tab
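Review note: `.click(...).then(...)` runs its steps in sequence, so the leading `fn=lambda: None` is now a no-op placeholder (the progress display it used to reveal is commented out) and the chain could start directly at `handle_analyze_all_repos`. If a first step is kept, a common use is disabling the button for the duration of the batch; a sketch of that variant reuses the component names from this diff, with the lambda steps being illustrative:

```python
analyze_all_btn.click(
    fn=lambda: gr.update(interactive=False),  # step 1: lock the button while the batch runs
    outputs=[analyze_all_btn],
).then(
    fn=handle_analyze_all_repos,              # step 2: the long-running batch job
    inputs=[repo_ids_state, user_requirements_state],
    outputs=[df_output, status_box_analysis],
).then(
    fn=lambda: gr.update(interactive=True),   # step 3: unlock the button again
    outputs=[analyze_all_btn],
)
```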
@@ -891,7 +872,8 @@ def create_ui() -> gr.Blocks:
         visit_repo_btn.click(
             fn=handle_visit_repo,
             inputs=[selected_repo_display],
-            outputs=[repo_action_modal, selected_repo_display]
+            outputs=[repo_action_modal, selected_repo_display],
+            js="(repo_id) => { if(repo_id && repo_id.trim()) { window.open('https://huggingface.co/spaces/' + repo_id.trim(), '_blank'); } }"
         )
         explore_repo_btn.click(
             fn=handle_explore_repo,
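Review note: the added `js=` argument runs in the browser when the button is clicked, receiving the values of the wired components, which is why `window.open` works here while the old server-side `<script>` string did not. The keyword is `js` in Gradio 4.x; in 3.x the same hook was spelled `_js`. A minimal sketch of a purely client-side click (assuming Gradio 4.x):

```python
import gradio as gr

with gr.Blocks() as demo:
    repo_box = gr.Textbox(label="Repo ID", value="user/space")

    gr.Button("Visit Space").click(
        fn=None,  # no server-side work; the browser handles the whole click
        inputs=[repo_box],
        outputs=[],
        js="(repo_id) => { if (repo_id && repo_id.trim()) { window.open('https://huggingface.co/spaces/' + repo_id.trim(), '_blank'); } }",
    )

if __name__ == "__main__":
    demo.launch()
```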