import gradio as gr
from transformers import pipeline
# Load AI models
def load_models():
    models = {}
    try:
        # Text generation model (smaller open-source alternative)
        models["gpt2"] = pipeline("text-generation", model="gpt2")
        # General-purpose classification models
        models["bert-base"] = pipeline("text-classification", model="bert-base-uncased")
        models["distilbert"] = pipeline("text-classification", model="distilbert-base-uncased")
        # Placeholder for a cybersecurity-specific model: loading this QA checkpoint
        # through the text-classification pipeline attaches a randomly initialized
        # classification head, so it needs fine-tuning before its scores are meaningful
        models["phishing-bert"] = pipeline(
            "text-classification",
            model="deepset/bert-base-cased-squad2",
        )
    except Exception as e:
        print(f"Error loading models: {str(e)}")
        # Fall back to at least one working model
        models["distilbert"] = pipeline("text-classification", model="distilbert-base-uncased")
    return models
# Define functions to interact with the AI models
def analyze_text(text, model_name):
    if not text.strip():
        return "Please provide some text to analyze."
    model = models.get(model_name)
    if not model:
        return f"Model {model_name} not found. Available models: {', '.join(models.keys())}"
    try:
        if model_name == "gpt2":
            # Text generation: return the continuation of the prompt
            result = model(text, max_length=100, num_return_sequences=1)
            return result[0]["generated_text"]
        else:
            # Classification: return the raw label/score output
            result = model(text)
            return str(result)
    except Exception as e:
        return f"Error analyzing text: {str(e)}"
def analyze_file(file, model_name):
    if file is None:
        return "Please upload a file to analyze."
    try:
        # gr.File hands the callback a file path (or an object exposing .name),
        # not an open file handle, so open and read it explicitly
        path = file if isinstance(file, str) else file.name
        with open(path, "r", encoding="utf-8", errors="replace") as f:
            content = f.read()
        return analyze_text(content, model_name)
    except Exception as e:
        return f"Error processing file: {str(e)}"
# Real-time monitoring and alerting
# Score thresholds for raising alerts (not yet wired into the monitoring logic below)
alert_thresholds = {
    "phishing": 0.8,
    "malware": 0.8,
    "anomaly": 0.8,
}
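# A minimal sketch (an assumption, not part of the original app and not called by the UI)
# of how the thresholds above could be applied to a classifier score to decide whether
# an alert should fire for a given category.
def should_alert(category, score):
    """Return True when a score reaches the configured threshold for its category."""
    return score >= alert_thresholds.get(category, 1.0)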
def monitor_real_time_data(data_stream, model_name):
    if not data_stream.strip():
        return "Please provide a data stream URL or content."
    try:
        # For demo purposes, analyze the provided text as a single data point
        result = analyze_text(data_stream, model_name)
        return f"Monitoring result: {result}"
    except Exception as e:
        return f"Error monitoring data: {str(e)}"
# Load models at startup
models = load_models()
# Gradio interface
def create_gradio_interface():
    with gr.Blocks() as demo:
        gr.Markdown("# Cybersecurity AI Platform")
        with gr.Tab("Text Analysis"):
            text_input = gr.Textbox(
                label="Enter text for analysis",
                placeholder="Enter text here...",
            )
            model_dropdown = gr.Dropdown(
                choices=list(models.keys()),
                value=list(models.keys())[0],
                label="Select AI Model",
            )
            text_output = gr.Textbox(label="Analysis Result")
            text_button = gr.Button("Analyze Text")
            text_button.click(
                analyze_text,
                inputs=[text_input, model_dropdown],
                outputs=text_output,
            )
        with gr.Tab("File Analysis"):
            file_input = gr.File(label="Upload file for analysis")
            file_model_dropdown = gr.Dropdown(
                choices=list(models.keys()),
                value=list(models.keys())[0],
                label="Select AI Model",
            )
            file_output = gr.Textbox(label="Analysis Result")
            file_button = gr.Button("Analyze File")
            file_button.click(
                analyze_file,
                inputs=[file_input, file_model_dropdown],
                outputs=file_output,
            )
        with gr.Tab("Real-time Monitoring"):
            stream_input = gr.Textbox(
                label="Enter data stream content",
                placeholder="Enter data to monitor...",
            )
            stream_model_dropdown = gr.Dropdown(
                choices=list(models.keys()),
                value=list(models.keys())[0],
                label="Select AI Model",
            )
            stream_output = gr.Textbox(label="Monitoring Result")
            stream_button = gr.Button("Start Monitoring")
            stream_button.click(
                monitor_real_time_data,
                inputs=[stream_input, stream_model_dropdown],
                outputs=stream_output,
            )
    return demo
if __name__ == "__main__":
    demo = create_gradio_interface()
    demo.launch()
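# Once the app is running (e.g. `python app.py` locally, which serves on Gradio's default
# http://127.0.0.1:7860), the handlers can also be exercised programmatically. A minimal
# sketch using gradio_client is shown below, commented out so the file stays a single
# runnable script; the address and fn_index=0 (the first .click() handler registered
# above, i.e. analyze_text) are assumptions to adjust for your deployment.
# from gradio_client import Client
# client = Client("http://127.0.0.1:7860/")
# print(client.predict("Urgent: verify your account now", "distilbert", fn_index=0))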