#!/usr/bin/env python3
"""
MIMO - HuggingFace Spaces Entry Point
Clean version with all dependencies pre-installed during build
"""
# CRITICAL: Import spaces FIRST, before any CUDA initialization.
# This must be the very first import to avoid CUDA initialization conflicts.
try:
    import spaces
    HAS_SPACES = True
    print("✅ HF Spaces GPU support available")
except ImportError:
    HAS_SPACES = False
    print("⚠️ spaces package not available")
import os
import sys

import gradio as gr

print("🚀 MIMO HuggingFace Spaces starting...")
print(f"🐍 Python: {sys.version}")
print(f"📁 Working dir: {os.getcwd()}")
# Import the complete MIMO implementation
try:
    from app_hf_spaces import CompleteMIMO, gradio_interface
    print("✅ Successfully imported MIMO modules")
except ImportError as e:
    print(f"❌ Import error: {e}")
    import traceback
    traceback.print_exc()
    raise
# HuggingFace Spaces GPU decorator: ZeroGPU Spaces detect GPU usage via functions
# wrapped with @spaces.GPU, so expose a small decorated warmup function.
if HAS_SPACES:
    @spaces.GPU
    def warmup():
        """GPU warmup for HF Spaces detection"""
        import torch
        if torch.cuda.is_available():
            torch.randn(1, device="cuda")  # touch the GPU so the allocation is detected
            return f"GPU: {torch.cuda.get_device_name()}"
        return "CPU mode"
else:
    def warmup():
        return "CPU mode"
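# Hedged sketch (an assumption, not part of the original app): on ZeroGPU Spaces the
# same decorator is typically applied to the heavy inference entry point, usually with
# an explicit time budget. `_example_gpu_task` is a hypothetical placeholder; the real
# MIMO inference lives inside app_hf_spaces.CompleteMIMO.
if HAS_SPACES:
    @spaces.GPU(duration=120)  # request a GPU slice for up to ~120 s per call
    def _example_gpu_task():
        import torch
        return torch.cuda.is_available()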
# Launch the Gradio interface
if __name__ == "__main__":
    print("🎬 Creating MIMO interface...")

    # Create the interface
    demo = gradio_interface()

    print("🌐 Launching web server...")
    demo.queue(max_size=20)  # queue requests so long-running generations don't block each other
    demo.launch(
        server_name="0.0.0.0",  # bind all interfaces so the Spaces container can expose the app
        server_port=7860,       # default port expected by HF Spaces
        share=False,
        show_error=True,
    )
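# Usage note (sketch): outside HF Spaces this file can be run directly with Python
# (e.g. `python app.py`, assuming the usual Spaces entry-point name) and the UI is
# then served at http://localhost:7860; on Spaces the container proxy forwards
# traffic to that same port.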