# Spaces: Running on Zero (Hugging Face Spaces ZeroGPU hardware badge)
| import os | |
| import sys | |
| import traceback | |
| import warnings | |
| warnings.filterwarnings("ignore") | |
| from ui_manager import UIManager | |
def preload_models_to_cache():
    """
    Pre-download models into the HuggingFace cache before any GPU allocation.

    Runs entirely on CPU so downloads do not consume @spaces.GPU time.
    Outside of Spaces (no SPACE_ID environment variable) this is a no-op.
    Failures are logged but never raised: models fall back to on-demand download.
    """
    if not os.getenv('SPACE_ID'):
        # Not running on Hugging Face Spaces -- nothing to pre-cache.
        return

    print("📦 Pre-downloading models to cache (CPU only, no GPU usage)...")
    try:
        import torch
        from diffusers import ControlNetModel

        # ControlNet checkpoints to fetch ahead of time (repo id -> display name).
        cache_targets = {
            "diffusers/controlnet-canny-sdxl-1.0": "Canny ControlNet",
            "diffusers/controlnet-depth-sdxl-1.0": "Depth ControlNet",
        }
        for model_id, model_name in cache_targets.items():
            print(f" ⬇️ Downloading {model_name} ({model_id})...")
            try:
                # from_pretrained populates the HF cache; the object is discarded.
                ControlNetModel.from_pretrained(
                    model_id,
                    torch_dtype=torch.float16,
                    use_safetensors=True,
                    local_files_only=False,  # Allow download
                )
                print(f" ✅ {model_name} cached")
            except Exception as e:
                # Per-model best effort: a failed download is retried lazily later.
                print(f" ⚠️ {model_name} download failed (will retry on-demand): {e}")
        print("✅ Model pre-caching complete")
    except Exception as e:
        # Missing deps / import errors: degrade to downloading on first use.
        print(f"⚠️ Model pre-caching failed: {e}")
        print(" Models will be downloaded on first use instead.")
def launch_final_blend_sceneweaver(share: bool = True, debug: bool = False):
    """
    Launch SceneWeaver Application.

    Pre-caches models (no-op outside Spaces), builds the UIManager, and
    launches the Gradio interface. ImportError gets a dedicated traceback
    dump before re-raising into the generic failure handler.

    Returns the launched interface object.
    """
    print("🎨 Starting SceneWeaver...")
    print("✨ AI-Powered Image Background Generation")
    try:
        # Warm the model cache up-front so GPU time is not spent downloading.
        preload_models_to_cache()

        print("🔍 Testing imports...")
        try:
            print("🔍 Creating UIManager instance...")
            manager = UIManager()
            print("✅ UIManager instance created successfully")

            # On Spaces, models were pre-cached above -- show a status banner.
            if os.getenv('SPACE_ID'):
                print("\n🔧 Detected Hugging Face Spaces environment")
                print("⚡ Models pre-cached - ready for fast inference")
                print(" Expected inference time: ~300-350s (with cached models)")
                print()

            print("🚀 Launching interface...")
            app = manager.launch(share=share, debug=debug)
            print("✅ Interface launched successfully")
            return app
        except ImportError as import_error:
            # Import problems get their own diagnostic before the outer handler.
            print(f"❌ Import failed: {import_error}")
            print(f"Traceback: {traceback.format_exc()}")
            raise
    except Exception as e:
        print(f"❌ Failed to launch: {e}")
        print(f"Full traceback: {traceback.format_exc()}")
        raise
def launch_ui(share: bool = True, debug: bool = False):
    """Convenience entry point for Jupyter notebooks; delegates to the main launcher."""
    interface = launch_final_blend_sceneweaver(share=share, debug=debug)
    return interface
def main():
    """
    Main entry point.

    Detects Jupyter/Colab (where get_ipython() is a builtin), parses simple
    CLI flags (--no-share, --debug) when run as a plain script, launches the
    app, and blocks the main thread until Ctrl+C outside notebooks.
    """
    # Notebooks inject get_ipython() into builtins; plain CPython raises NameError.
    try:
        get_ipython()
        running_in_notebook = True
    except NameError:
        running_in_notebook = False

    # CLI flags apply only outside notebooks; Jupyter passes '-f <kernel.json>',
    # so any '-f'-bearing argv disables CLI parsing.
    cli_mode = (
        not running_in_notebook
        and len(sys.argv) > 1
        and all('-f' not in arg for arg in sys.argv)
    )
    if cli_mode:
        share = '--no-share' not in sys.argv
        debug = '--debug' in sys.argv
    else:
        # Default mode: public share link, no debug.
        share = True
        debug = False

    try:
        interface = launch_final_blend_sceneweaver(share=share, debug=debug)
        if not running_in_notebook:
            print("🛑 Press Ctrl+C to stop")
            try:
                # Keep the process alive until the user interrupts.
                interface.block_thread()
            except KeyboardInterrupt:
                print("👋 Stopped")
        return interface
    except Exception as e:
        print(f"❌ Error: {e}")
        if not running_in_notebook:
            sys.exit(1)
        raise
| if __name__ == "__main__": | |
| main() |