```python
import json
from typing import Any, Dict, Optional

from huggingface_hub import AsyncInferenceClient


class ModelsRegistry:
    def __init__(self):
        # Default model IDs for each capability, served via the HF Inference API
        self.models = {
            "vision_feature_extractor": "google/vit-base-patch16-224",
            "ocr": "microsoft/trocr-base-stage1",
            "sentiment": "yiyanghkust/finbert-tone",
            "timeseries": "huggingface/time-series-transformer-tiny",
            "general_llm": "mistralai/Mistral-7B-v0.1",
        }
        # AsyncInferenceClient exposes awaitable methods; the plain InferenceClient is synchronous
        self.client = AsyncInferenceClient()

    def get_model(self, model_name: str) -> Optional[str]:
        """Return the model ID registered under the given capability name, if any."""
        return self.models.get(model_name)

    async def analyze_sentiment(self, text: str) -> Dict[str, float]:
        """Get sentiment scores from the sentiment model."""
        try:
            # post() sends a raw request to the Inference API and returns the response bytes
            raw = await self.client.post(
                json={"inputs": text},
                model=self.models["sentiment"],
            )
            result = json.loads(raw)
            # The text-classification endpoint wraps results for a single input in an extra list
            scores = result[0] if result and isinstance(result[0], list) else result
            return {item["label"]: item["score"] for item in scores}
        except Exception:
            # Neutral-leaning fallback when the API is unavailable
            return {"neutral": 0.5, "positive": 0.25, "negative": 0.25}

    async def analyze_image(self, image_data: bytes) -> Dict[str, Any]:
        """Get image features from the vision model."""
        try:
            raw = await self.client.post(
                data=image_data,
                model=self.models["vision_feature_extractor"],
            )
            return {"features": json.loads(raw)}
        except Exception:
            return {"features": []}

    async def generate_text(self, prompt: str) -> str:
        """Get a text completion from the general-purpose LLM."""
        try:
            raw = await self.client.post(
                json={"inputs": prompt},
                model=self.models["general_llm"],
            )
            # The text-generation endpoint returns a list of {"generated_text": ...} items
            return json.loads(raw)[0]["generated_text"]
        except Exception:
            return "Model unavailable - using fallback response"
```
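
For context, here is a minimal driver sketch showing how the registry might be exercised. It assumes the class above is importable in the same module, that the Hugging Face Inference API is reachable, and that a token is available to `AsyncInferenceClient` (for example via the `HF_TOKEN` environment variable); the input strings are placeholders.

```python
import asyncio


# Hypothetical driver showing one way to call the async registry methods.
async def main() -> None:
    registry = ModelsRegistry()

    # Sentiment comes back as a label -> score mapping (or the fallback split on failure)
    scores = await registry.analyze_sentiment("Quarterly revenue beat expectations.")
    print("sentiment:", scores)

    # Text generation returns the completion string (or the fallback message)
    completion = await registry.generate_text("Summarize today's market activity:")
    print("completion:", completion)

    # Model IDs can be looked up by capability name
    print("OCR model:", registry.get_model("ocr"))


if __name__ == "__main__":
    asyncio.run(main())
```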