import gradio as gr
import torch
import os
import random

from bdh import BDH, BDHConfig

# Initialize BDH with CORRECT parameters
config = BDHConfig(
    vocab_size=50257,
    n_embd=256,    # smaller embedding, to be safe
    n_head=4,
    n_layer=6,     # fewer layers
    dropout=0.1,
    mlp_internal_dim_multiplier=128  # this takes the place of block_size!
)
model = BDH(config)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
model = model.to(device)
# Load checkpoint if it exists
checkpoint_path = './checkpoints/bdh_compressed.pt'
if os.path.exists(checkpoint_path):
    try:
        checkpoint = torch.load(checkpoint_path, map_location=device)
        model.load_state_dict(checkpoint['model_state_dict'])
        model.eval()
        print("✅ Loaded trained model!")
    except Exception as e:
        print(f"⚠️ Could not load checkpoint: {e}")
else:
    print("⚠️ No checkpoint found, using random weights")
# Memory storage (in-process dict only; resets whenever the app restarts)
memory = {}
def chat(message, history):
    """Smart Alec chat function"""
    # Memory commands
    if message.startswith('/remember '):
        try:
            _, content = message.split('/remember ', 1)
            key, value = content.split('=', 1)
            memory[key.strip()] = value.strip()
            return f"✅ Remembered: {key.strip()}"
        except ValueError:
            return "❌ Format: /remember key = value"
    elif message.startswith('/recall '):
        key = message.replace('/recall ', '').strip()
        if key in memory:
            return f"📝 {key}: {memory[key]}"
        return f"❌ I don't remember '{key}'"
    elif message == '/memories':
        if memory:
            return "🧠 Memory:\n" + "\n".join([f"• {k}: {v}" for k, v in memory.items()])
        return "🧠 Memory is empty"
    # Sarcastic responses (mocked for now; no model generation yet)
    responses = [
        f"*winks* '{message}' - seriously, Princess? Let me think about that... or not.",
        f"Oh, '{message}'? How delightfully predictable. *sarcastic smile*",
        f"'{message}' - wow, revolutionary. Never heard that before. *yawns*"
    ]
    return random.choice(responses)
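# (Assumption / sketch) Once a trained checkpoint is available, the mocked
# responses above could be replaced with real BDH generation. The helper below
# is a minimal sketch only: it ASSUMES a nanoGPT-style forward pass in which
# model(idx) returns (logits, loss) with logits over GPT-2 BPE tokens
# (vocab_size=50257), and it uses plain greedy decoding. Check bdh.py for the
# actual interface before wiring this into chat().
@torch.no_grad()
def generate_reply(prompt, max_new_tokens=64):
    import tiktoken  # GPT-2 BPE tokenizer; extra dependency, only needed here
    enc = tiktoken.get_encoding("gpt2")
    idx = torch.tensor([enc.encode(prompt)], dtype=torch.long, device=device)
    prompt_len = idx.shape[1]
    for _ in range(max_new_tokens):
        logits, _ = model(idx)  # assumed return signature: (logits, loss)
        next_id = torch.argmax(logits[:, -1, :], dim=-1, keepdim=True)
        idx = torch.cat([idx, next_id], dim=1)
    # Return only the newly generated continuation, not the prompt
    return enc.decode(idx[0, prompt_len:].tolist())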
# Gradio Interface
iface = gr.ChatInterface(
    fn=chat,
    title="🐉 Smart Alec - Baby Dragon Hatchling",
    description="""
**Post-Transformer AI with Synaptic Plasticity**

Commands:
- `/remember key = value` - Save to memory
- `/recall key` - Get from memory
- `/memories` - List all memories

Made with 💜 for Sylwia | Early Stage
""",
    examples=[
        "/remember name = Sylwia",
        "/recall name",
        "/memories",
        "Hello Alec!"
    ],
    theme=gr.themes.Soft()
)
if __name__ == "__main__":
    print("🐉 Smart Alec awakens...")
    iface.launch()
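# Deployment note (assumptions about the hosting setup): on Hugging Face Spaces
# the Gradio SDK launches app.py automatically, so torch (and tiktoken, if the
# generation sketch is enabled) would need to be listed in requirements.txt,
# and bdh.py must sit next to app.py. Locally, the demo starts with: python app.py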