#!/usr/bin/env python3
"""
Test LLM integration for surf spot finder
"""
import asyncio
import sys
import os

# Add parent directory to path
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

from tools.llm_agent_tool import SurfLLMAgent, LLMAgentInput
async def test_llm_fallback():
    """Test LLM agent fallback functionality"""
    print("🤖 Testing LLM Agent (Fallback Mode)...")

    agent = SurfLLMAgent()

    # Sample surf spots data
    sample_spots = [
        {
            "id": "tarifa_spain",
            "name": "Tarifa",
            "location": "Spain, Andalusia",
            "distance_km": 15.2,
            "score": 85.5,
            "characteristics": {
                "break_type": "beach_break",
                "skill_level": ["beginner", "intermediate", "advanced"]
            },
            "conditions": {
                "wave_height": 1.5,
                "wind_speed": 12,
                "wind_direction": 135
            },
            "explanation": "Good wave height and favorable wind direction for this beach break"
        },
        {
            "id": "el_palmar_spain",
            "name": "El Palmar",
            "location": "Spain, Andalusia",
            "distance_km": 25.8,
            "score": 72.3,
            "characteristics": {
                "break_type": "beach_break",
                "skill_level": ["beginner", "intermediate"]
            },
            "conditions": {
                "wave_height": 1.2,
                "wind_speed": 8,
                "wind_direction": 90
            },
            "explanation": "Moderate conditions suitable for beginners and intermediate surfers"
        }
    ]
    test_input = LLMAgentInput(
        user_location="Málaga, Spain",
        user_preferences={"skill_level": "intermediate", "board_type": "shortboard"},
        surf_spots=sample_spots
    )
    try:
        result = await agent.run(test_input)
        if result.success:
            print("✅ LLM Agent Test Results:")
            print(f" Summary: {result.summary}")
            if result.reasoning and result.reasoning != "Using fallback analysis due to LLM unavailability":
                print(" 🎯 LLM-powered reasoning detected!")
                print(f" Reasoning length: {len(result.reasoning)} characters")
            else:
                print(" 📝 Fallback analysis used (expected without API keys)")
            print(f" Recommendations: {len(result.recommendations)} spots")
            return True
        else:
            print(f"❌ LLM Agent failed: {result.error}")
            return False
    except Exception as e:
        print(f"❌ LLM Agent test error: {e}")
        return False
def test_llm_provider_detection():
    """Test which LLM providers are available"""
    print("\n🔍 Testing LLM Provider Availability...")

    agent = SurfLLMAgent()
    provider = agent._get_available_provider()

    print(f" OpenAI API Key: {'✅' if agent.openai_api_key else '❌'}")
    print(f" Anthropic API Key: {'✅' if agent.anthropic_api_key else '❌'}")
    print(f" OpenRouter API Key: {'✅' if agent.openrouter_api_key else '❌'}")
    print(f" Selected Provider: {provider or 'None (fallback mode)'}")
    return True
async def main():
    """Run LLM integration tests"""
    print("🚀 LLM Integration Tests")
    print("=" * 40)

    # Test provider detection
    provider_ok = test_llm_provider_detection()

    # Test LLM agent functionality
    llm_ok = await test_llm_fallback()

    print("\n" + "=" * 40)
    print("📊 LLM Integration Summary:")
    print(f" Provider Detection: {'✅' if provider_ok else '❌'}")
    print(f" LLM Agent: {'✅' if llm_ok else '❌'}")

    if provider_ok and llm_ok:
        print("\n🎉 LLM integration ready!")
        print("💡 To enable full LLM reasoning, set:")
        print(" - OPENAI_API_KEY=your_key")
        print(" - ANTHROPIC_API_KEY=your_key")
        print(" - OPENROUTER_API_KEY=your_key")
        print(" Without API keys, fallback analysis will be used.")
    else:
        print("\n⚠️ LLM integration issues detected")


if __name__ == "__main__":
    asyncio.run(main())