#!/usr/bin/env python3
"""
Test LLM integration for the surf spot finder.
"""
import asyncio
import os
import sys

# Make the parent directory (mcp_server/) importable so the `tools`
# package resolves when this file is run directly.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

from tools.llm_agent_tool import SurfLLMAgent, LLMAgentInput
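
# SurfLLMAgent is expected to expose run(), the private
# _get_available_provider() helper, and the openai/anthropic/openrouter
# API-key attributes; all of these are exercised by the tests below.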


async def test_llm_fallback():
    """Test LLM agent fallback functionality"""
    print("🤖 Testing LLM Agent (Fallback Mode)...")
    agent = SurfLLMAgent()

    # Sample surf spots data
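    # Field units are assumed here: wave_height in metres, wind_speed in
    # km/h, wind_direction in compass degrees, and score as a 0-100 rating.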
    sample_spots = [
        {
            "id": "tarifa_spain",
            "name": "Tarifa",
            "location": "Spain, Andalusia",
            "distance_km": 15.2,
            "score": 85.5,
            "characteristics": {
                "break_type": "beach_break",
                "skill_level": ["beginner", "intermediate", "advanced"],
            },
            "conditions": {
                "wave_height": 1.5,
                "wind_speed": 12,
                "wind_direction": 135,
            },
            "explanation": "Good wave height and favorable wind direction for this beach break",
        },
        {
            "id": "el_palmar_spain",
            "name": "El Palmar",
            "location": "Spain, Andalusia",
            "distance_km": 25.8,
            "score": 72.3,
            "characteristics": {
                "break_type": "beach_break",
                "skill_level": ["beginner", "intermediate"],
            },
            "conditions": {
                "wave_height": 1.2,
                "wind_speed": 8,
                "wind_direction": 90,
            },
            "explanation": "Moderate conditions suitable for beginners and intermediate surfers",
        },
    ]

    test_input = LLMAgentInput(
        user_location="Málaga, Spain",
        user_preferences={"skill_level": "intermediate", "board_type": "shortboard"},
        surf_spots=sample_spots,
    )

    try:
        result = await agent.run(test_input)
        if result.success:
            print("✅ LLM Agent Test Results:")
            print(f" Summary: {result.summary}")
            # Reasoning matching the canned fallback string means the agent
            # never reached a real LLM provider.
            if result.reasoning and result.reasoning != "Using fallback analysis due to LLM unavailability":
                print(" 🎯 LLM-powered reasoning detected!")
                print(f" Reasoning length: {len(result.reasoning)} characters")
            else:
                print(" 🔄 Fallback analysis used (expected without API keys)")
            print(f" Recommendations: {len(result.recommendations)} spots")
            return True
        else:
            print(f"❌ LLM Agent failed: {result.error}")
            return False
    except Exception as e:
        print(f"❌ LLM Agent test error: {e}")
        return False


def test_llm_provider_detection():
    """Test which LLM providers are available"""
    print("\n🔑 Testing LLM Provider Availability...")
    agent = SurfLLMAgent()
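    # _get_available_provider() is a private helper; the test reads it
    # directly to report which provider the agent would select.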
    provider = agent._get_available_provider()

    print(f" OpenAI API Key: {'✅' if agent.openai_api_key else '❌'}")
    print(f" Anthropic API Key: {'✅' if agent.anthropic_api_key else '❌'}")
    print(f" OpenRouter API Key: {'✅' if agent.openrouter_api_key else '❌'}")
    print(f" Selected Provider: {provider or 'None (fallback mode)'}")
    return True


async def main():
    """Run LLM integration tests"""
    print("🚀 LLM Integration Tests")
    print("=" * 40)

    # Test provider detection
    provider_ok = test_llm_provider_detection()

    # Test LLM agent functionality
    llm_ok = await test_llm_fallback()

    print("\n" + "=" * 40)
    print("📊 LLM Integration Summary:")
    print(f" Provider Detection: {'✅' if provider_ok else '❌'}")
    print(f" LLM Agent: {'✅' if llm_ok else '❌'}")

    if provider_ok and llm_ok:
        print("\n🎉 LLM integration ready!")
        print("💡 To enable full LLM reasoning, set one of:")
        print(" - OPENAI_API_KEY=your_key")
        print(" - ANTHROPIC_API_KEY=your_key")
        print(" - OPENROUTER_API_KEY=your_key")
        print(" Without API keys, fallback analysis will be used.")
    else:
        print("\n⚠️ LLM integration issues detected")


if __name__ == "__main__":
    asyncio.run(main())