Spaces:
Sleeping
Sleeping
| #!/usr/bin/env python3 | |
| """ | |
| Test OpenAI API key functionality | |
| """ | |
| import os | |
| import sys | |
| import asyncio | |
| from typing import Dict, Any | |
# Load environment variables first, before any project import reads them.
try:
    from dotenv import load_dotenv
    load_dotenv()
except ImportError:
    # python-dotenv is optional; fall back to the inherited process environment.
    pass
# Add parent directory to path so `tools.*` imports below resolve when the
# script is run directly (not installed as a package).
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
async def test_openai_api_key():
    """Exercise the project's SurfLLMAgent end-to-end to validate the key.

    Returns:
        bool: True when the agent picks the "openai" provider and produces
        real (non-fallback) reasoning; False on any failure or exception.
    """
    print("π Testing OpenAI API Key...")
    try:
        # Project-local import is deferred so an ImportError is reported as a
        # test failure instead of killing the whole script at import time.
        from tools.llm_agent_tool import SurfLLMAgent, LLMAgentInput
        # Check if API key is set
        api_key = os.getenv("OPENAI_API_KEY")
        if not api_key:
            print(" β No OPENAI_API_KEY found in environment")
            return False
        # Print only a redacted prefix/suffix, never the full secret.
        print(f" π API Key found: {api_key[:10]}...{api_key[-4:]}")
        # Create agent
        agent = SurfLLMAgent()
        # NOTE(review): relies on a private method — confirm it is stable API.
        provider = agent._get_available_provider()
        print(f" π€ Selected provider: {provider}")
        if provider != "openai":
            print(f" β οΈ OpenAI not selected as provider (using {provider})")
            return False
        # Create sample surf data for testing
        sample_spots = [
            {
                "id": "test_spot",
                "name": "Tarifa",
                "location": "Spain, Andalusia",
                "distance_km": 15.2,
                "score": 85.5,
                "characteristics": {
                    "break_type": "beach_break",
                    "skill_level": ["beginner", "intermediate", "advanced"]
                },
                "conditions": {
                    "wave_height": 1.5,
                    "wind_speed": 12,
                    "wind_direction": 135,
                    "swell_direction": 225
                },
                "explanation": "Excellent conditions with good wave height and favorable wind direction"
            }
        ]
        # Test LLM agent
        test_input = LLMAgentInput(
            user_location="MΓ‘laga, Spain",
            user_preferences={"skill_level": "intermediate", "board_type": "shortboard"},
            surf_spots=sample_spots
        )
        print(" π§ Testing LLM reasoning...")
        result = await agent.run(test_input)
        if result.success:
            print(" β LLM API call successful!")
            print(f" π Summary: {result.summary[:100]}...")
            # Check if we got real LLM reasoning (not fallback): the agent's
            # canned fallback text contains these marker phrases.
            if "fallback" not in result.reasoning.lower() and "no llm api key" not in result.reasoning.lower():
                print(" π― Real LLM reasoning detected!")
                print(f" π Reasoning length: {len(result.reasoning)} characters")
                # Show a snippet of the reasoning
                reasoning_preview = result.reasoning[:200] + "..." if len(result.reasoning) > 200 else result.reasoning
                print(f" π Reasoning preview: {reasoning_preview}")
                return True
            else:
                print(" β οΈ Fallback reasoning used (API may not be working)")
                return False
        else:
            print(f" β LLM agent failed: {result.error}")
            return False
    except Exception as e:
        # Broad catch is deliberate in this diagnostic script: any failure
        # (import, network, bad response shape) maps to a False result.
        print(f" π₯ Exception during API test: {e}")
        return False
async def test_direct_openai_call():
    """Make a raw OpenAI chat-completions request, bypassing the agent layer.

    Isolates key/network problems from project-integration problems: if this
    succeeds but the agent test fails, the issue is in the integration.

    Returns:
        bool: True when the API answers 200 with a message payload; False on
        a bad status, missing httpx, missing key, or any exception.
    """
    print("\nπ§ Testing Direct OpenAI API Call...")
    try:
        # Local import keeps the script importable when httpx is absent.
        import httpx
        api_key = os.getenv("OPENAI_API_KEY")
        if not api_key:
            print(" β No API key available")
            return False
        async with httpx.AsyncClient() as client:
            response = await client.post(
                "https://api.openai.com/v1/chat/completions",
                headers={
                    "Authorization": f"Bearer {api_key}",
                    "Content-Type": "application/json"
                },
                json={
                    "model": "gpt-4o-mini",
                    "messages": [
                        {"role": "system", "content": "You are a surf expert."},
                        {"role": "user", "content": "Give me one sentence about surfing in Tarifa, Spain."}
                    ],
                    "max_tokens": 100
                },
                timeout=30.0
            )
            if response.status_code == 200:
                data = response.json()
                content = data["choices"][0]["message"]["content"]
                # Fix: this literal carried a pointless f-prefix (no
                # placeholders, ruff F541); same output, plain string.
                print(" β Direct API call successful!")
                print(f" π Response: {content}")
                return True
            else:
                print(f" β API call failed: {response.status_code}")
                print(f" π Response: {response.text}")
                return False
    except Exception as e:
        # Diagnostic script: collapse any failure into a False result.
        print(f" π₯ Direct API call exception: {e}")
        return False
def check_environment():
    """Report on .env presence and the OPENAI_API_KEY variable.

    Prints human-readable diagnostics; never echoes the key's value.

    Returns:
        bool: True iff OPENAI_API_KEY is set and non-empty.
    """
    print("π Checking Environment Setup...")
    # Walk three levels up from this file to the expected project root.
    project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
    dotenv_file = os.path.join(project_root, '.env')
    status_line = (
        f" β .env file found at: {dotenv_file}"
        if os.path.exists(dotenv_file)
        else f" β οΈ No .env file found at: {dotenv_file}"
    )
    print(status_line)
    # Inspect the key itself (length and prefix only).
    key = os.getenv("OPENAI_API_KEY")
    if not key:
        print(" β OPENAI_API_KEY not found in environment")
        return False
    print(f" β OPENAI_API_KEY is set ({len(key)} characters)")
    if key.startswith("sk-"):
        print(" β API key format looks correct (starts with 'sk-')")
    else:
        print(" β οΈ API key doesn't start with 'sk-' - might be invalid format")
    return True
async def main():
    """Run the environment, direct-API and agent checks and print a summary.

    Returns:
        bool: True only when all three checks succeeded.
    """
    print("π OpenAI API Key Testing")
    print("=" * 50)

    # Bail out immediately when no key is configured at all.
    env_ok = check_environment()
    if not env_ok:
        print("\nβ Environment not properly configured")
        print("π‘ To enable OpenAI API:")
        print(" 1. Create a .env file in project root")
        print(" 2. Add: OPENAI_API_KEY=sk-your-key-here")
        print(" 3. Get API key from: https://platform.openai.com/api-keys")
        return False

    # Raw HTTP check first, then the full agent integration.
    direct_ok = await test_direct_openai_call()
    agent_ok = await test_openai_api_key()

    def _mark(flag):
        # Pass/fail glyphs matching the rest of the script's output.
        return 'β ' if flag else 'β'

    print("\n" + "=" * 50)
    print("π API Test Results:")
    print(f" Environment Setup: {_mark(env_ok)}")
    print(f" Direct API Call: {_mark(direct_ok)}")
    print(f" LLM Agent Integration: {_mark(agent_ok)}")

    if env_ok and direct_ok and agent_ok:
        print("\nπ OpenAI API is working perfectly!")
        print("π€ Your surf agent will use advanced LLM reasoning")
    elif env_ok and direct_ok:
        print("\nπ§ OpenAI API works, but there's an integration issue")
    elif env_ok:
        print("\nβ οΈ API key found but not working - check key validity")
    else:
        # Unreachable after the early return above; kept to mirror the
        # original branch structure.
        print("\nβ OpenAI API not properly configured")
        print("π The app will use intelligent fallback reasoning instead")

    return env_ok and direct_ok and agent_ok
if __name__ == "__main__":
    # asyncio.run creates an event loop and drives async main() to completion.
    asyncio.run(main())