# deploy-ready-copilot / sponsor_llms.py
# Commit cd249cc (HIMANSHUKUMARJHA): Add sponsor LLM controls and secrets guidance
"""Sponsor LLM integrations (Gemini, OpenAI) for cross-evidence synthesis."""
from __future__ import annotations
import os
from typing import Dict, List, Optional
try:
import google.generativeai as genai
GEMINI_AVAILABLE = True
except ImportError:
GEMINI_AVAILABLE = False
try:
from openai import OpenAI
OPENAI_AVAILABLE = True
except ImportError:
OPENAI_AVAILABLE = False
def _normalize_priority(priority: Optional[List[str] | str]) -> List[str]:
"""Normalize preferred sponsor list."""
if priority is None:
env_priority = os.getenv("SPONSOR_LLM_PRIORITY", "gemini,openai")
priority = env_priority
if isinstance(priority, str):
priority = [item.strip().lower() for item in priority.split(",") if item.strip()]
return [p for p in priority if p in {"gemini", "openai", "both"}]
class SponsorLLMClient:
    """Unified interface for sponsor LLMs (Gemini, OpenAI).

    Clients are initialized at construction from environment variables.
    A provider whose SDK or API key is missing is left as ``None`` and its
    synthesis methods return sentinel strings instead of raising, so callers
    never need try/except around synthesis.
    """

    def __init__(self):
        # Set by the _init_* helpers when the SDK imports and a key is present.
        self.gemini_client = None
        self.openai_client = None
        # Provider order from SPONSOR_LLM_PRIORITY (default "gemini,openai").
        self.default_priority = _normalize_priority(None)
        self._init_gemini()
        self._init_openai()

    def _init_gemini(self):
        """Initialize the Google Gemini client if the SDK and a key exist."""
        if not GEMINI_AVAILABLE:
            return
        api_key = os.getenv("GOOGLE_API_KEY") or os.getenv("GEMINI_API_KEY")
        if not api_key:
            return
        try:
            genai.configure(api_key=api_key)
            model_id = os.getenv("GEMINI_MODEL", "gemini-2.0-flash-exp")
            self.gemini_client = genai.GenerativeModel(model_id)
        except Exception as e:
            # Best-effort init: a bad key/model must not crash startup.
            print(f"Gemini init failed: {e}")

    def _init_openai(self):
        """Initialize the OpenAI client if the SDK and a key exist."""
        if not OPENAI_AVAILABLE:
            return
        api_key = os.getenv("OPENAI_API_KEY")
        if not api_key:
            return
        try:
            self.openai_client = OpenAI(api_key=api_key)
        except Exception as e:
            print(f"OpenAI init failed: {e}")

    @staticmethod
    def _build_prompt(evidence_list: List[str], plan_summary: str) -> str:
        """Build the synthesis prompt shared by both providers."""
        return (
            "As a deployment readiness analyst, synthesize these evidence points"
            f" into actionable insights:\n\nPlan: {plan_summary}\n\nEvidence:\n"
            + "\n".join(f"- {e}" for e in evidence_list)
            + "\n\nProvide a concise synthesis focusing on deployment risks and readiness."
        )

    def synthesize_with_gemini(
        self, evidence_list: List[str], plan_summary: str
    ) -> str:
        """Use Gemini to synthesize evidence into actionable insights.

        Returns the model's text, or a bracketed sentinel string when the
        client is unavailable or the request fails.
        """
        if not self.gemini_client:
            return "[Gemini not available] Evidence synthesis skipped."
        prompt = self._build_prompt(evidence_list, plan_summary)
        try:
            # Low temperature for reproducible, analyst-style output.
            # (Removed a dead `model = os.getenv(...)` local — the model id
            # is fixed once in _init_gemini.)
            response = self.gemini_client.generate_content(
                prompt,
                generation_config={"temperature": 0.2},
            )
            return response.text.strip()
        except Exception as e:
            return f"[Gemini error: {e}]"

    def synthesize_with_openai(
        self, evidence_list: List[str], plan_summary: str
    ) -> str:
        """Use OpenAI to synthesize evidence into actionable insights.

        Returns the model's text, or a bracketed sentinel string when the
        client is unavailable or the request fails.
        """
        if not self.openai_client:
            return "[OpenAI not available] Evidence synthesis skipped."
        prompt = self._build_prompt(evidence_list, plan_summary)
        try:
            response = self.openai_client.chat.completions.create(
                model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"),
                messages=[
                    {"role": "system", "content": "You are a deployment readiness analyst."},
                    {"role": "user", "content": prompt}
                ],
                temperature=0.2,
                max_tokens=500
            )
            return response.choices[0].message.content.strip()
        except Exception as e:
            return f"[OpenAI error: {e}]"

    def cross_validate_evidence(
        self, claude_evidence: str, plan_summary: str, preferred: Optional[List[str] | str] = None
    ) -> Dict[str, str]:
        """Use sponsor LLMs to cross-validate Claude's evidence analysis.

        Args:
            claude_evidence: Evidence text produced by the primary model.
            plan_summary: Short summary of the deployment plan.
            preferred: Optional provider priority ("gemini", "openai",
                "both", a comma-separated string, or a list); falls back to
                the instance default when empty/None.

        Returns:
            Mapping with "gemini_synthesis" / "openai_synthesis" keys for
            each provider that was requested AND has an initialized client.
        """
        order = _normalize_priority(preferred) or self.default_priority
        results: Dict[str, str] = {}
        for provider in order:
            # "both" fans out to every available provider in one pass,
            # replacing the previously duplicated gemini/openai branches.
            if provider in ("gemini", "both") and self.gemini_client:
                results["gemini_synthesis"] = self.synthesize_with_gemini(
                    [claude_evidence], plan_summary
                )
            if provider in ("openai", "both") and self.openai_client:
                results["openai_synthesis"] = self.synthesize_with_openai(
                    [claude_evidence], plan_summary
                )
        return results