File size: 5,459 Bytes
011336e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cd249cc
 
 
 
 
 
 
 
 
 
011336e
 
 
 
 
 
cd249cc
011336e
 
 
 
 
 
 
 
 
 
 
 
cd249cc
 
011336e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cd249cc
 
 
 
 
 
011336e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cd249cc
011336e
 
cd249cc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
011336e
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
"""Sponsor LLM integrations (Gemini, OpenAI) for cross-evidence synthesis."""

from __future__ import annotations

import os
from typing import Dict, List, Optional

try:
    import google.generativeai as genai
    GEMINI_AVAILABLE = True
except ImportError:
    GEMINI_AVAILABLE = False

try:
    from openai import OpenAI
    OPENAI_AVAILABLE = True
except ImportError:
    OPENAI_AVAILABLE = False


def _normalize_priority(priority: Optional[List[str] | str]) -> List[str]:
    """Normalize preferred sponsor list."""
    if priority is None:
        env_priority = os.getenv("SPONSOR_LLM_PRIORITY", "gemini,openai")
        priority = env_priority
    if isinstance(priority, str):
        priority = [item.strip().lower() for item in priority.split(",") if item.strip()]
    return [p for p in priority if p in {"gemini", "openai", "both"}]


class SponsorLLMClient:
    """Unified interface for sponsor LLMs (Gemini, OpenAI).

    Both providers are initialized opportunistically from environment
    variables; a provider whose SDK is missing or whose API key is unset
    simply stays ``None`` and its synthesis method returns a placeholder
    string instead of raising.
    """

    def __init__(self):
        # Provider handles; None means "not configured / not available".
        self.gemini_client = None
        self.openai_client = None
        # Provider ordering from SPONSOR_LLM_PRIORITY (default gemini,openai).
        self.default_priority = _normalize_priority(None)
        self._init_gemini()
        self._init_openai()

    def _init_gemini(self):
        """Initialize the Google Gemini client, if the SDK and key exist."""
        if not GEMINI_AVAILABLE:
            return

        # Either env var name is accepted for the Gemini key.
        api_key = os.getenv("GOOGLE_API_KEY") or os.getenv("GEMINI_API_KEY")
        if api_key:
            try:
                genai.configure(api_key=api_key)
                model_id = os.getenv("GEMINI_MODEL", "gemini-2.0-flash-exp")
                self.gemini_client = genai.GenerativeModel(model_id)
            except Exception as e:
                # Best-effort: a broken Gemini setup must not break OpenAI.
                print(f"Gemini init failed: {e}")

    def _init_openai(self):
        """Initialize the OpenAI client, if the SDK and key exist."""
        if not OPENAI_AVAILABLE:
            return

        api_key = os.getenv("OPENAI_API_KEY")
        if api_key:
            try:
                self.openai_client = OpenAI(api_key=api_key)
            except Exception as e:
                print(f"OpenAI init failed: {e}")

    @staticmethod
    def _build_prompt(evidence_list: List[str], plan_summary: str) -> str:
        """Build the synthesis prompt shared by both providers."""
        return (
            "As a deployment readiness analyst, synthesize these evidence points"
            f" into actionable insights:\n\nPlan: {plan_summary}\n\nEvidence:\n"
            + "\n".join(f"- {e}" for e in evidence_list)
            + "\n\nProvide a concise synthesis focusing on deployment risks and readiness."
        )

    def synthesize_with_gemini(
        self, evidence_list: List[str], plan_summary: str
    ) -> str:
        """Use Gemini to synthesize evidence into actionable insights.

        Args:
            evidence_list: Evidence strings to summarize.
            plan_summary: One-line description of the deployment plan.

        Returns:
            The synthesized text, or a bracketed placeholder/error string
            when Gemini is unavailable or the call fails.
        """
        if not self.gemini_client:
            return "[Gemini not available] Evidence synthesis skipped."

        prompt = self._build_prompt(evidence_list, plan_summary)

        try:
            # Low temperature for a deterministic, analytical tone.
            # NOTE(review): safety_settings=None presumably means SDK
            # defaults — confirm against the google-generativeai docs.
            response = self.gemini_client.generate_content(
                prompt,
                generation_config={"temperature": 0.2},
                safety_settings=None
            )
            return response.text.strip()
        except Exception as e:
            return f"[Gemini error: {e}]"

    def synthesize_with_openai(
        self, evidence_list: List[str], plan_summary: str
    ) -> str:
        """Use OpenAI to synthesize evidence into actionable insights.

        Args:
            evidence_list: Evidence strings to summarize.
            plan_summary: One-line description of the deployment plan.

        Returns:
            The synthesized text, or a bracketed placeholder/error string
            when OpenAI is unavailable or the call fails.
        """
        if not self.openai_client:
            return "[OpenAI not available] Evidence synthesis skipped."

        prompt = self._build_prompt(evidence_list, plan_summary)

        try:
            response = self.openai_client.chat.completions.create(
                model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"),
                messages=[
                    {"role": "system", "content": "You are a deployment readiness analyst."},
                    {"role": "user", "content": prompt}
                ],
                temperature=0.2,
                max_tokens=500
            )
            return response.choices[0].message.content.strip()
        except Exception as e:
            return f"[OpenAI error: {e}]"

    def cross_validate_evidence(
        self, claude_evidence: str, plan_summary: str, preferred: Optional[List[str] | str] = None
    ) -> Dict[str, str]:
        """Use sponsor LLMs to cross-validate Claude's evidence analysis.

        Args:
            claude_evidence: Claude's evidence text to cross-check.
            plan_summary: One-line description of the deployment plan.
            preferred: Optional provider priority (list or comma string);
                falls back to the instance default from the environment.

        Returns:
            Mapping with ``"gemini_synthesis"`` and/or ``"openai_synthesis"``
            keys for each configured provider that was requested.
        """
        order = _normalize_priority(preferred) or self.default_priority

        # Expand "both" into concrete providers and deduplicate while
        # preserving order, so each provider is invoked at most once even
        # if it appears multiple times in the priority list.
        providers: List[str] = []
        for entry in order:
            for name in (("gemini", "openai") if entry == "both" else (entry,)):
                if name not in providers:
                    providers.append(name)

        results: Dict[str, str] = {}
        for name in providers:
            if name == "gemini" and self.gemini_client:
                results["gemini_synthesis"] = self.synthesize_with_gemini(
                    [claude_evidence], plan_summary
                )
            elif name == "openai" and self.openai_client:
                results["openai_synthesis"] = self.synthesize_with_openai(
                    [claude_evidence], plan_summary
                )

        return results