"""
Gemini Client - Wrapper for Google Gen AI SDK
Includes mock implementation for testing
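
Example (a minimal sketch; assumes GEMINI_API_KEY is set in the environment
and that this file is importable as gemini_client):

    import os
    from gemini_client import GeminiClient

    client = GeminiClient(api_key=os.environ.get("GEMINI_API_KEY"))
    for chunk in client.stream_chat("Hello!"):
        print(chunk, end="", flush=True)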
"""

import time
import random
from typing import Generator, Optional


class MockGeminiClient:
    """
    Mock Gemini client for testing without API key
    Simulates streaming responses with realistic delays
    """
    
    # Pre-defined responses based on sentiment/keywords
    RESPONSES = {
        "happy": [
            "That's wonderful to hear! It's great when things are going well. ",
            "Positive energy is contagious! Keep that great attitude going. ",
            "I'm glad you're feeling good! What's making your day special?"
        ],
        "sad": [
            "I'm sorry to hear you're feeling down. ",
            "It's okay to have difficult days. Would you like to talk about it? ",
            "I understand that can be tough. Remember, things often get better."
        ],
        "angry": [
            "I can sense your frustration. Let's work through this together. ",
            "That does sound frustrating. Take a deep breath. ",
            "I understand why that would be upsetting. How can I help?"
        ],
        "question": [
            "That's a great question! Let me explain. ",
            "I'd be happy to help you understand this. ",
            "Excellent question! Here's what you need to know. "
        ],
        "default": [
            "I understand. Let me help you with that. ",
            "Thank you for sharing. Here's my response. ",
            "I see what you mean. Let me provide some insight. "
        ]
    }
    
    EXPLANATIONS = {
        "photosynthesis": (
            "Photosynthesis is the process by which plants convert sunlight into energy. "
            "The plant absorbs light through chlorophyll in its leaves, "
            "combines it with water from the roots and carbon dioxide from the air, "
            "and produces glucose for energy and oxygen as a byproduct. "
            "It's essentially how plants make their own food!"
        ),
        "default_explanation": (
            "This is a fascinating topic! The key concepts involve understanding "
            "the fundamental principles and how they interact with each other. "
            "Would you like me to go into more detail on any specific aspect?"
        )
    }
    
    def __init__(self, api_key: Optional[str] = None):
        self.api_key = api_key
        self.is_mock = api_key is None or api_key == "" or api_key == "mock"
        self.chat_history = []
    
    def _detect_intent(self, message: str) -> str:
        """Detect the intent/sentiment of the message"""
        message_lower = message.lower()
        
        if any(word in message_lower for word in ["happy", "great", "wonderful", "love", "excited", "amazing"]):
            return "happy"
        elif any(word in message_lower for word in ["sad", "upset", "down", "depressed", "unhappy"]):
            return "sad"
        elif any(word in message_lower for word in ["angry", "frustrated", "annoying", "hate", "frustrating"]):
            return "angry"
        elif "?" in message or any(word in message_lower for word in ["how", "what", "why", "explain", "can you"]):
            return "question"
        else:
            return "default"
    
    def _generate_mock_response(self, message: str) -> str:
        """Generate a contextual mock response"""
        intent = self._detect_intent(message)
        
        # Check for specific topics
        if "photosynthesis" in message.lower():
            base = random.choice(self.RESPONSES["question"])
            return base + self.EXPLANATIONS["photosynthesis"]
        
        base_response = random.choice(self.RESPONSES[intent])
        
        # Add some contextual follow-up
        if intent == "happy":
            base_response += "Your positive energy really shines through! 😊"
        elif intent == "sad":
            base_response += "I'm here to listen and help however I can."
        elif intent == "angry":
            base_response += "Let's see if we can find a solution together."
        elif intent == "question":
            base_response += self.EXPLANATIONS["default_explanation"]
        else:
            base_response += "Is there anything specific you'd like to explore further?"
        
        return base_response
    
    def stream_chat(self, message: str) -> Generator[str, None, None]:
        """
        Stream a chat response, simulating token-by-token generation
        
        Args:
            message: The user's input message
            
        Yields:
            String chunks of the response
        """
        self.chat_history.append({"role": "user", "content": message})
        
        response = self._generate_mock_response(message)
        
        # Simulate streaming by yielding word by word
        words = response.split(" ")
        
        for i, word in enumerate(words):
            # Add space before word (except first)
            if i > 0:
                yield " "
            yield word
            
            # Simulate network delay
            time.sleep(random.uniform(0.02, 0.08))
        
        self.chat_history.append({"role": "assistant", "content": response})
    
    def generate_content(self, message: str) -> str:
        """
        Generate a complete response (non-streaming)
        
        Args:
            message: The user's input message
            
        Returns:
            Complete response string
        """
        self.chat_history.append({"role": "user", "content": message})
        response = self._generate_mock_response(message)
        self.chat_history.append({"role": "assistant", "content": response})
        return response
    
    def clear_history(self):
        """Clear chat history"""
        self.chat_history = []


class GeminiClient:
    """
    Gemini Client with automatic fallback to mock
    Uses real API when key is provided, mock otherwise
    """
    
    MODEL_NAME = "gemini-2.5-flash-lite"  # Cost-effective, fast model
    
    def __init__(self, api_key: Optional[str] = None):
        self.api_key = api_key
        self._client = None
        self._chat = None
        self.is_mock = False
        
        if api_key and api_key not in ["", "mock", "test"]:
            try:
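                # Google Gen AI SDK (installed via the google-genai package)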
                from google import genai
                self._client = genai.Client(api_key=api_key)
                self._chat = self._client.chats.create(model=self.MODEL_NAME)
                print(f"✅ Using real Gemini API (model: {self.MODEL_NAME})")
            except Exception as e:
                print(f"⚠️ Failed to initialize Gemini API: {e}")
                print("📝 Falling back to mock client")
                self.is_mock = True
                self._mock = MockGeminiClient()
        else:
            print("📝 Using mock Gemini client (no API key provided)")
            self.is_mock = True
            self._mock = MockGeminiClient()
    
    def stream_chat(self, message: str) -> Generator[str, None, None]:
        """
        Stream a chat response
        
        Args:
            message: The user's input message
            
        Yields:
            String chunks of the response
        """
        if self.is_mock:
            yield from self._mock.stream_chat(message)
        else:
            try:
                for chunk in self._chat.send_message_stream(message):
                    if chunk.text:
                        yield chunk.text
            except Exception as e:
                print(f"⚠️ Gemini API error: {e}")
                # Fallback to mock on error
                yield from MockGeminiClient().stream_chat(message)
    
    def generate_content(self, message: str) -> str:
        """
        Generate a complete response (non-streaming)
        
        Args:
            message: The user's input message
            
        Returns:
            Complete response string
        """
        if self.is_mock:
            return self._mock.generate_content(message)
        else:
            try:
                response = self._chat.send_message(message)
                return response.text
            except Exception as e:
                print(f"⚠️ Gemini API error: {e}")
                return MockGeminiClient().generate_content(message)
    
    def reset_chat(self):
        """Reset the chat session"""
        if self.is_mock:
            self._mock.clear_history()
        else:
            try:
                # Start a fresh chat session on the existing client
                self._chat = self._client.chats.create(model=self.MODEL_NAME)
            except Exception:
                pass


# Testing
if __name__ == "__main__":
    print("=" * 50)
    print("Testing Gemini Client (Mock Mode)")
    print("=" * 50)
    
    client = GeminiClient()  # No API key = mock mode
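    # To exercise the real API instead (assumes the google-genai package is
    # installed and GEMINI_API_KEY is set in the environment):
    #     import os
    #     client = GeminiClient(api_key=os.environ.get("GEMINI_API_KEY"))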
    
    test_messages = [
        "Hello! I'm so happy today!",
        "I'm feeling frustrated with this problem.",
        "Can you explain how photosynthesis works?",
        "This is just a neutral statement.",
    ]
    
    for msg in test_messages:
        print(f"\n👤 User: {msg}")
        print("🤖 AI: ", end="", flush=True)
        
        for chunk in client.stream_chat(msg):
            print(chunk, end="", flush=True)
        
        print("\n" + "-" * 40)
    
    print("\n✅ Mock client test completed!")