ladybug11 committed on
Commit
059772d
·
1 Parent(s): d42e715

add gemini quote integration

Browse files
Files changed (3) hide show
  1. app.py +65 -65
  2. quote_generator_gemini.py +286 -0
  3. requirements.txt +2 -1
app.py CHANGED
@@ -12,11 +12,20 @@ import textwrap
12
  import numpy as np
13
  from elevenlabs import ElevenLabs, VoiceSettings
14
 
 
 
 
15
  # Initialize clients
16
  openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
17
  PEXELS_API_KEY = os.getenv("PEXELS_API_KEY")
18
  elevenlabs_client = ElevenLabs(api_key=os.getenv("ELEVENLABS_API_KEY"))
19
 
 
 
 
 
 
 
20
  # Initialize MCP Client (connecting to existing MCP server)
21
  try:
22
  mcp_client = MCPClient("https://abidlabs-mcp-tools.hf.space")
@@ -29,50 +38,35 @@ except Exception as e:
29
  @tool
30
  def generate_quote_tool(niche: str, style: str) -> str:
31
  """
32
- Generate a powerful inspirational quote using OpenAI.
 
33
 
34
  Args:
35
  niche: The category of quote (Motivation, Business, Fitness, etc.)
36
  style: The visual style (Cinematic, Nature, Urban, Minimal, Abstract)
37
 
38
  Returns:
39
- A powerful quote string
40
  """
41
 
42
- prompt = f"""Generate a UNIQUE and powerful {niche} quote suitable for an Instagram/TikTok video.
43
-
44
- Style: {style}
45
-
46
- Requirements:
47
- - 2-4 sentences (can be longer)
48
- - Inspirational and impactful
49
- - Deep and meaningful
50
- - Should resonate deeply with viewers
51
- - Must be DIFFERENT from typical motivational quotes
52
- - Add unexpected wisdom or fresh perspective
53
-
54
- Return ONLY the quote text, nothing else."""
55
-
56
- import time
57
- import random
58
-
59
  try:
60
- # Add randomness with timestamp
61
- seed = int(time.time() * 1000) % 1000
62
-
63
- response = openai_client.chat.completions.create(
64
- model="gpt-4o-mini",
65
- messages=[
66
- {"role": "system", "content": f"You are a quote generator for social media content. Seed: {seed}"},
67
- {"role": "user", "content": prompt}
68
- ],
69
- max_tokens=150,
70
- temperature=0.9 # Higher temperature for more variety
71
- )
72
 
73
- quote = response.choices[0].message.content.strip()
74
- quote = quote.strip('"').strip("'")
75
- return quote
 
 
 
 
 
 
 
 
 
 
 
 
76
 
77
  except Exception as e:
78
  return f"Error generating quote: {str(e)}"
@@ -486,7 +480,7 @@ def mcp_agent_pipeline(niche, style, num_variations=1):
486
  """
487
  MCP-POWERED AUTONOMOUS AGENT PIPELINE
488
  Uses smolagents with proper MCP server integration
489
- Generates multiple video variations
490
  """
491
 
492
  status_log = []
@@ -495,7 +489,7 @@ def mcp_agent_pipeline(niche, style, num_variations=1):
495
  if agent_error:
496
  status_log.append(f"❌ Agent initialization failed: {agent_error}")
497
  status_log.append("\n🔄 Falling back to direct tool execution...\n")
498
- return fallback_pipeline(niche, style, num_variations, add_voice)
499
 
500
  try:
501
  # STEP 1: Agent receives task
@@ -504,8 +498,8 @@ def mcp_agent_pipeline(niche, style, num_variations=1):
504
  status_log.append(f" → Create {num_variations} video variations")
505
  status_log.append("")
506
 
507
- # STEP 2: Agent executes quote generation
508
- status_log.append("🧠 **MCP TOOL: generate_quote_tool**")
509
  quote = generate_quote_tool(niche, style)
510
 
511
  if "Error" in quote:
@@ -529,7 +523,7 @@ def mcp_agent_pipeline(niche, style, num_variations=1):
529
 
530
  status_log.append("")
531
 
532
- # STEP 5: Create multiple video variations
533
  status_log.append(f"🎬 **MCP TOOL: create_quote_video_tool (x{len(video_results)})**")
534
  status_log.append(f" ⏳ Creating {len(video_results)} video variations...")
535
 
@@ -575,17 +569,18 @@ def mcp_agent_pipeline(niche, style, num_variations=1):
575
 
576
  status_log.append("")
577
 
578
- # STEP 6: MCP Server integration status
579
- status_log.append("🔗 **MCP SERVER STATUS:**")
 
 
 
580
  if mcp_enabled:
581
- status_log.append(" ✅ Connected to: abidlabs-mcp-tools.hf.space")
582
- else:
583
- status_log.append(" ⚠️ MCP server connection pending")
584
  status_log.append("")
585
 
586
- # STEP 7: Success!
587
  status_log.append("✨ **PIPELINE COMPLETE!**")
588
- status_log.append(f" 🎬 Created {len(created_videos)} video variations")
589
  status_log.append(f" 📥 Choose your favorite and download!")
590
 
591
  final_status = "\n".join(status_log)
@@ -601,7 +596,7 @@ def fallback_pipeline(niche, style, num_variations=1):
601
  status_log.append("🔄 **FALLBACK MODE (Direct Tool Execution)**\n")
602
 
603
  # Generate quote
604
- status_log.append("🧠 Generating quote...")
605
  quote = generate_quote_tool(niche, style)
606
 
607
  if "Error" in quote:
@@ -668,18 +663,24 @@ def fallback_pipeline(niche, style, num_variations=1):
668
  return "\n".join(status_log), created_videos
669
 
670
  # Gradio Interface
671
- with gr.Blocks(title="AIQuoteClipGenerator - MCP Edition", theme=gr.themes.Soft()) as demo:
672
  gr.Markdown("""
673
  # 🎬 AIQuoteClipGenerator
674
- ### MCP-Powered Autonomous AI Agent
675
 
676
- **MCP Integration Features:**
677
- - 🔗 **MCP Server:** Connected to smolagents framework
678
- - 🛠️ **3 Custom MCP Tools:** Quote generation + Video search + Video creation
 
679
  - 🤖 **Agent Reasoning:** Autonomous task execution
680
- - ⚡ **Tool Orchestration:** Intelligent pipeline management
681
- - 🚀 **Modal Processing:** 4-8x faster video creation
682
  - 🎨 **Multiple Variations:** Get different video styles
 
 
 
 
 
 
683
  """)
684
 
685
  # Example Gallery - Instagram-style grid
@@ -756,7 +757,7 @@ with gr.Blocks(title="AIQuoteClipGenerator - MCP Edition", theme=gr.themes.Soft(
756
  info="Generate multiple versions to choose from"
757
  )
758
 
759
- generate_btn = gr.Button("🤖 Run MCP Agent", variant="primary", size="lg")
760
 
761
  with gr.Column():
762
  gr.Markdown("### 📊 MCP Agent Activity Log")
@@ -773,22 +774,21 @@ with gr.Blocks(title="AIQuoteClipGenerator - MCP Edition", theme=gr.themes.Soft(
773
  gr.Markdown("""
774
  ---
775
  ### ✨ Features
 
776
  - 🎨 **Multiple Variations** - Get 1-3 different videos to choose from
777
  - ⚡ **Modal Processing** - 4-8x faster with serverless compute
778
- - 🎯 **3 MCP Tools** - Quote, Video Search, Video Creation
779
-
780
- ### ✨ MCP Implementation
781
- - ✅ **smolagents Framework** - Proper MCP integration
782
- - ✅ **Custom MCP Tools** - 3 tools working autonomously
783
- - ✅ **CodeAgent** - Autonomous reasoning and execution
784
- - ✅ **MCP Client** - Connected to external MCP servers
785
- - ✅ **MoviePy + PIL** - Professional text overlay
786
- - ✅ **Modal** - Fast serverless video processing
787
 
788
  ### 🏆 Hackathon: MCP 1st Birthday
789
  **Track:** Track 2 - MCP in Action
790
  **Category:** Productivity Tools
791
- **Built with:** Gradio + smolagents + OpenAI + Pexels + Modal + MCP
 
 
 
 
 
 
792
  """)
793
 
794
  def process_and_display(niche, style, num_variations):
 
12
  import numpy as np
13
  from elevenlabs import ElevenLabs, VoiceSettings
14
 
15
+ # Import our new Gemini quote generator
16
+ from quote_generator_gemini import HybridQuoteGenerator
17
+
18
  # Initialize clients
19
  openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
20
  PEXELS_API_KEY = os.getenv("PEXELS_API_KEY")
21
  elevenlabs_client = ElevenLabs(api_key=os.getenv("ELEVENLABS_API_KEY"))
22
 
23
+ # Initialize Hybrid Quote Generator (Gemini + OpenAI fallback)
24
+ hybrid_quote_generator = HybridQuoteGenerator(
25
+ gemini_key=os.getenv("GEMINI_API_KEY"),
26
+ openai_client=openai_client
27
+ )
28
+
29
  # Initialize MCP Client (connecting to existing MCP server)
30
  try:
31
  mcp_client = MCPClient("https://abidlabs-mcp-tools.hf.space")
 
38
  @tool
39
  def generate_quote_tool(niche: str, style: str) -> str:
40
  """
41
+ Generate a powerful inspirational quote using Gemini AI with variety tracking.
42
+ Falls back to OpenAI if Gemini is unavailable.
43
 
44
  Args:
45
  niche: The category of quote (Motivation, Business, Fitness, etc.)
46
  style: The visual style (Cinematic, Nature, Urban, Minimal, Abstract)
47
 
48
  Returns:
49
+ A powerful, unique quote string
50
  """
51
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  try:
53
+ result = hybrid_quote_generator.generate_quote(niche, style, prefer_gemini=True)
 
 
 
 
 
 
 
 
 
 
 
54
 
55
+ if result["success"]:
56
+ quote = result["quote"]
57
+ source = result["source"]
58
+
59
+ # Log which generator was used
60
+ if source == "gemini":
61
+ stats = result.get("stats", {})
62
+ print(f"✨ Generated with Gemini (Total: {stats.get('total_quotes_generated', 0)})")
63
+ else:
64
+ print(f"✨ Generated with OpenAI (fallback)")
65
+
66
+ return quote
67
+ else:
68
+ error_msg = result.get("error", "Unknown error")
69
+ return f"Error generating quote: {error_msg}"
70
 
71
  except Exception as e:
72
  return f"Error generating quote: {str(e)}"
 
480
  """
481
  MCP-POWERED AUTONOMOUS AGENT PIPELINE
482
  Uses smolagents with proper MCP server integration
483
+ Generates multiple video variations with Gemini-powered quotes
484
  """
485
 
486
  status_log = []
 
489
  if agent_error:
490
  status_log.append(f"❌ Agent initialization failed: {agent_error}")
491
  status_log.append("\n🔄 Falling back to direct tool execution...\n")
492
+ return fallback_pipeline(niche, style, num_variations)
493
 
494
  try:
495
  # STEP 1: Agent receives task
 
498
  status_log.append(f" → Create {num_variations} video variations")
499
  status_log.append("")
500
 
501
+ # STEP 2: Agent executes quote generation with Gemini
502
+ status_log.append("🧠 **GEMINI AI: generate_quote_tool**")
503
  quote = generate_quote_tool(niche, style)
504
 
505
  if "Error" in quote:
 
523
 
524
  status_log.append("")
525
 
526
+ # STEP 4: Create multiple video variations
527
  status_log.append(f"🎬 **MCP TOOL: create_quote_video_tool (x{len(video_results)})**")
528
  status_log.append(f" ⏳ Creating {len(video_results)} video variations...")
529
 
 
569
 
570
  status_log.append("")
571
 
572
+ # STEP 5: Integration status
573
+ status_log.append("🔗 **AI INTEGRATIONS:**")
574
+ status_log.append(" ✅ Gemini API - Quote generation with variety tracking")
575
+ status_log.append(" ✅ Pexels API - Video search")
576
+ status_log.append(" ✅ Modal Compute - Fast video processing")
577
  if mcp_enabled:
578
+ status_log.append(" ✅ MCP Server - abidlabs-mcp-tools.hf.space")
 
 
579
  status_log.append("")
580
 
581
+ # STEP 6: Success!
582
  status_log.append("✨ **PIPELINE COMPLETE!**")
583
+ status_log.append(f" 🎬 Created {len(created_videos)} unique video variations")
584
  status_log.append(f" 📥 Choose your favorite and download!")
585
 
586
  final_status = "\n".join(status_log)
 
596
  status_log.append("🔄 **FALLBACK MODE (Direct Tool Execution)**\n")
597
 
598
  # Generate quote
599
+ status_log.append("🧠 Generating quote with Gemini...")
600
  quote = generate_quote_tool(niche, style)
601
 
602
  if "Error" in quote:
 
663
  return "\n".join(status_log), created_videos
664
 
665
  # Gradio Interface
666
+ with gr.Blocks(title="AIQuoteClipGenerator - MCP + Gemini Edition", theme=gr.themes.Soft()) as demo:
667
  gr.Markdown("""
668
  # 🎬 AIQuoteClipGenerator
669
+ ### MCP-Powered with Gemini AI Integration
670
 
671
+ **Key Features:**
672
+ - 🌟 **Gemini AI:** No more repetitive quotes! Smart variety tracking
673
+ - 🔗 **MCP Server:** smolagents framework integration
674
+ - 🛠️ **4 Custom MCP Tools:** Quote (Gemini) + Video search + Voice + Video creation
675
  - 🤖 **Agent Reasoning:** Autonomous task execution
676
+ - ⚡ **Modal Processing:** 4-8x faster video creation
 
677
  - 🎨 **Multiple Variations:** Get different video styles
678
+
679
+ **Prize Eligibility:**
680
+ - ✅ Gemini API Integration ($10K Creative category)
681
+ - ✅ OpenAI Fallback (API Integration $1K credits)
682
+ - ✅ Modal Innovation Award ($2.5K)
683
+ - ✅ ElevenLabs Voice Award (~$2K + AirPods)
684
  """)
685
 
686
  # Example Gallery - Instagram-style grid
 
757
  info="Generate multiple versions to choose from"
758
  )
759
 
760
+ generate_btn = gr.Button("🤖 Run MCP Agent with Gemini", variant="primary", size="lg")
761
 
762
  with gr.Column():
763
  gr.Markdown("### 📊 MCP Agent Activity Log")
 
774
  gr.Markdown("""
775
  ---
776
  ### ✨ Features
777
+ - 🌟 **Gemini AI** - Eliminates repetitive quotes with smart history tracking
778
  - 🎨 **Multiple Variations** - Get 1-3 different videos to choose from
779
  - ⚡ **Modal Processing** - 4-8x faster with serverless compute
780
+ - 🎯 **4 MCP Tools** - Quote (Gemini), Video Search, Voice, Video Creation
 
 
 
 
 
 
 
 
781
 
782
  ### 🏆 Hackathon: MCP 1st Birthday
783
  **Track:** Track 2 - MCP in Action
784
  **Category:** Productivity Tools
785
+ **Built with:** Gradio + smolagents + Gemini + OpenAI + Pexels + Modal + ElevenLabs + MCP
786
+
787
+ **Prize Targets:**
788
+ - Google Gemini Creative Award ($10K)
789
+ - Modal Innovation Award ($2.5K)
790
+ - OpenAI API Integration ($1K credits)
791
+ - ElevenLabs Voice Award (~$2K + AirPods)
792
  """)
793
 
794
  def process_and_display(niche, style, num_variations):
quote_generator_gemini.py ADDED
@@ -0,0 +1,286 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Gemini-powered quote generator with variety tracking
3
+ Eliminates repetitive quotes by maintaining history
4
+ """
5
+ import google.generativeai as genai
6
+ import os
7
+ import json
8
+ import time
9
+ from typing import List, Optional
10
+
11
+ class QuoteGenerator:
12
+ """
13
+ Gemini-powered quote generator with built-in variety tracking.
14
+ Prevents repetitive quotes by maintaining history in persistent storage.
15
+ """
16
+
17
+ def __init__(self, api_key: Optional[str] = None, history_file: str = "/data/quote_history.json"):
18
+ """
19
+ Initialize with Gemini API.
20
+
21
+ Args:
22
+ api_key: Gemini API key (defaults to GEMINI_API_KEY env var)
23
+ history_file: Path to persistent quote history storage
24
+ """
25
+ api_key = api_key or os.getenv("GEMINI_API_KEY")
26
+ if not api_key:
27
+ raise ValueError("GEMINI_API_KEY not found in environment variables")
28
+
29
+ genai.configure(api_key=api_key)
30
+ self.model = genai.GenerativeModel('gemini-pro')
31
+
32
+ # Persistent storage for quote history
33
+ self.history_file = history_file
34
+ self.recent_quotes = self._load_history()
35
+ self.max_history = 100 # Keep last 100 quotes
36
+
37
+ def _load_history(self) -> List[str]:
38
+ """Load quote history from persistent storage"""
39
+ try:
40
+ if os.path.exists(self.history_file):
41
+ with open(self.history_file, 'r') as f:
42
+ data = json.load(f)
43
+ return data.get('quotes', [])
44
+ except Exception as e:
45
+ print(f"Could not load history: {e}")
46
+ return []
47
+
48
+ def _save_history(self):
49
+ """Save quote history to persistent storage"""
50
+ try:
51
+ os.makedirs(os.path.dirname(self.history_file), exist_ok=True)
52
+ with open(self.history_file, 'w') as f:
53
+ json.dump({'quotes': self.recent_quotes[-self.max_history:]}, f)
54
+ except Exception as e:
55
+ print(f"Could not save history: {e}")
56
+
57
+ def generate_quote(self, niche: str, style: str) -> str:
58
+ """
59
+ Generate a unique quote using Gemini with variety enforcement.
60
+
61
+ Args:
62
+ niche: Quote category (Motivation, Business, etc.)
63
+ style: Visual style (Cinematic, Nature, etc.)
64
+
65
+ Returns:
66
+ A unique quote string
67
+ """
68
+
69
+ # Get recent quotes to avoid (last 20)
70
+ recent_quotes_text = ""
71
+ if self.recent_quotes:
72
+ recent_quotes_text = "\n\nPREVIOUSLY GENERATED (DO NOT REPEAT OR PARAPHRASE THESE):\n"
73
+ for i, quote in enumerate(self.recent_quotes[-20:], 1):
74
+ recent_quotes_text += f"{i}. {quote}\n"
75
+
76
+ # Build prompt with variety instructions
77
+ prompt = f"""Generate a UNIQUE and powerful {niche} quote suitable for an Instagram/TikTok video.
78
+
79
+ Visual Style: {style}
80
+
81
+ CRITICAL REQUIREMENTS:
82
+ - 2-4 sentences maximum (can be longer if deeply meaningful)
83
+ - Must be COMPLETELY DIFFERENT from all previously generated quotes
84
+ - Inspirational and impactful
85
+ - Deep and meaningful insights
86
+ - Fresh perspective that viewers haven't heard before
87
+ - Unexpected wisdom or unique angle
88
+ - DO NOT use clichés or overused phrases
89
+
90
+ {recent_quotes_text}
91
+
92
+ IMPORTANT: Your quote must be COMPLETELY ORIGINAL and DIFFERENT in:
93
+ - Core message and theme
94
+ - Wording and phrasing
95
+ - Perspective and angle
96
+ - Examples or metaphors used
97
+
98
+ Return ONLY the quote text, nothing else. No quotation marks, no attribution."""
99
+
100
+ try:
101
+ # Add timestamp-based variation
102
+ seed_value = int(time.time() * 1000) % 10000
103
+
104
+ # Generate with high temperature for maximum variety
105
+ response = self.model.generate_content(
106
+ prompt,
107
+ generation_config={
108
+ "temperature": 1.0, # Maximum creativity
109
+ "top_p": 0.95,
110
+ "top_k": 64,
111
+ "max_output_tokens": 200,
112
+ }
113
+ )
114
+
115
+ quote = response.text.strip()
116
+ quote = quote.strip('"').strip("'").strip()
117
+
118
+ # Add to history and save
119
+ self.recent_quotes.append(quote)
120
+ self._save_history()
121
+
122
+ return quote
123
+
124
+ except Exception as e:
125
+ raise Exception(f"Gemini quote generation failed: {str(e)}")
126
+
127
+ def get_stats(self) -> dict:
128
+ """Get statistics about quote generation"""
129
+ return {
130
+ "total_quotes_generated": len(self.recent_quotes),
131
+ "unique_quotes": len(set(self.recent_quotes)),
132
+ "history_size": len(self.recent_quotes[-self.max_history:])
133
+ }
134
+
135
+ def clear_history(self):
136
+ """Clear quote history (use with caution)"""
137
+ self.recent_quotes = []
138
+ self._save_history()
139
+
140
+
141
+ # Hybrid generator with fallback
142
+ class HybridQuoteGenerator:
143
+ """
144
+ Hybrid system using Gemini as primary, OpenAI as fallback.
145
+ Maximizes prize eligibility while ensuring reliability.
146
+ """
147
+
148
+ def __init__(self, gemini_key: Optional[str] = None, openai_client = None):
149
+ """
150
+ Initialize hybrid generator.
151
+
152
+ Args:
153
+ gemini_key: Gemini API key
154
+ openai_client: OpenAI client instance (for fallback)
155
+ """
156
+ self.openai_client = openai_client
157
+
158
+ try:
159
+ self.gemini_generator = QuoteGenerator(api_key=gemini_key)
160
+ self.gemini_available = True
161
+ print("✅ Gemini generator initialized")
162
+ except Exception as e:
163
+ self.gemini_available = False
164
+ print(f"⚠️ Gemini not available: {e}")
165
+
166
+ def generate_quote(self, niche: str, style: str, prefer_gemini: bool = True) -> dict:
167
+ """
168
+ Generate quote with automatic fallback.
169
+
170
+ Args:
171
+ niche: Quote category
172
+ style: Visual style
173
+ prefer_gemini: Try Gemini first if True
174
+
175
+ Returns:
176
+ Dict with quote, source, and metadata
177
+ """
178
+
179
+ # Try Gemini first
180
+ if prefer_gemini and self.gemini_available:
181
+ try:
182
+ quote = self.gemini_generator.generate_quote(niche, style)
183
+ stats = self.gemini_generator.get_stats()
184
+ return {
185
+ "quote": quote,
186
+ "source": "gemini",
187
+ "stats": stats,
188
+ "success": True
189
+ }
190
+ except Exception as e:
191
+ print(f"⚠️ Gemini failed, falling back to OpenAI: {e}")
192
+
193
+ # Fallback to OpenAI
194
+ if self.openai_client:
195
+ try:
196
+ quote = self._generate_openai(niche, style)
197
+ return {
198
+ "quote": quote,
199
+ "source": "openai",
200
+ "stats": None,
201
+ "success": True
202
+ }
203
+ except Exception as e:
204
+ return {
205
+ "quote": None,
206
+ "source": None,
207
+ "error": f"Both generators failed: {str(e)}",
208
+ "success": False
209
+ }
210
+
211
+ return {
212
+ "quote": None,
213
+ "source": None,
214
+ "error": "No generator available",
215
+ "success": False
216
+ }
217
+
218
+ def _generate_openai(self, niche: str, style: str) -> str:
219
+ """OpenAI fallback generator"""
220
+ prompt = f"""Generate a UNIQUE and powerful {niche} quote suitable for an Instagram/TikTok video.
221
+
222
+ Style: {style}
223
+
224
+ Requirements:
225
+ - 2-4 sentences (can be longer)
226
+ - Inspirational and impactful
227
+ - Deep and meaningful
228
+ - Should resonate deeply with viewers
229
+ - Must be DIFFERENT from typical motivational quotes
230
+ - Add unexpected wisdom or fresh perspective
231
+
232
+ Return ONLY the quote text, nothing else."""
233
+
234
+ seed = int(time.time() * 1000) % 1000
235
+
236
+ response = self.openai_client.chat.completions.create(
237
+ model="gpt-4o-mini",
238
+ messages=[
239
+ {"role": "system", "content": f"You are a quote generator. Seed: {seed}"},
240
+ {"role": "user", "content": prompt}
241
+ ],
242
+ max_tokens=150,
243
+ temperature=0.9
244
+ )
245
+
246
+ quote = response.choices[0].message.content.strip()
247
+ return quote.strip('"').strip("'")
248
+
249
+
250
+ # Integration function for smolagents tool
251
+ def create_hybrid_generator(openai_client):
252
+ """
253
+ Create hybrid generator instance for use in app.
254
+ Call this once at startup.
255
+ """
256
+ return HybridQuoteGenerator(
257
+ gemini_key=os.getenv("GEMINI_API_KEY"),
258
+ openai_client=openai_client
259
+ )
260
+
261
+
262
+ # Example usage and testing
263
+ if __name__ == "__main__":
264
+ # Test Gemini generator
265
+ print("Testing Gemini Quote Generator with Variety Tracking\n")
266
+ print("="*60)
267
+
268
+ try:
269
+ generator = QuoteGenerator()
270
+
271
+ # Generate 5 quotes to show variety
272
+ print("\nGenerating 5 quotes about Motivation/Cinematic:\n")
273
+ for i in range(5):
274
+ quote = generator.generate_quote("Motivation", "Cinematic")
275
+ print(f"{i+1}. {quote}\n")
276
+
277
+ # Show stats
278
+ stats = generator.get_stats()
279
+ print("\nStats:")
280
+ print(f" Total generated: {stats['total_quotes_generated']}")
281
+ print(f" Unique quotes: {stats['unique_quotes']}")
282
+ print(f" History size: {stats['history_size']}")
283
+
284
+ except Exception as e:
285
+ print(f"Error: {e}")
286
+ print("\nMake sure GEMINI_API_KEY is set in environment variables")
requirements.txt CHANGED
@@ -11,4 +11,5 @@ proglog
11
  numpy
12
  Pillow
13
  elevenlabs
14
- modal
 
 
11
  numpy
12
  Pillow
13
  elevenlabs
14
+ modal
15
+ google-generativeai==0.8.3