admin08077 committed on
Commit
5f2cf90
·
verified ·
1 Parent(s): 5196f44

Update services/geminiService.ts

Browse files
Files changed (1) hide show
  1. services/geminiService.ts +100 -111
services/geminiService.ts CHANGED
@@ -1,12 +1,6 @@
1
 
2
- import { GoogleGenAI, Type, Modality } from "@google/genai";
3
  import { SimulationResult, AIInsight } from "../types/index";
4
 
5
- // Direct initialization as per instructions
6
- const getAI = () => new GoogleGenAI({ apiKey: process.env.API_KEY as string });
7
-
8
- export { Type, Modality };
9
-
10
  export const TTS_LANGUAGES = [
11
  { name: 'English', code: 'en' }, { name: 'French', code: 'fr' }, { name: 'German', code: 'de' },
12
  { name: 'Spanish', code: 'es' }, { name: 'Portuguese', code: 'pt' }, { name: 'Chinese', code: 'zh' },
@@ -18,20 +12,42 @@ export const TTS_VOICES = [
18
  { name: 'Kore', style: 'Firm' }, { name: 'Fenrir', style: 'Excitable' }, { name: 'Leda', style: 'Youthful' }
19
  ];
20
 
21
- function decodeBase64(base64: string) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  const binaryString = atob(base64);
23
- const bytes = new Uint8Array(binaryString.length);
24
- for (let i = 0; i < binaryString.length; i++) {
 
25
  bytes[i] = binaryString.charCodeAt(i);
26
  }
27
  return bytes;
28
  }
29
 
30
- async function decodeAudioData(data: Uint8Array, ctx: AudioContext, sampleRate: number, numChannels: number): Promise<AudioBuffer> {
31
- const byteLen = data.byteLength - (data.byteLength % 2);
32
- const dataInt16 = new Int16Array(data.buffer, 0, byteLen / 2);
 
 
 
 
 
33
  const frameCount = dataInt16.length / numChannels;
34
  const buffer = ctx.createBuffer(numChannels, frameCount, sampleRate);
 
35
  for (let channel = 0; channel < numChannels; channel++) {
36
  const channelData = buffer.getChannelData(channel);
37
  for (let i = 0; i < frameCount; i++) {
@@ -41,141 +57,114 @@ async function decodeAudioData(data: Uint8Array, ctx: AudioContext, sampleRate:
41
  return buffer;
42
  }
43
 
44
- let audioContext: AudioContext | null = null;
45
- export const getAudioContext = () => {
46
- if (!audioContext) {
47
- audioContext = new (window.AudioContext || (window as any).webkitAudioContext)({ sampleRate: 24000 });
48
- }
49
- return audioContext;
50
- };
51
-
52
- // fix: Added support for multi-speaker synthesis and updated config type signature
53
- export const synthesizeSpeech = async (config: {
54
- text: string,
55
- voiceName: string,
56
- directorNotes?: string,
57
- multiSpeaker?: { speaker1: string, voice1: string, speaker2: string, voice2: string }
58
  }) => {
59
  try {
60
- const ai = getAI();
61
- const promptText = config.directorNotes ? `${config.directorNotes} ${config.text}` : config.text;
62
 
63
- // fix: Define speechConfig based on presence of multi-speaker configuration
64
- const speechConfig: any = config.multiSpeaker ? {
65
- multiSpeakerVoiceConfig: {
66
- speakerVoiceConfigs: [
67
- {
68
- speaker: config.multiSpeaker.speaker1,
69
- voiceConfig: { prebuiltVoiceConfig: { voiceName: config.multiSpeaker.voice1 } }
70
- },
71
- {
72
- speaker: config.multiSpeaker.speaker2,
73
- voiceConfig: { prebuiltVoiceConfig: { voiceName: config.multiSpeaker.voice2 } }
74
- }
75
- ]
76
- }
77
- } : {
78
- voiceConfig: { prebuiltVoiceConfig: { voiceName: config.voiceName } }
79
- };
80
-
81
- const response = await ai.models.generateContent({
82
- model: "gemini-2.5-flash-preview-tts",
83
- contents: [{ parts: [{ text: promptText }] }],
84
- config: {
85
- responseModalities: [Modality.AUDIO],
86
- speechConfig
87
- }
 
 
88
  });
89
- const base64Audio = response.candidates?.[0]?.content?.parts?.[0]?.inlineData?.data;
 
 
 
90
  if (base64Audio) {
91
- const ctx = getAudioContext();
92
- if (ctx.state === 'suspended') await ctx.resume();
93
- const audioBuffer = await decodeAudioData(decodeBase64(base64Audio), ctx, 24000, 1);
94
- const source = ctx.createBufferSource();
95
  source.buffer = audioBuffer;
96
- source.connect(ctx.destination);
97
  source.start();
98
  return true;
99
  }
 
100
  } catch (error) {
101
- console.error("Advanced Synthesis failure:", error);
 
102
  }
103
- return false;
104
  };
105
 
106
- export const speakText = async (text: string) => synthesizeSpeech({ text, voiceName: 'Zephyr' });
107
-
108
- export const callGemini = async (model: string, contents: any, config: any = {}) => {
109
- const ai = getAI();
110
- const normalizedContents = typeof contents === 'string' ? [{ parts: [{ text: contents }] }] :
111
- (Array.isArray(contents) ? contents : [contents]);
112
- return await ai.models.generateContent({
113
- model: model || 'gemini-3-flash-preview',
114
- contents: normalizedContents,
115
- config
116
- });
117
  };
118
 
119
  export const processVoiceCommand = async (command: string) => {
120
  try {
121
- const prompt = `You are the Lumina Neural Parser. Analyze: "${command}". Extract amount, recipient, category. Return ONLY JSON: { "action": "SEND_MONEY", "amount": number, "recipient": string, "category": string, "narration": "Confirming dispatch..." }`;
122
- const response = await callGemini('gemini-3-flash-preview', prompt, { responseMimeType: "application/json" });
123
- return JSON.parse(response.text || '{}');
124
  } catch (error) {
125
- return { action: "ERROR", narration: "Communication link unstable." };
126
  }
127
  };
128
 
129
  export const getFinancialAdviceStream = async (query: string, context: any) => {
130
- const ai = getAI();
131
- return await ai.models.generateContentStream({
132
- model: 'gemini-3-flash-preview',
133
- contents: [{ parts: [{ text: `Context: ${JSON.stringify(context)}. User Query: ${query}` }] }],
134
- config: { systemInstruction: "You are the Lumina Quantum Financial Advisor. Be professional, concise, and technically accurate." }
135
- });
136
  };
137
 
138
- // fix: Implemented getSystemIntelligenceFeed missing in services/geminiService.ts
139
  export const getSystemIntelligenceFeed = async (): Promise<AIInsight[]> => {
140
  try {
141
- const ai = getAI();
142
- const response = await ai.models.generateContent({
143
- model: 'gemini-3-flash-preview',
144
- contents: [{ parts: [{ text: "Generate 4 brief institutional financial intelligence alerts for a quantum ledger. Format as JSON array: [{title, description, severity: 'INFO'|'CRITICAL'}]" }] }],
145
- config: { responseMimeType: "application/json" }
146
- });
147
- return JSON.parse(response.text || '[]');
148
- } catch (error) {
149
- console.error("Intelligence feed failure:", error);
150
- return [
151
- { id: '1', title: "Node Sync Active", description: "All global registry nodes reporting stable parity.", severity: "INFO" }
152
- ];
153
  }
154
  };
155
 
156
  export const runSimulationForecast = async (prompt: string): Promise<SimulationResult> => {
157
  try {
158
- const ai = getAI();
159
- const response = await ai.models.generateContent({
160
- model: 'gemini-3-flash-preview',
161
- contents: [{ parts: [{ text: `Perform financial simulation for: ${prompt}. Return JSON.` }] }],
162
- config: { responseMimeType: "application/json" }
163
- });
164
- return JSON.parse(response.text || '{}');
165
- } catch (error) {
166
- return { outcomeNarrative: "Simulation failed.", projectedValue: 0, confidenceScore: 0, status: "ERROR", simulationId: "ERR_A1" };
167
  }
168
  };
169
 
170
  export const getPortfolioSuggestions = async (context: any) => {
171
  try {
172
- const ai = getAI();
173
- const response = await ai.models.generateContent({
174
- model: 'gemini-3-flash-preview',
175
- contents: [{ parts: [{ text: `Strategize for: ${JSON.stringify(context)}. Return 3 strategies as JSON array.` }] }],
176
- config: { responseMimeType: "application/json" }
177
- });
178
- return JSON.parse(response.text || '[]');
179
  } catch {
180
  return [];
181
  }
 
1
 
 
2
  import { SimulationResult, AIInsight } from "../types/index";
3
 
 
 
 
 
 
4
  export const TTS_LANGUAGES = [
5
  { name: 'English', code: 'en' }, { name: 'French', code: 'fr' }, { name: 'German', code: 'de' },
6
  { name: 'Spanish', code: 'es' }, { name: 'Portuguese', code: 'pt' }, { name: 'Chinese', code: 'zh' },
 
12
  { name: 'Kore', style: 'Firm' }, { name: 'Fenrir', style: 'Excitable' }, { name: 'Leda', style: 'Youthful' }
13
  ];
14
 
15
+ export const callGemini = async (model: string, contents: any, config: any = {}) => {
16
+ const normalizedContents = typeof contents === 'string' ? [{ parts: [{ text: contents }] }] :
17
+ (Array.isArray(contents) ? contents : [contents]);
18
+
19
+ const response = await fetch('/api/gemini/generate', {
20
+ method: 'POST',
21
+ headers: { 'Content-Type': 'application/json' },
22
+ body: JSON.stringify({ model, contents: normalizedContents, config })
23
+ });
24
+
25
+ if (!response.ok) throw new Error('AI Bridge Failed');
26
+ return await response.json();
27
+ };
28
+
29
+ // Helper function for base64 decoding (manual implementation as per guidelines)
30
+ function decode(base64: string) {
31
  const binaryString = atob(base64);
32
+ const len = binaryString.length;
33
+ const bytes = new Uint8Array(len);
34
+ for (let i = 0; i < len; i++) {
35
  bytes[i] = binaryString.charCodeAt(i);
36
  }
37
  return bytes;
38
  }
39
 
40
+ // Helper function for audio decoding (manual implementation as per guidelines)
41
+ async function decodeAudioData(
42
+ data: Uint8Array,
43
+ ctx: AudioContext,
44
+ sampleRate: number,
45
+ numChannels: number,
46
+ ): Promise<AudioBuffer> {
47
+ const dataInt16 = new Int16Array(data.buffer);
48
  const frameCount = dataInt16.length / numChannels;
49
  const buffer = ctx.createBuffer(numChannels, frameCount, sampleRate);
50
+
51
  for (let channel = 0; channel < numChannels; channel++) {
52
  const channelData = buffer.getChannelData(channel);
53
  for (let i = 0; i < frameCount; i++) {
 
57
  return buffer;
58
  }
59
 
60
+ /**
61
+ * Synthesizes speech using gemini-2.5-flash-preview-tts
62
+ */
63
+ export const synthesizeSpeech = async (params: {
64
+ text: string;
65
+ voiceName: string;
66
+ directorNotes?: string;
67
+ multiSpeaker?: {
68
+ speaker1: string;
69
+ voice1: string;
70
+ speaker2: string;
71
+ voice2: string;
72
+ };
 
73
  }) => {
74
  try {
75
+ const prompt = params.directorNotes ? `${params.directorNotes} Text: ${params.text}` : params.text;
 
76
 
77
+ let speechConfig: any = {};
78
+ if (params.multiSpeaker) {
79
+ speechConfig = {
80
+ multiSpeakerVoiceConfig: {
81
+ speakerVoiceConfigs: [
82
+ {
83
+ speaker: params.multiSpeaker.speaker1,
84
+ voiceConfig: { prebuiltVoiceConfig: { voiceName: params.multiSpeaker.voice1 } }
85
+ },
86
+ {
87
+ speaker: params.multiSpeaker.speaker2,
88
+ voiceConfig: { prebuiltVoiceConfig: { voiceName: params.multiSpeaker.voice2 } }
89
+ }
90
+ ]
91
+ }
92
+ };
93
+ } else {
94
+ speechConfig = {
95
+ voiceConfig: {
96
+ prebuiltVoiceConfig: { voiceName: params.voiceName },
97
+ },
98
+ };
99
+ }
100
+
101
+ const data = await callGemini('gemini-2.5-flash-preview-tts', prompt, {
102
+ responseModalities: ["AUDIO"],
103
+ speechConfig
104
  });
105
+
106
+ // Extract binary audio data from response candidates
107
+ const base64Audio = data.candidates?.[0]?.content?.parts?.find((p: any) => p.inlineData)?.inlineData?.data;
108
+
109
  if (base64Audio) {
110
+ const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)({ sampleRate: 24000 });
111
+ const audioBuffer = await decodeAudioData(decode(base64Audio), audioContext, 24000, 1);
112
+ const source = audioContext.createBufferSource();
 
113
  source.buffer = audioBuffer;
114
+ source.connect(audioContext.destination);
115
  source.start();
116
  return true;
117
  }
118
+ return false;
119
  } catch (error) {
120
+ console.error("Speech synthesis failed:", error);
121
+ return false;
122
  }
 
123
  };
124
 
125
+ export const speakText = async (text: string) => {
126
+ // Use synthesizeSpeech with default voice
127
+ return synthesizeSpeech({ text, voiceName: 'Zephyr' });
 
 
 
 
 
 
 
 
128
  };
129
 
130
  export const processVoiceCommand = async (command: string) => {
131
  try {
132
+ const prompt = `Analyze: "${command}". Return JSON: { "action": "SEND_MONEY", "amount": number, "recipient": string, "category": string, "narration": "Confirming..." }`;
133
+ const data = await callGemini('gemini-3-flash-preview', prompt, { responseMimeType: "application/json" });
134
+ return JSON.parse(data.text || '{}');
135
  } catch (error) {
136
+ return { action: "ERROR", narration: "Link unstable." };
137
  }
138
  };
139
 
140
  export const getFinancialAdviceStream = async (query: string, context: any) => {
141
+ // Non-streaming fallback for proxy
142
+ const data = await callGemini('gemini-3-flash-preview', `Context: ${JSON.stringify(context)}. User: ${query}`);
143
+ return [{ text: data.text }];
 
 
 
144
  };
145
 
 
146
  export const getSystemIntelligenceFeed = async (): Promise<AIInsight[]> => {
147
  try {
148
+ const data = await callGemini('gemini-3-flash-preview', "Generate 4 brief alerts JSON: [{title, description, severity: 'INFO'|'CRITICAL'}]", { responseMimeType: "application/json" });
149
+ return JSON.parse(data.text || '[]');
150
+ } catch {
151
+ return [{ id: '1', title: "Node Sync Active", description: "Operational parity.", severity: "INFO" }];
 
 
 
 
 
 
 
 
152
  }
153
  };
154
 
155
  export const runSimulationForecast = async (prompt: string): Promise<SimulationResult> => {
156
  try {
157
+ const data = await callGemini('gemini-3-flash-preview', `Simulate: ${prompt}. Return JSON.`, { responseMimeType: "application/json" });
158
+ return JSON.parse(data.text || '{}');
159
+ } catch {
160
+ return { outcomeNarrative: "Failed.", projectedValue: 0, confidenceScore: 0, status: "ERROR", simulationId: "ERR_A1" };
 
 
 
 
 
161
  }
162
  };
163
 
164
  export const getPortfolioSuggestions = async (context: any) => {
165
  try {
166
+ const data = await callGemini('gemini-3-flash-preview', `Strategize: ${JSON.stringify(context)}. JSON array of 3 strategies.`, { responseMimeType: "application/json" });
167
+ return JSON.parse(data.text || '[]');
 
 
 
 
 
168
  } catch {
169
  return [];
170
  }