aarnal80 committed on
Commit
de7af8e
verified
1 Parent(s): 2a7dc49

Update js/iaConfigModule.js

Files changed (1)
  1. js/iaConfigModule.js +197 -249
js/iaConfigModule.js CHANGED
@@ -1,277 +1,225 @@
- // js/iaConfigModule.js
- const defaultConfig = {
-   llm: {
-     provider: "deepseek",
-     apiKeys: { deepseek: "", openai: "" },
-     model: "deepseek-chat"
-   },
-   transcription: {
-     provider: "openai",
-     apiKeys: { openai: "", deepgram: "" },
-     models: { openai: "whisper-1", deepgram: "nova-2" }
-   }
- };
-
- // Model list updated (2025)
- export const llmProviders = [
-   {
-     name: "OpenAI",
-     value: "openai",
-     models: [
-       // GPT-5 (common IDs)
-       "gpt-5",
-       "gpt-5-mini",
-       "gpt-5-nano",
-       "gpt-5-chat-latest",
-       // Other reference models
-       "gpt-4o-mini-2024-07-18",
-       "chatgpt-4o-latest",
-       "o1-mini-2024-09-12",
-       "o4-mini-2025-04-16"
-     ],
-     url: "https://api.openai.com"
-   },
-   {
-     name: "DeepSeek",
-     value: "deepseek",
-     models: ["deepseek-chat", "deepseek-reasoner"],
-     url: "https://api.deepseek.com"
-   }
- ];
 
- export const transcriptionProviders = [
-   { name: "OpenAI Whisper", value: "openai", models: ["whisper-1"], url: "https://api.openai.com" },
-   { name: "Deepgram", value: "deepgram", models: ["nova-2", "whisper-large"], url: "https://api.deepgram.com" }
- ];
-
- function saveConfig(config) {
-   localStorage.setItem("iaConfig", JSON.stringify(config));
  }
 
- function loadConfig() {
-   const config = JSON.parse(localStorage.getItem("iaConfig")) || structuredClone(defaultConfig);
-
-   // Migrate the legacy transcription config
-   if (config.transcription.apiKey !== undefined) {
-     const oldKey = config.transcription.apiKey;
-     const oldModel = config.transcription.model;
-     config.transcription.apiKeys = { [config.transcription.provider]: oldKey, deepgram: "" };
-     config.transcription.models = { [config.transcription.provider]: oldModel, deepgram: "nova-2" };
-     delete config.transcription.apiKey;
-     delete config.transcription.model;
-     saveConfig(config);
    }
 
-   // Migrate the legacy LLM apiKey to apiKeys
-   if (config.llm.apiKey !== undefined) {
-     const old = config.llm.apiKey;
-     config.llm.apiKeys = { ...defaultConfig.llm.apiKeys, [config.llm.provider]: old };
-     delete config.llm.apiKey;
-     saveConfig(config);
-   }
 
-   // Migrate obsolete DeepSeek models to 'deepseek-chat'
-   if (config.llm.provider === "deepseek" && (config.llm.model === "deepseek-v3" || config.llm.model === "deepseek-llm")) {
-     config.llm.model = "deepseek-chat";
-     console.log("[iaConfigModule] Migrado modelo DeepSeek a deepseek-chat");
-     saveConfig(config);
-   }
 
-   // Migrate old made-up IDs to the official GPT-5 ones
-   if (config.llm.provider === "openai") {
-     if (config.llm.model === "gpt-5-2025-05-01") config.llm.model = "gpt-5";
-     if (config.llm.model === "gpt-5-mini-2025-05-01") config.llm.model = "gpt-5-mini";
-     saveConfig(config);
    }
 
-   return config;
- }
 
- export function getIaConfig() {
-   return loadConfig();
  }
 
- export function renderIaConfigForm(containerId) {
-   let config = loadConfig();
-   const container = document.getElementById(containerId);
-   if (!container) {
-     console.error(`[iaConfigModule] No se encontró el contenedor '${containerId}'`);
-     document.body.insertAdjacentHTML(
-       "beforeend",
-       `<div style='color:red'>[Error] No se encontró el contenedor '${containerId}' para la configuración IA.</div>`
-     );
-     return;
-   }
 
-   function maskApiKey(key) {
-     if (!key) return "";
-     if (key.length <= 8) return "*".repeat(key.length);
-     return key.substring(0, 3) + "-****" + key.slice(-4);
    }
 
-   container.innerHTML = `
-     <div class="flex justify-between items-center mb-6 border-b pb-2 border-blue-100">
-       <h2 class="text-xl font-bold text-blue-700 flex items-center">
-         <i class='fas fa-cogs mr-2'></i>Configurar Proveedores IA
-       </h2>
-       <button id="btnCloseConfig" type="button" class="text-gray-500 hover:text-blue-600 text-2xl focus:outline-none" aria-label="Cerrar">
-         <i class="fas fa-times"></i>
-       </button>
-     </div>
-     <form id="iaConfigForm" class="space-y-6">
-       <div class="bg-blue-50 p-4 rounded-lg border border-blue-100 mb-2">
-         <label class="block font-semibold text-blue-800 mb-2">Proveedor LLM</label>
-         <select id="llmProvider" class="w-full mb-3 p-2 rounded border border-gray-300 focus:ring-2 focus:ring-blue-300">
-           ${llmProviders.map(p => `<option value="${p.value}">${p.name}</option>`).join("")}
-         </select>
-         <div class="flex items-center mb-3">
-           <input type="password" id="llmApiKey" class="flex-1 p-2 rounded border border-gray-300 mr-2 bg-gray-100" placeholder="API Key LLM" autocomplete="off">
-           <button class="text-blue-700 hover:text-blue-900 px-3 py-2 rounded focus:outline-none border border-blue-200 bg-white" type="button" id="toggleLlmApiKey">
-             <i class="fas fa-eye"></i>
-           </button>
-         </div>
-         <select id="llmModel" class="w-full p-2 rounded border border-gray-300 focus:ring-2 focus:ring-blue-300"></select>
-       </div>
-       <div class="bg-purple-50 p-4 rounded-lg border border-purple-100 mb-2">
-         <label class="block font-semibold text-purple-800 mb-2">Proveedor Transcripción</label>
-         <select id="transProvider" class="w-full mb-3 p-2 rounded border border-gray-300 focus:ring-2 focus:ring-purple-300">
-           ${transcriptionProviders.map(p => `<option value="${p.value}">${p.name}</option>`).join("")}
-         </select>
-         <div class="flex items-center mb-3">
-           <input type="password" id="transApiKey" class="flex-1 p-2 rounded border border-gray-300 mr-2 bg-gray-100" placeholder="API Key Transcripción" autocomplete="off">
-           <button class="text-purple-700 hover:text-purple-900 px-3 py-2 rounded focus:outline-none border border-purple-200 bg-white" type="button" id="toggleTransApiKey">
-             <i class="fas fa-eye"></i>
-           </button>
-         </div>
-         <select id="transModel" class="w-full p-2 rounded border border-gray-300 focus:ring-2 focus:ring-purple-300"></select>
-       </div>
-       <button type="submit" class="w-full bg-blue-600 hover:bg-blue-700 text-white font-semibold py-3 rounded-lg shadow transition-colors flex items-center justify-center text-lg">
-         <i class="fas fa-save mr-2"></i>Guardar configuración
-       </button>
-     </form>
-   `;
-
-   // Modal close button
-   const closeBtn = document.getElementById("btnCloseConfig");
-   if (closeBtn) {
-     closeBtn.addEventListener("click", () => {
-       const modal = document.getElementById("configModal");
-       if (modal) modal.classList.remove("active");
-     });
-   }
 
-   // Set initial values
-   document.getElementById("llmProvider").value = config.llm.provider;
-   document.getElementById("llmApiKey").value = maskApiKey(config.llm.apiKeys[config.llm.provider] || "");
-   document.getElementById("transProvider").value = config.transcription.provider;
-   document.getElementById("transApiKey").value = maskApiKey(config.transcription.apiKeys[config.transcription.provider] || "");
 
-   // API key toggle (show/hide)
-   document.getElementById("toggleLlmApiKey").addEventListener("click", () => {
-     const input = document.getElementById("llmApiKey");
-     input.type = input.type === "password" ? "text" : "password";
-   });
-   document.getElementById("toggleTransApiKey").addEventListener("click", () => {
-     const input = document.getElementById("transApiKey");
-     input.type = input.type === "password" ? "text" : "password";
    });
 
-   // Populate models
-   function updateLlmModels() {
-     const prov = document.getElementById("llmProvider").value;
-     const providerObj = llmProviders.find(p => p.value === prov);
-     const models = providerObj.models;
-     const sel = document.getElementById("llmModel");
-     sel.innerHTML = models.map(m => `<option value="${m}">${m}</option>`).join("");
-
-     // If the saved model is not in the provider's list, use the first one and persist it
-     if (!models.includes(config.llm.model)) {
-       config.llm.model = models[0];
-       saveConfig(config);
      }
-     sel.value = config.llm.model;
-   }
 
-   function updateTransModels() {
-     const prov = document.getElementById("transProvider").value;
-     const providerObj = transcriptionProviders.find(p => p.value === prov);
-     const models = providerObj.models;
-     const sel = document.getElementById("transModel");
-     sel.innerHTML = models.map(m => `<option value="${m}">${m}</option>`).join("");
-     sel.value = config.transcription.models[prov] || models[0];
-     // Update the API key input when the provider changes
-     document.getElementById("transApiKey").value = maskApiKey(config.transcription.apiKeys[prov] || "");
    }
 
-   document.getElementById("llmProvider").addEventListener("change", () => {
-     const p = document.getElementById("llmProvider").value;
-     updateLlmModels();
-     // Reload the config with all keys and show the one for the selected provider
-     const fresh = loadConfig();
-     const keyEl = document.getElementById("llmApiKey");
-     if (keyEl) keyEl.value = maskApiKey(fresh.llm.apiKeys[p] || "");
    });
 
-   document.getElementById("transProvider").addEventListener("change", updateTransModels);
-
-   updateLlmModels();
-   updateTransModels();
-
-   // Save on submit
-   document.getElementById("iaConfigForm").addEventListener("submit", e => {
-     e.preventDefault();
-     // Persist the configuration per provider
-     const prev = config;
-     const newConfig = { ...prev };
-
-     // LLM: provider, apiKeys and model
-     const llProv = document.getElementById("llmProvider").value;
-     const rawKey = document.getElementById("llmApiKey").value;
-     const oldKey = prev.llm.apiKeys[llProv] || "";
-     const newKey = rawKey === maskApiKey(oldKey) ? oldKey : rawKey;
-
-     newConfig.llm = { ...prev.llm, provider: llProv, model: document.getElementById("llmModel").value };
-     newConfig.llm.apiKeys = { ...prev.llm.apiKeys, [llProv]: newKey };
-
-     // Transcription: provider, apiKeys and models per provider
-     const tp = document.getElementById("transProvider").value;
-     const rawKeyTrans = document.getElementById("transApiKey").value;
-     const existingKeyTrans = prev.transcription.apiKeys[tp] || "";
-     const actualKeyTrans = rawKeyTrans === maskApiKey(existingKeyTrans) ? existingKeyTrans : rawKeyTrans;
-
-     newConfig.transcription.provider = tp;
-     newConfig.transcription.apiKeys = { ...prev.transcription.apiKeys, [tp]: actualKeyTrans };
-     newConfig.transcription.models = { ...prev.transcription.models, [tp]: document.getElementById("transModel").value };
-
-     console.log("[iaConfigModule] Configuración guardada:", newConfig);
-     saveConfig(newConfig);
-     // Update the local config after saving
-     config = newConfig;
-
-     // Notify the change
-     document.dispatchEvent(new CustomEvent("iaConfigChanged"));
-
-     // Mask the API keys again in the UI
-     document.getElementById("llmApiKey").value = maskApiKey(newConfig.llm.apiKeys[newConfig.llm.provider] || "");
-     document.getElementById("transApiKey").value = maskApiKey(newConfig.transcription.apiKeys[tp] || "");
-     document.getElementById("llmApiKey").type = "password";
-     document.getElementById("transApiKey").type = "password";
-
-     // Discreet success message
-     let msg = document.getElementById("iaConfigSavedMsg");
-     if (!msg) {
-       msg = document.createElement("div");
-       msg.id = "iaConfigSavedMsg";
-       msg.className = "fixed left-1/2 top-6 -translate-x-1/2 bg-green-500 text-white px-6 py-3 rounded shadow text-lg z-50";
-       msg.innerHTML = '<i class="fas fa-check-circle mr-2"></i>¡Configuración guardada!';
-       document.body.appendChild(msg);
-     } else {
-       msg.style.display = "block";
      }
-     setTimeout(() => { msg.style.display = "none"; }, 2000);
 
-     // Close the modal
-     const modal = document.getElementById("configModal");
-     if (modal) modal.classList.remove("active");
-   });
  }
 
+ // js/llmClient.js
+ import { getIaConfig, llmProviders } from "./iaConfigModule.js";
 
+ /** Detects the GPT-5 family (including sub-models and aliases) */
+ function isGpt5(model = "") {
+   const m = String(model || "").toLowerCase();
+   return m.startsWith("gpt-5");
  }
 
+ /** Deep-deletes the given keys at any nesting level */
+ function deepDeleteKeys(obj, keys = []) {
+   if (!obj || typeof obj !== "object") return;
+   for (const k of Object.keys(obj)) {
+     if (keys.includes(k)) {
+       delete obj[k];
+       continue;
+     }
+     const val = obj[k];
+     if (Array.isArray(val)) {
+       for (const item of val) deepDeleteKeys(item, keys);
+     } else if (val && typeof val === "object") {
+       deepDeleteKeys(val, keys);
+     }
    }
+ }
 
+ /** Sanitizes the payload for GPT-5 models: removes unsupported params */
+ function sanitizeForGpt5(payload) {
+   // Creative and related keys that some SDKs/flows re-inject
+   const toStrip = [
+     "temperature",
+     "top_p",
+     "frequency_penalty",
+     "presence_penalty",
+     "best_of",
+     "n",
+     "logit_bias",
+     // Some libraries nest the config here:
+     "generation_config"
+   ];
+   deepDeleteKeys(payload, toStrip);
+
+   // If generation_config existed, make sure it is removed entirely
+   if (payload.generation_config) delete payload.generation_config;
+
+   return payload;
+ }
 
+ /** Builds the request body for Chat Completions (OpenAI) */
+ function buildOpenAIChatBody(model, messages, options = {}) {
+   const body = { model, messages };
 
+   if (!isGpt5(model)) {
+     if (typeof options.temperature === "number") body.temperature = options.temperature;
+     if (typeof options.top_p === "number") body.top_p = options.top_p;
+     if (typeof options.frequency_penalty === "number") body.frequency_penalty = options.frequency_penalty;
+     if (typeof options.presence_penalty === "number") body.presence_penalty = options.presence_penalty;
    }
 
+   if (options.response_format) body.response_format = options.response_format;
+   if (options.tools) body.tools = options.tools;
+   if (options.tool_choice) body.tool_choice = options.tool_choice;
 
+   return isGpt5(model) ? sanitizeForGpt5(body) : body;
  }
 
+ /** Builds the body for the Responses API (in case another flow uses it) */
+ function buildOpenAIResponsesBody(model, input, options = {}) {
+   const body = { model, input };
 
+   if (!isGpt5(model)) {
+     if (typeof options.temperature === "number") body.temperature = options.temperature;
+     if (typeof options.top_p === "number") body.top_p = options.top_p;
+     if (typeof options.frequency_penalty === "number") body.frequency_penalty = options.frequency_penalty;
+     if (typeof options.presence_penalty === "number") body.presence_penalty = options.presence_penalty;
    }
 
+   if (options.response_format) body.response_format = options.response_format;
+   if (options.tools) body.tools = options.tools;
+   if (options.tool_choice) body.tool_choice = options.tool_choice;
 
+   return isGpt5(model) ? sanitizeForGpt5(body) : body;
+ }
 
+ /** POST with a retry when the API rejects unsupported parameters */
+ async function hardenedPost(url, apiKey, body, model) {
+   // Sanitize once more right before sending
+   if (isGpt5(model)) sanitizeForGpt5(body);
+
+   let res = await fetch(url, {
+     method: "POST",
+     headers: {
+       "Authorization": `Bearer ${apiKey}`,
+       "Content-Type": "application/json"
+     },
+     body: JSON.stringify(body)
    });
 
+   if (!res.ok) {
+     let errJson = null, txt = "";
+     try { errJson = await res.json(); } catch { txt = await res.text(); }
+
+     const msg = errJson?.error?.message || txt || "";
+     const code = errJson?.error?.code || "";
+     const unsupported =
+       msg.includes("Unsupported value") ||
+       msg.includes("does not support") ||
+       code === "unsupported_value";
+
+     if (unsupported) {
+       // Retry: minimal payload (no creative params), fully sanitized
+       const minimal = isGpt5(model)
+         ? sanitizeForGpt5({ ...body, model })
+         : { ...body, model };
+       delete minimal.temperature;
+       delete minimal.top_p;
+       delete minimal.frequency_penalty;
+       delete minimal.presence_penalty;
+       delete minimal.best_of;
+       delete minimal.n;
+       delete minimal.logit_bias;
+       delete minimal.generation_config;
+
+       res = await fetch(url, {
+         method: "POST",
+         headers: {
+           "Authorization": `Bearer ${apiKey}`,
+           "Content-Type": "application/json"
+         },
+         body: JSON.stringify(minimal)
+       });
      }
 
+     if (!res.ok) {
+       const finalTxt = await res.text();
+       throw new Error(`OpenAI error ${res.status}: ${finalTxt}`);
+     }
    }
 
+   return res.json();
+ }
+
+ /** Chat Completions */
+ async function callOpenAIChat({ apiKey, model, messages, options }) {
+   const providerInfo = llmProviders.find(p => p.value === "openai");
+   const base = providerInfo?.url || "https://api.openai.com";
+   const url = `${base}/v1/chat/completions`;
+
+   const body = buildOpenAIChatBody(model, messages, options);
+   const data = await hardenedPost(url, apiKey, body, model);
+   const text = data?.choices?.[0]?.message?.content ?? "";
+   return { text, raw: data };
+ }
+
+ /** Responses API (in case some part of your app uses it) */
+ async function callOpenAIResponses({ apiKey, model, input, options }) {
+   const providerInfo = llmProviders.find(p => p.value === "openai");
+   const base = providerInfo?.url || "https://api.openai.com";
+   const url = `${base}/v1/responses`;
+
+   const body = buildOpenAIResponsesBody(model, input, options);
+   const data = await hardenedPost(url, apiKey, body, model);
+   // Normalize to "text"
+   const outMsg =
+     data?.output?.[0]?.content?.[0]?.text ??
+     data?.content?.[0]?.text ??
+     data?.choices?.[0]?.message?.content ??
+     "";
+   return { text: outMsg, raw: data };
+ }
+
+ /** DeepSeek */
+ async function callDeepSeekChat({ apiKey, model, messages, options }) {
+   const providerInfo = llmProviders.find(p => p.value === "deepseek");
+   const base = providerInfo?.url || "https://api.deepseek.com";
+   const url = `${base}/chat/completions`;
+
+   const body = { model, messages };
+   if (typeof options?.temperature === "number") body.temperature = options.temperature;
+   if (typeof options?.top_p === "number") body.top_p = options.top_p;
+   if (typeof options?.frequency_penalty === "number") body.frequency_penalty = options.frequency_penalty;
+   if (typeof options?.presence_penalty === "number") body.presence_penalty = options.presence_penalty;
+
+   const res = await fetch(url, {
+     method: "POST",
+     headers: {
+       "Authorization": `Bearer ${apiKey}`,
+       "Content-Type": "application/json"
+     },
+     body: JSON.stringify(body)
    });
 
+   if (!res.ok) {
+     const txt = await res.text();
+     throw new Error(`DeepSeek error ${res.status}: ${txt}`);
+   }
+
+   const data = await res.json();
+   const text = data?.choices?.[0]?.message?.content ?? "";
+   return { text, raw: data };
+ }
+
+ /** Single entry point */
+ export async function callLLM(messages, options = {}) {
+   const cfg = getIaConfig();
+   const provider = cfg.llm.provider;
+   const model = cfg.llm.model;
+   if (!provider) throw new Error("Proveedor LLM no configurado.");
+   const apiKey = cfg.llm.apiKeys?.[provider];
+   if (!apiKey) throw new Error(`API Key no configurada para proveedor '${provider}'.`);
+
+   if (provider === "openai") {
+     // If "input" is passed somewhere instead of "messages", use the Responses API
+     if (options && Object.prototype.hasOwnProperty.call(options, "input")) {
+       return await callOpenAIResponses({ apiKey, model, input: options.input, options });
      }
+     return await callOpenAIChat({ apiKey, model, messages, options });
+   }
 
+   if (provider === "deepseek") {
+     return await callDeepSeekChat({ apiKey, model, messages, options });
+   }
+
+   throw new Error(`Proveedor no soportado: ${provider}`);
  }
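
A minimal usage sketch of the exported callLLM entry point, assuming a provider, model and API key have already been saved through the configuration form; the import path and the prompt are hypothetical and depend on where the page loads the module:

// Usage sketch (hypothetical): requires an ES-module context and a previously saved config.
import { callLLM } from "./js/iaConfigModule.js";

const { text } = await callLLM(
  [{ role: "user", content: "Summarize this note in one sentence." }],
  { temperature: 0.2 } // dropped automatically for GPT-5 models by sanitizeForGpt5
);
console.log(text);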