// Gemini chat streaming route (commit 062c414: "Added messaging app Lets Chat")
import { NextRequest, NextResponse } from 'next/server'
import { GoogleGenAI } from "@google/genai"
/**
 * POST handler — streams a Gemini chat completion back as Server-Sent Events.
 *
 * Request body: `{ message: string, history?: Array<{ role: string; content: string }> }`.
 * Each SSE event is `data: {"text": "...", "thought": "..."}\n\n`, where
 * `thought` carries the model's thinking summary and `text` the answer fragment.
 *
 * Responses: 400 when `message` is missing/empty, 500 when the API key is not
 * configured or the request fails, otherwise a `text/event-stream` body.
 */
export async function POST(request: NextRequest) {
  try {
    const { message, history } = await request.json()

    // Reject requests without a usable message before touching the model;
    // the original code would forward `{ text: undefined }` to Gemini.
    if (typeof message !== 'string' || message.trim() === '') {
      return NextResponse.json(
        { error: 'Message is required' },
        { status: 400 }
      )
    }

    const apiKey = process.env.GEMINI_API_KEY
    if (!apiKey) {
      return NextResponse.json(
        { error: 'Gemini API key not configured' },
        { status: 500 }
      )
    }

    const ai = new GoogleGenAI({ apiKey })

    // Convert prior turns to Gemini's content format. The client sends
    // 'assistant' for model turns; Gemini expects the role 'model'.
    const contents =
      history?.map((msg: { role: string; content: string }) => ({
        role: msg.role === 'assistant' ? 'model' : 'user',
        parts: [{ text: msg.content }]
      })) ?? []

    // Append the current user message as the final turn.
    contents.push({
      role: 'user',
      parts: [{ text: message }]
    })

    // One encoder for the whole stream (was allocated per chunk in the loop).
    const encoder = new TextEncoder()

    const stream = new ReadableStream({
      async start(controller) {
        try {
          const result = await ai.models.generateContentStream({
            model: 'gemini-flash-latest',
            contents,
            config: {
              thinkingConfig: {
                includeThoughts: true,
                thinkingBudget: -1 // Dynamic thinking: model adjusts based on request complexity
              }
            }
          })

          for await (const chunk of result) {
            // Separate thinking-summary parts from answer parts in this chunk.
            let text = ''
            let thought = ''

            for (const part of chunk.candidates?.[0]?.content?.parts ?? []) {
              if (!part.text) {
                continue
              }
              // @ts-ignore - thought property exists on parts with thinking mode
              if (part.thought) {
                // part.thought === true: part.text is the thinking summary
                thought += part.text
              } else {
                // part.thought false/undefined: part.text is an answer fragment
                text += part.text
              }
            }

            // Skip empty chunks so the client never receives a no-op event.
            if (text === '' && thought === '') {
              continue
            }

            const data = JSON.stringify({ text, thought })
            controller.enqueue(encoder.encode(`data: ${data}\n\n`))
          }
          controller.close()
        } catch (error) {
          console.error('Streaming error:', error)
          controller.error(error)
        }
      }
    })

    return new NextResponse(stream, {
      headers: {
        'Content-Type': 'text/event-stream; charset=utf-8',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      },
    })
  } catch (error) {
    console.error('Error in Gemini chat API:', error)
    return NextResponse.json(
      { error: 'Failed to process request' },
      { status: 500 }
    )
  }
}