index (int64) | repo_id (string) | file_path (string) | content (string)
|---|---|---|---|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/config/ProcessingMode.java
|
package ai.driftkit.audio.config;
/**
* Enumeration of supported processing modes.
*/
public enum ProcessingMode {
BATCH("batch"),
STREAMING("streaming");
private final String value;
ProcessingMode(String value) {
this.value = value;
}
public String getValue() {
return value;
}
public static ProcessingMode fromValue(String value) {
for (ProcessingMode mode : values()) {
if (mode.value.equals(value)) {
return mode;
}
}
throw new IllegalArgumentException("Unknown processing mode: " + value);
}
}
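// Usage sketch (illustrative, not part of the shipped sources): resolving a
// ProcessingMode from raw configuration text and falling back to BATCH when the
// value is unknown. Only fromValue/getValue from the enum above are used; the
// fallback policy is an assumption made for this example.
class ProcessingModeUsageExample {
    static ProcessingMode resolve(String raw) {
        try {
            return ProcessingMode.fromValue(raw);
        } catch (IllegalArgumentException e) {
            return ProcessingMode.BATCH; // unknown values fall back to batch mode
        }
    }

    public static void main(String[] args) {
        System.out.println(resolve("streaming").getValue()); // prints "streaming"
        System.out.println(resolve("bogus").getValue());     // prints "batch"
    }
}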
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/config/VadConfig.java
|
package ai.driftkit.audio.config;
import lombok.Data;
/**
* Configuration for Voice Activity Detection (VAD).
*/
@Data
public class VadConfig {
/**
* Enable/disable VAD.
* Default: true
*/
private boolean enabled = true;
/**
* Energy threshold for speech detection (0.0-1.0).
* Lower values are more sensitive.
* Default: 0.005
*/
private double threshold = 0.005;
/**
* Minimum duration of speech to consider (milliseconds).
* Default: 250ms
*/
private int minSpeechDurationMs = 250;
/**
* Duration of silence before finalizing chunk (milliseconds).
* Default: 1000ms
*/
private int silenceDurationMs = 1000;
/**
* Enable adaptive threshold adjustment.
* Default: true
*/
private boolean adaptiveThreshold = true;
/**
* Base noise level for adaptive threshold.
* Default: 0.001
*/
private double noiseLevel = 0.001;
}
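// Usage sketch (illustrative): tuning VAD for a noisier environment. The setters
// and toString() are Lombok-generated by @Data on VadConfig; the values chosen
// here are arbitrary examples, not recommended defaults.
class VadConfigUsageExample {
    public static void main(String[] args) {
        VadConfig vad = new VadConfig();
        vad.setThreshold(0.02);          // less sensitive than the 0.005 default
        vad.setSilenceDurationMs(1500);  // wait longer before finalizing a chunk
        vad.setAdaptiveThreshold(false); // keep the threshold fixed
        System.out.println(vad);
    }
}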
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/engine/AbstractTranscriptionEngine.java
|
package ai.driftkit.audio.engine;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.config.AudioProcessingConfig;
import ai.driftkit.audio.model.TranscriptionResult;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
/**
* Abstract base class for transcription engines providing common functionality.
*/
@Slf4j
public abstract class AbstractTranscriptionEngine implements TranscriptionEngine {
protected final AudioProcessingConfig config;
protected final Map<String, StreamingSession> streamingSessions = new ConcurrentHashMap<>();
protected AbstractTranscriptionEngine(AudioProcessingConfig config) {
this.config = config;
}
@Override
public CompletableFuture<TranscriptionResult> transcribeBatch(
byte[] audioData,
int sampleRate,
String languageCode) {
if (!supportsBatchMode()) {
throw new UnsupportedOperationException(
getName() + " does not support batch transcription mode");
}
return doTranscribeBatch(audioData, sampleRate, languageCode);
}
@Override
public void startStreamingSession(
String sessionId,
int sampleRate,
String languageCode,
Consumer<TranscriptionResult> resultCallback) {
if (!supportsStreamingMode()) {
throw new UnsupportedOperationException(
getName() + " does not support streaming transcription mode");
}
if (streamingSessions.containsKey(sessionId)) {
throw new IllegalStateException(
"Streaming session already exists: " + sessionId);
}
StreamingSession session = createStreamingSession(
sessionId, sampleRate, languageCode, resultCallback);
streamingSessions.put(sessionId, session);
log.debug("Started streaming session {} for engine {}", sessionId, getName());
}
@Override
public void sendStreamingAudio(String sessionId, byte[] audioData) {
if (!supportsStreamingMode()) {
throw new UnsupportedOperationException(
getName() + " does not support streaming transcription mode");
}
StreamingSession session = streamingSessions.get(sessionId);
if (session == null) {
throw new IllegalStateException(
"No active streaming session found: " + sessionId);
}
session.sendAudio(audioData);
}
@Override
public void stopStreamingSession(String sessionId) {
if (!supportsStreamingMode()) {
throw new UnsupportedOperationException(
getName() + " does not support streaming transcription mode");
}
StreamingSession session = streamingSessions.remove(sessionId);
if (session != null) {
session.close();
log.debug("Stopped streaming session {} for engine {}", sessionId, getName());
}
}
@Override
public boolean isStreamingSessionActive(String sessionId) {
StreamingSession session = streamingSessions.get(sessionId);
return session != null && session.isActive();
}
@Override
public void shutdown() {
// Close all active streaming sessions
streamingSessions.values().forEach(StreamingSession::close);
streamingSessions.clear();
doShutdown();
log.info("{} engine shut down", getName());
}
/**
* Perform batch transcription implementation.
*/
protected abstract CompletableFuture<TranscriptionResult> doTranscribeBatch(
byte[] audioData, int sampleRate, String languageCode);
/**
* Create a new streaming session implementation.
*/
protected abstract StreamingSession createStreamingSession(
String sessionId,
int sampleRate,
String languageCode,
Consumer<TranscriptionResult> resultCallback);
/**
* Perform engine-specific shutdown tasks.
*/
protected abstract void doShutdown();
/**
* Interface for streaming session implementations.
*/
protected interface StreamingSession {
void sendAudio(byte[] audioData);
void close();
boolean isActive();
}
}
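// Subclass sketch (hypothetical, for illustration only): a batch-only "echo"
// engine showing which methods a concrete engine must supply on top of this
// base class. The EchoEngine name and its behavior are invented; it assumes the
// TranscriptionEngine interface declares only the methods overridden by the
// bundled AssemblyAI/Deepgram implementations.
class EchoEngine extends AbstractTranscriptionEngine {
    EchoEngine(AudioProcessingConfig config) {
        super(config);
    }

    @Override public String getName() { return "Echo"; }
    @Override public boolean supportsBatchMode() { return true; }
    @Override public boolean supportsStreamingMode() { return false; }
    @Override public void initialize() { /* nothing to set up */ }

    @Override
    public EngineConfiguration getConfiguration() {
        // Minimal descriptor; real engines also describe formats and config keys.
        return EngineConfiguration.builder()
                .engineType("Echo")
                .processingMode(EngineConfiguration.ProcessingMode.BATCH_ONLY)
                .build();
    }

    @Override
    protected CompletableFuture<TranscriptionResult> doTranscribeBatch(
            byte[] audioData, int sampleRate, String languageCode) {
        // Echo back the payload size instead of performing real transcription.
        return CompletableFuture.completedFuture(TranscriptionResult.builder()
                .text("received " + audioData.length + " bytes")
                .confidence(1.0)
                .timestamp(System.currentTimeMillis())
                .error(false)
                .build());
    }

    @Override
    protected StreamingSession createStreamingSession(
            String sessionId, int sampleRate, String languageCode,
            Consumer<TranscriptionResult> resultCallback) {
        throw new UnsupportedOperationException("Echo engine is batch-only");
    }

    @Override
    protected void doShutdown() {
        // No resources to release.
    }
}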
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/engine/TranscriptionEngineFactory.java
|
package ai.driftkit.audio.engine;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.config.AudioProcessingConfig;
import ai.driftkit.audio.config.EngineType;
import ai.driftkit.audio.config.ProcessingMode;
import ai.driftkit.audio.engine.impl.AssemblyAIEngine;
import ai.driftkit.audio.engine.impl.DeepgramEngine;
import org.springframework.stereotype.Component;
import java.util.HashMap;
import java.util.Map;
/**
* Factory for creating transcription engine instances based on configuration.
*/
@Slf4j
@Component
public class TranscriptionEngineFactory {
private final AudioProcessingConfig config;
public TranscriptionEngineFactory(AudioProcessingConfig config) {
this.config = config;
}
/**
* Create a transcription engine based on the configured engine type.
*
* @return Configured transcription engine
* @throws IllegalArgumentException if engine type is not supported
*/
public TranscriptionEngine createEngine() {
EngineType engineType = config.getEngine();
TranscriptionEngine engine;
switch (engineType) {
case ASSEMBLYAI:
engine = new AssemblyAIEngine(config);
break;
case DEEPGRAM:
engine = new DeepgramEngine(config);
break;
default:
throw new IllegalArgumentException("Unsupported transcription engine: " + engineType);
}
// Validate processing mode compatibility
switch (config.getProcessingMode()) {
case STREAMING:
if (!engine.supportsStreamingMode()) {
throw new IllegalStateException(
String.format("Engine '%s' does not support streaming mode", engine.getName()));
}
break;
case BATCH:
if (!engine.supportsBatchMode()) {
throw new IllegalStateException(
String.format("Engine '%s' does not support batch mode", engine.getName()));
}
break;
}
engine.initialize();
log.info("Created {} transcription engine in {} mode",
engine.getName(), config.getProcessingMode());
return engine;
}
/**
* Get the supported engines and their capabilities.
*
* @return Map of engine names to their configurations
*/
public static Map<EngineType, EngineConfiguration> getSupportedEngines() {
Map<EngineType, EngineConfiguration> engines = new HashMap<>();
// Add AssemblyAI
AudioProcessingConfig dummyConfig = new AudioProcessingConfig();
AssemblyAIEngine assemblyAI = new AssemblyAIEngine(dummyConfig);
engines.put(EngineType.ASSEMBLYAI, assemblyAI.getConfiguration());
// Add Deepgram
DeepgramEngine deepgram = new DeepgramEngine(dummyConfig);
engines.put(EngineType.DEEPGRAM, deepgram.getConfiguration());
return engines;
}
}
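// Usage sketch (illustrative): the factory is a Spring @Component and is
// normally injected, but it can also be constructed directly from an existing
// AudioProcessingConfig. createEngine() validates the configured processing
// mode and initializes the engine before returning it.
class TranscriptionEngineFactoryUsageExample {
    static TranscriptionEngine buildEngine(AudioProcessingConfig config) {
        TranscriptionEngineFactory factory = new TranscriptionEngineFactory(config);
        TranscriptionEngine engine = factory.createEngine();
        // Static capability map, e.g. for exposing supported engines over an API.
        Map<EngineType, EngineConfiguration> supported =
                TranscriptionEngineFactory.getSupportedEngines();
        System.out.println("Active engine: " + engine.getName()
                + ", known engines: " + supported.keySet());
        return engine;
    }
}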
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/engine
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/engine/impl/AssemblyAIEngine.java
|
package ai.driftkit.audio.engine.impl;
import com.assemblyai.api.AssemblyAI;
import com.assemblyai.api.resources.files.types.UploadedFile;
import com.assemblyai.api.resources.transcripts.types.*;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.config.AudioProcessingConfig;
import ai.driftkit.audio.engine.AbstractTranscriptionEngine;
import ai.driftkit.audio.engine.EngineConfiguration;
import ai.driftkit.audio.model.TranscriptionResult;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
/**
* AssemblyAI transcription engine implementation.
* Supports batch transcription mode only.
*/
@Slf4j
public class AssemblyAIEngine extends AbstractTranscriptionEngine {
private static final String ENGINE_NAME = "AssemblyAI";
private AssemblyAI client;
public AssemblyAIEngine(AudioProcessingConfig config) {
super(config);
}
@Override
public String getName() {
return ENGINE_NAME;
}
@Override
public boolean supportsBatchMode() {
return true;
}
@Override
public boolean supportsStreamingMode() {
return false; // AssemblyAI offers real-time streaming, but it is English-only, so it is not enabled in this integration
}
@Override
public void initialize() {
String apiKey = config.getAssemblyai().getApiKey();
if (apiKey == null || apiKey.trim().isEmpty()) {
throw new IllegalStateException("AssemblyAI API key is not configured");
}
this.client = AssemblyAI.builder()
.apiKey(apiKey)
.build();
log.info("AssemblyAI engine initialized");
}
@Override
protected CompletableFuture<TranscriptionResult> doTranscribeBatch(
byte[] audioData, int sampleRate, String languageCode) {
return CompletableFuture.supplyAsync(() -> {
try {
// Upload audio data directly
UploadedFile uploadedFile = client.files().upload(audioData);
// Configure transcription
String effectiveLanguage = languageCode != null ? languageCode : config.getAssemblyai().getLanguageCode().getValue();
TranscriptOptionalParams params = TranscriptOptionalParams.builder()
.languageCode(TranscriptLanguageCode.valueOf(effectiveLanguage.toUpperCase()))
.build();
// Submit transcription and wait for completion
Transcript transcript = client.transcripts().transcribe(uploadedFile.getUploadUrl(), params);
// Build result
return buildTranscriptionResult(transcript);
} catch (Exception e) {
log.error("AssemblyAI transcription failed", e);
return TranscriptionResult.builder()
.error(true)
.errorMessage("Transcription failed: " + e.getMessage())
.timestamp(System.currentTimeMillis())
.build();
}
});
}
@Override
protected StreamingSession createStreamingSession(
String sessionId,
int sampleRate,
String languageCode,
Consumer<TranscriptionResult> resultCallback) {
throw new UnsupportedOperationException(
"AssemblyAI does not support streaming transcription");
}
@Override
protected void doShutdown() {
// AssemblyAI client doesn't require explicit shutdown
}
@Override
public EngineConfiguration getConfiguration() {
Map<String, String> requiredConfig = new HashMap<>();
requiredConfig.put("audio.processing.assemblyai.api-key", "AssemblyAI API key");
Map<String, String> optionalConfig = new HashMap<>();
optionalConfig.put("audio.processing.assemblyai.language-code", "Language code (default: en)");
return EngineConfiguration.builder()
.engineType(ENGINE_NAME)
.requiredConfig(requiredConfig)
.optionalConfig(optionalConfig)
.processingMode(EngineConfiguration.ProcessingMode.BATCH_ONLY)
.supportedFormats(EngineConfiguration.AudioFormat.builder()
.supportedSampleRates(new int[]{8000, 16000, 22050, 44100, 48000})
.supportedChannels(new int[]{1, 2})
.supportedBitsPerSample(new int[]{16})
.supportedEncodings(new String[]{"PCM", "WAV", "MP3", "M4A"})
.build())
.requiresConversion(true)
.build();
}
private TranscriptionResult buildTranscriptionResult(Transcript transcript) {
if (transcript.getStatus() == TranscriptStatus.ERROR) {
return TranscriptionResult.builder()
.error(true)
.errorMessage(transcript.getError().orElse("Unknown error"))
.timestamp(System.currentTimeMillis())
.build();
}
return TranscriptionResult.builder()
.text(transcript.getText().orElse(""))
.confidence(transcript.getConfidence().orElse(0.0))
.language(transcript.getLanguageCode().map(Object::toString).orElse("unknown"))
.timestamp(System.currentTimeMillis())
.error(false)
.metadata(buildMetadata(transcript))
.build();
}
private Map<String, Object> buildMetadata(Transcript transcript) {
Map<String, Object> metadata = new HashMap<>();
metadata.put("transcriptId", transcript.getId());
metadata.put("duration", transcript.getAudioDuration());
metadata.put("words", transcript.getWords());
return metadata;
}
}
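// Usage sketch (illustrative): a one-off batch transcription with the engine
// above, outside of Spring. The AudioProcessingConfig is assumed to already
// carry a valid AssemblyAI API key; the 16 kHz sample rate, "en" language code
// and WAV payload are placeholders.
class AssemblyAIEngineUsageExample {
    static void transcribe(AudioProcessingConfig config, byte[] wavBytes) {
        AssemblyAIEngine engine = new AssemblyAIEngine(config);
        engine.initialize();
        TranscriptionResult result = engine.transcribeBatch(wavBytes, 16000, "en").join();
        System.out.println(result); // inspect text/confidence/error on the result
        engine.shutdown();
    }
}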
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/engine
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/engine/impl/DeepgramEngine.java
|
package ai.driftkit.audio.engine.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.micrometer.common.util.StringUtils;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.config.AudioProcessingConfig;
import ai.driftkit.audio.engine.AbstractTranscriptionEngine;
import ai.driftkit.audio.engine.EngineConfiguration;
import ai.driftkit.audio.model.TranscriptionResult;
import ai.driftkit.audio.model.WordBuffer;
import ai.driftkit.audio.model.WordInfo;
import ai.driftkit.audio.model.SegmentResult;
import ai.driftkit.audio.model.deepgram.DeepgramResponse;
import ai.driftkit.audio.model.deepgram.DeepgramAlternative;
import ai.driftkit.audio.model.deepgram.DeepgramWord;
import okhttp3.*;
import okio.ByteString;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
/**
* Deepgram transcription engine implementation.
* Supports both batch and streaming transcription modes.
*/
@Slf4j
public class DeepgramEngine extends AbstractTranscriptionEngine {
private static final String ENGINE_NAME = "Deepgram";
private static final String DEEPGRAM_API_URL = "https://api.deepgram.com/v1/listen";
private static final String DEEPGRAM_WS_URL = "wss://api.deepgram.com/v1/listen";
private OkHttpClient httpClient;
private final ObjectMapper objectMapper = new ObjectMapper();
private final ScheduledExecutorService reconnectExecutor = Executors.newSingleThreadScheduledExecutor();
public DeepgramEngine(AudioProcessingConfig config) {
super(config);
}
@Override
public String getName() {
return ENGINE_NAME;
}
@Override
public boolean supportsBatchMode() {
return true;
}
@Override
public boolean supportsStreamingMode() {
return true;
}
@Override
public void initialize() {
String apiKey = config.getDeepgram().getApiKey();
if (apiKey == null || apiKey.trim().isEmpty()) {
throw new IllegalStateException("Deepgram API key is not configured");
}
this.httpClient = new OkHttpClient.Builder()
.connectTimeout(30, TimeUnit.SECONDS)
.readTimeout(30, TimeUnit.SECONDS)
.writeTimeout(30, TimeUnit.SECONDS)
.build();
log.info("Deepgram engine initialized");
}
@Override
protected CompletableFuture<TranscriptionResult> doTranscribeBatch(
byte[] audioData, int sampleRate, String languageCode) {
return CompletableFuture.supplyAsync(() -> {
try {
String url = buildBatchUrl(sampleRate, languageCode);
RequestBody body = RequestBody.create(
audioData,
MediaType.parse("audio/wav")
);
Request request = new Request.Builder()
.url(url)
.header("Authorization", "Token " + config.getDeepgram().getApiKey())
.header("Content-Type", "audio/wav")
.post(body)
.build();
try (Response response = httpClient.newCall(request).execute()) {
if (!response.isSuccessful()) {
throw new IOException("Unexpected response: " + response);
}
String responseBody = response.body().string();
return parseDeepgramResponse(responseBody);
}
} catch (Exception e) {
log.error("Deepgram batch transcription failed", e);
return TranscriptionResult.builder()
.error(true)
.errorMessage("Transcription failed: " + e.getMessage())
.timestamp(System.currentTimeMillis())
.build();
}
});
}
@Override
protected StreamingSession createStreamingSession(
String sessionId,
int sampleRate,
String languageCode,
Consumer<TranscriptionResult> resultCallback) {
return new DeepgramStreamingSession(sessionId, sampleRate, languageCode, resultCallback);
}
@Override
protected void doShutdown() {
if (httpClient != null) {
httpClient.dispatcher().executorService().shutdown();
httpClient.connectionPool().evictAll();
}
if (reconnectExecutor != null && !reconnectExecutor.isShutdown()) {
reconnectExecutor.shutdown();
}
}
@Override
public EngineConfiguration getConfiguration() {
Map<String, String> requiredConfig = new HashMap<>();
requiredConfig.put("audio.processing.deepgram.api-key", "Deepgram API key");
Map<String, String> optionalConfig = new HashMap<>();
optionalConfig.put("audio.processing.deepgram.language", "Language code (default: en)");
optionalConfig.put("audio.processing.deepgram.model", "Model to use (default: nova-2)");
optionalConfig.put("audio.processing.deepgram.punctuate", "Add punctuation (default: true)");
optionalConfig.put("audio.processing.deepgram.interim-results", "Enable interim results for streaming (default: true)");
return EngineConfiguration.builder()
.engineType(ENGINE_NAME)
.requiredConfig(requiredConfig)
.optionalConfig(optionalConfig)
.processingMode(EngineConfiguration.ProcessingMode.BOTH)
.supportedFormats(EngineConfiguration.AudioFormat.builder()
.supportedSampleRates(new int[]{8000, 16000, 24000, 48000})
.supportedChannels(new int[]{1, 2})
.supportedBitsPerSample(new int[]{16})
.supportedEncodings(new String[]{"linear16", "flac", "mulaw", "amr", "opus"})
.build())
.maxStreamingChunkSize(8192) // 8KB chunks
.recommendedBufferSizeMs(100) // 100ms buffers
.requiresConversion(false) // Deepgram accepts raw PCM
.build();
}
private String buildBatchUrl(int sampleRate, String languageCode) {
StringBuilder url = new StringBuilder(DEEPGRAM_API_URL);
url.append("?encoding=linear16");
url.append("&sample_rate=").append(sampleRate);
String effectiveLanguage = languageCode != null ? languageCode : config.getDeepgram().getLanguage().getValue();
url.append("&language=").append(effectiveLanguage);
url.append("&model=").append(config.getDeepgram().getModel());
url.append("&punctuate=").append(config.getDeepgram().isPunctuate());
return url.toString();
}
private String buildStreamingUrl(int sampleRate, String languageCode) {
StringBuilder url = new StringBuilder(DEEPGRAM_WS_URL);
url.append("?encoding=linear16");
url.append("&sample_rate=").append(sampleRate);
String effectiveLanguage = languageCode != null ? languageCode : config.getDeepgram().getLanguage().getValue();
url.append("&language=").append(effectiveLanguage);
url.append("&model=").append(config.getDeepgram().getModel());
url.append("&punctuate=").append(config.getDeepgram().isPunctuate());
url.append("&interim_results=").append(config.getDeepgram().isInterimResults());
return url.toString();
}
private TranscriptionResult parseDeepgramResponse(String json) {
try {
DeepgramResponse response = objectMapper.readValue(json, DeepgramResponse.class);
// Handle streaming response format (has direct channel)
if (response.getChannel() != null) {
var channel = response.getChannel();
if (channel.getAlternatives() != null && !channel.getAlternatives().isEmpty()) {
var alternative = channel.getAlternatives().get(0);
String transcript = alternative.getTranscript();
Double confidence = alternative.getConfidence();
return TranscriptionResult.builder()
.text(transcript)
.confidence(confidence)
.language(response.getLanguage() != null ? response.getLanguage() : "en")
.timestamp(System.currentTimeMillis())
.error(false)
.metadata(response.toMap())
.build();
}
}
// Handle batch response format (has results.channels)
if (response.getResults() != null && response.getResults().getChannels() != null && !response.getResults().getChannels().isEmpty()) {
var channel = response.getResults().getChannels().get(0);
if (channel.getAlternatives() != null && !channel.getAlternatives().isEmpty()) {
var alternative = channel.getAlternatives().get(0);
String transcript = alternative.getTranscript();
Double confidence = alternative.getConfidence();
return TranscriptionResult.builder()
.text(transcript)
.confidence(confidence)
.language(response.getLanguage() != null ? response.getLanguage() : "en")
.timestamp(System.currentTimeMillis())
.error(false)
.metadata(response.toMap())
.build();
}
}
return TranscriptionResult.builder()
.error(true)
.errorMessage("No transcription results found")
.timestamp(System.currentTimeMillis())
.build();
} catch (Exception e) {
log.error("Failed to parse Deepgram response", e);
return TranscriptionResult.builder()
.error(true)
.errorMessage("Failed to parse response: " + e.getMessage())
.timestamp(System.currentTimeMillis())
.build();
}
}
/**
* WebSocket-based streaming session for Deepgram.
*/
private class DeepgramStreamingSession implements StreamingSession {
private final String sessionId;
private final int sampleRate;
private final String languageCode;
private final Consumer<TranscriptionResult> resultCallback;
private WebSocket webSocket;
private volatile boolean active = false;
private volatile boolean shouldReconnect = true;
private final WordBuffer wordBuffer = new WordBuffer();
private final AtomicInteger reconnectAttempts = new AtomicInteger(0);
private static final int MAX_RECONNECT_ATTEMPTS = 5;
private static final long RECONNECT_DELAY_MS = 1000;
private static final long MAX_RECONNECT_DELAY_MS = 30000;
DeepgramStreamingSession(String sessionId, int sampleRate, String languageCode,
Consumer<TranscriptionResult> resultCallback) {
this.sessionId = sessionId;
this.sampleRate = sampleRate;
this.languageCode = languageCode;
this.resultCallback = resultCallback;
connect();
}
private void connect() {
try {
String url = buildStreamingUrl(sampleRate, languageCode);
Request request = new Request.Builder()
.url(url)
.header("Authorization", "Token " + config.getDeepgram().getApiKey())
.build();
WebSocketListener listener = new DeepgramWebSocketListener();
webSocket = httpClient.newWebSocket(request, listener);
active = true;
log.debug("Deepgram streaming session {} started", sessionId);
} catch (Exception e) {
log.error("Failed to start Deepgram streaming session", e);
if (shouldReconnect) {
scheduleReconnect();
} else {
throw new RuntimeException("Failed to start streaming session", e);
}
}
}
private void scheduleReconnect() {
int attempts = reconnectAttempts.incrementAndGet();
if (attempts > MAX_RECONNECT_ATTEMPTS) {
log.error("Max reconnection attempts ({}) reached for session {}", MAX_RECONNECT_ATTEMPTS, sessionId);
active = false;
shouldReconnect = false;
resultCallback.accept(TranscriptionResult.builder()
.error(true)
.errorMessage("Max reconnection attempts reached")
.timestamp(System.currentTimeMillis())
.build());
return;
}
long delay = Math.min(RECONNECT_DELAY_MS * (1L << (attempts - 1)), MAX_RECONNECT_DELAY_MS);
log.info("Scheduling reconnection attempt {} for session {} in {}ms", attempts, sessionId, delay);
reconnectExecutor.schedule(() -> {
if (shouldReconnect && !active) {
log.info("Attempting to reconnect session {} (attempt {})", sessionId, attempts);
connect();
}
}, delay, TimeUnit.MILLISECONDS);
}
private void onConnectionSuccess() {
reconnectAttempts.set(0);
log.info("Deepgram streaming session {} reconnected successfully", sessionId);
}
private void onConnectionFailure(Throwable t) {
active = false;
log.error("Deepgram WebSocket connection failed for session {}: {}", sessionId, t.getMessage());
if (shouldReconnect) {
scheduleReconnect();
} else {
resultCallback.accept(TranscriptionResult.builder()
.error(true)
.errorMessage("WebSocket connection failed: " + t.getMessage())
.timestamp(System.currentTimeMillis())
.build());
}
}
@Override
public void sendAudio(byte[] audioData) {
if (webSocket != null && active) {
webSocket.send(ByteString.of(audioData));
}
}
@Override
public void close() {
shouldReconnect = false;
active = false;
if (webSocket != null) {
webSocket.close(1000, "Session closed");
log.debug("Deepgram streaming session {} closed", sessionId);
}
}
@Override
public boolean isActive() {
return active;
}
private class DeepgramWebSocketListener extends WebSocketListener {
@Override
public void onOpen(WebSocket webSocket, Response response) {
log.debug("Deepgram WebSocket opened for session {}", sessionId);
onConnectionSuccess();
}
@Override
public void onMessage(WebSocket webSocket, String text) {
try {
DeepgramResponse response = objectMapper.readValue(text, DeepgramResponse.class);
// Early exit if no channel or alternatives
if (response.getChannel() == null || response.getChannel().getAlternatives() == null
|| response.getChannel().getAlternatives().isEmpty()) {
return;
}
// Find the best alternative (highest confidence)
DeepgramAlternative bestAlternative = findBestAlternative(response.getChannel().getAlternatives());
// Convert words and update buffer
List<WordInfo> words = convertToWordInfoList(bestAlternative);
boolean isFinal = Boolean.TRUE.equals(response.getIsFinal());
SegmentResult segmentResult = wordBuffer.updateWords(words, isFinal);
// Create and send result only if we have new content
if (StringUtils.isNotBlank(segmentResult.getText())) {
TranscriptionResult result = createTranscriptionResult(response, bestAlternative, segmentResult, isFinal);
resultCallback.accept(result);
}
} catch (Exception e) {
log.error("Error processing Deepgram message", e);
}
}
private DeepgramAlternative findBestAlternative(List<DeepgramAlternative> alternatives) {
DeepgramAlternative best = alternatives.get(0);
for (DeepgramAlternative alt : alternatives) {
if (alt.getConfidence() == null || best.getConfidence() == null) {
continue;
}
if (alt.getConfidence() > best.getConfidence()) {
best = alt;
}
}
return best;
}
private List<WordInfo> convertToWordInfoList(DeepgramAlternative alternative) {
List<WordInfo> words = new ArrayList<>();
if (alternative.getWords() == null) {
return words;
}
for (DeepgramWord word : alternative.getWords()) {
words.add(WordInfo.builder()
.word(word.getWord())
.punctuatedWord(word.getPunctuatedWord())
.start(word.getStart())
.end(word.getEnd())
.confidence(word.getConfidence())
.language(word.getLanguage())
.build());
}
return words;
}
private TranscriptionResult createTranscriptionResult(DeepgramResponse response,
DeepgramAlternative alternative, SegmentResult segmentResult, boolean isFinal) {
return TranscriptionResult.builder()
.text(alternative.getTranscript()) // Original transcript from this response
.mergedTranscript(segmentResult.getText()) // Current segment only (since last final)
.words(segmentResult.getWords()) // Words for current segment only
.confidence(segmentResult.getConfidence())
.language(response.getLanguage() != null ? response.getLanguage() : "en")
.timestamp(System.currentTimeMillis())
.interim(!isFinal)
.error(false)
.metadata(response.toMap())
.build();
}
@Override
public void onClosing(WebSocket webSocket, int code, String reason) {
webSocket.close(1000, null);
active = false;
if (shouldReconnect && code != 1000) {
log.warn("Deepgram WebSocket closing unexpectedly for session {} with code {}: {}", sessionId, code, reason);
scheduleReconnect();
}
}
@Override
public void onFailure(WebSocket webSocket, Throwable t, Response response) {
onConnectionFailure(t);
}
}
}
}
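// Usage sketch (illustrative): driving a Deepgram streaming session directly.
// In this module that is normally done through AudioSessionManager; the session
// id, 16 kHz sample rate and sleep-based pacing are demonstration values, and
// the config is assumed to contain a valid Deepgram API key.
class DeepgramEngineUsageExample {
    static void stream(AudioProcessingConfig config, byte[][] pcmChunks) throws InterruptedException {
        DeepgramEngine engine = new DeepgramEngine(config);
        engine.initialize();
        engine.startStreamingSession("demo-session", 16000, "en",
                result -> System.out.println("interim/final result: " + result));
        for (byte[] chunk : pcmChunks) {
            engine.sendStreamingAudio("demo-session", chunk); // raw 16-bit PCM
            Thread.sleep(100); // roughly real-time pacing for ~100 ms chunks
        }
        engine.stopStreamingSession("demo-session");
        engine.shutdown();
    }
}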
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/service/AudioAnalyzer.java
|
package ai.driftkit.audio.service;
import ai.driftkit.audio.config.AudioProcessingConfig;
import ai.driftkit.audio.model.AudioAnalysis;
/**
* Service for analyzing audio buffers and detecting voice activity
*/
public class AudioAnalyzer {
private final AudioProcessingConfig config;
// Adaptive sensitivity fields
private volatile long lastVoiceDetectedTime = System.currentTimeMillis();
private volatile boolean sensitivityBoosted = false;
private volatile int currentSilenceThreshold;
private volatile int currentVoiceThreshold;
// Silence reset timer fields
private volatile long lastSoundDetectedTime = System.currentTimeMillis();
private volatile int dynamicSilenceThreshold = -1;
private volatile boolean timerRunning = false;
private final Object thresholdLock = new Object();
// Calibration support
private double backgroundNoiseLevel = 0;
private boolean isCalibrated = false;
public AudioAnalyzer(AudioProcessingConfig config) {
this.config = config;
}
/**
* Calibrate background noise level
* @param samples Array of AudioAnalysis samples from calibration phase
* @return Adjusted thresholds based on background noise
*/
public CalibrationResult calibrateBackgroundNoise(AudioAnalysis[] samples) {
// Default thresholds based on VAD config
double defaultSilenceThreshold = config.getVad().getThreshold() * 100; // Convert to 0-100 scale
double defaultVoiceThreshold = defaultSilenceThreshold * 2;
if (samples == null || samples.length == 0) {
return new CalibrationResult((int)defaultSilenceThreshold, (int)defaultVoiceThreshold);
}
// Calculate average background noise
double totalAmplitude = 0;
for (AudioAnalysis sample : samples) {
totalAmplitude += sample.getAmplitude();
}
backgroundNoiseLevel = totalAmplitude / samples.length;
// Calculate adjusted thresholds
int adjustedSilenceThreshold = (int)(backgroundNoiseLevel * 150); // 1.5x background
int adjustedVoiceThreshold = (int)(backgroundNoiseLevel * 250); // 2.5x background
// Use adjusted thresholds if they're higher than configured
int finalSilenceThreshold = Math.max(adjustedSilenceThreshold, (int)defaultSilenceThreshold);
int finalVoiceThreshold = Math.max(adjustedVoiceThreshold, (int)defaultVoiceThreshold);
// Update current thresholds
currentSilenceThreshold = finalSilenceThreshold;
currentVoiceThreshold = finalVoiceThreshold;
isCalibrated = true;
return new CalibrationResult(finalSilenceThreshold, finalVoiceThreshold, backgroundNoiseLevel);
}
/**
* Analyze audio buffer for voice activity and silence detection
*/
public AudioAnalysis analyzeBuffer(byte[] buffer, int length) {
// Convert bytes to 16-bit samples and calculate RMS (Root Mean Square)
long sum = 0;
int sampleCount = length / 2; // 16-bit samples
for (int i = 0; i < length - 1; i += 2) {
// Convert two bytes to a 16-bit sample (big-endian)
short sample = (short)((buffer[i] << 8) | (buffer[i + 1] & 0xFF));
sum += sample * sample;
}
double rms = Math.sqrt((double)sum / sampleCount);
boolean isSilent = rms < getCurrentSilenceThreshold();
// Update sound detection time for silence reset timer
updateSoundDetectionTime(rms);
return new AudioAnalysis(isSilent, rms);
}
/**
* Analyze entire chunk for voice activity
*/
public boolean analyzeChunkForVoice(byte[] chunkData) {
// Analyze the entire chunk in segments to detect voice activity
int segmentSize = config.getSampleRate() * 2; // 1 second segments
int numSegments = chunkData.length / segmentSize;
int voiceSegments = 0;
double maxSegmentAmplitude = 0;
for (int i = 0; i < numSegments; i++) {
int start = i * segmentSize;
int length = Math.min(segmentSize, chunkData.length - start);
if (length > 0) {
byte[] segment = new byte[length];
System.arraycopy(chunkData, start, segment, 0, length);
AudioAnalysis analysis = analyzeBuffer(segment, length);
if (analysis.getAmplitude() > maxSegmentAmplitude) {
maxSegmentAmplitude = analysis.getAmplitude();
}
if (analysis.getAmplitude() > getCurrentVoiceThreshold()) {
voiceSegments++;
}
}
}
// Calculate average amplitude across segments
double avgAmplitude = 0;
if (numSegments > 0) {
double totalAmplitude = 0;
for (int i = 0; i < numSegments; i++) {
int start = i * segmentSize;
int length = Math.min(segmentSize, chunkData.length - start);
if (length > 0) {
byte[] segment = new byte[length];
System.arraycopy(chunkData, start, segment, 0, length);
AudioAnalysis analysis = analyzeBuffer(segment, length);
totalAmplitude += analysis.getAmplitude();
}
}
avgAmplitude = totalAmplitude / numSegments;
}
// Consider the chunk to contain voice if any of the following holds:
// 1. At least one segment exceeds the voice threshold, OR
// 2. The maximum segment amplitude is above 70% of the voice threshold, OR
// 3. The average amplitude across segments is above 50% of the voice threshold
boolean hasVoice = voiceSegments > 0 ||
maxSegmentAmplitude > (getCurrentVoiceThreshold() * 0.7) ||
avgAmplitude > (getCurrentVoiceThreshold() * 0.5);
return hasVoice;
}
/**
* Initialize adaptive sensitivity settings
*/
public void initializeAdaptiveSensitivity() {
int baseThreshold = (int)(config.getVad().getThreshold() * 100);
currentSilenceThreshold = baseThreshold;
currentVoiceThreshold = baseThreshold * 2;
lastVoiceDetectedTime = System.currentTimeMillis();
sensitivityBoosted = false;
}
/**
* Update adaptive sensitivity based on voice activity
*/
public void updateAdaptiveSensitivity(boolean voiceDetected) {
long currentTime = System.currentTimeMillis();
if (voiceDetected) {
lastVoiceDetectedTime = currentTime;
if (sensitivityBoosted) {
// Reset to normal sensitivity when voice is detected
int baseThreshold = (int)(config.getVad().getThreshold() * 100);
currentSilenceThreshold = baseThreshold;
currentVoiceThreshold = baseThreshold * 2;
sensitivityBoosted = false;
}
} else {
// Check if we should boost sensitivity after 5 seconds of silence
long silenceDuration = currentTime - lastVoiceDetectedTime;
if (!sensitivityBoosted && silenceDuration > 5000) {
currentSilenceThreshold = currentSilenceThreshold / 2;
currentVoiceThreshold = currentVoiceThreshold / 2;
sensitivityBoosted = true;
}
}
}
/**
* Get current silence threshold (may be adapted)
*/
public int getCurrentSilenceThreshold() {
return currentSilenceThreshold > 0 ? currentSilenceThreshold : (int)(config.getVad().getThreshold() * 100);
}
/**
* Get current voice activity threshold (may be adapted)
*/
public int getCurrentVoiceThreshold() {
return currentVoiceThreshold > 0 ? currentVoiceThreshold : (int)(config.getVad().getThreshold() * 200);
}
/**
* Update last sound detected time when analyzing audio
*/
private void updateSoundDetectionTime(double amplitude) {
// If sound is detected (not silence), update the timer
if (amplitude > getCurrentSilenceThreshold()) {
lastSoundDetectedTime = System.currentTimeMillis();
}
}
/**
* Set dynamic silence threshold (for background noise adaptation)
*/
public void setDynamicSilenceThreshold(int threshold) {
this.currentSilenceThreshold = threshold;
}
/**
* Set dynamic voice threshold (for background noise adaptation)
*/
public void setDynamicVoiceThreshold(int threshold) {
this.currentVoiceThreshold = threshold;
}
/**
* Decrease silence threshold and start 15-second timer to reset
* @return new threshold value
*/
public int decreaseSilenceThreshold() {
synchronized (thresholdLock) {
// Set to a very low threshold
int newThreshold = 15;
dynamicSilenceThreshold = newThreshold;
currentSilenceThreshold = newThreshold;
// Reset the timer
lastSoundDetectedTime = System.currentTimeMillis();
// Start timer thread if not already running
startSilenceResetTimer();
return newThreshold;
}
}
/**
* Start a timer that resets silence threshold after 15 seconds of silence
*/
private void startSilenceResetTimer() {
if (timerRunning) {
return; // Timer already running
}
timerRunning = true;
new Thread(() -> {
while (timerRunning) {
try {
Thread.sleep(1000); // Check every second
long currentTime = System.currentTimeMillis();
long silenceDuration = currentTime - lastSoundDetectedTime;
// Check if 15 seconds of silence have passed
if (silenceDuration >= 15000) {
synchronized (thresholdLock) {
dynamicSilenceThreshold = 0;
currentSilenceThreshold = 0;
timerRunning = false;
break;
}
}
} catch (InterruptedException e) {
break;
}
}
timerRunning = false;
}).start();
}
/**
* Reset all thresholds to default values
*/
public void resetThresholds() {
synchronized (thresholdLock) {
int baseThreshold = (int)(config.getVad().getThreshold() * 100);
currentSilenceThreshold = baseThreshold;
currentVoiceThreshold = baseThreshold * 2;
dynamicSilenceThreshold = -1;
sensitivityBoosted = false;
isCalibrated = false;
timerRunning = false;
}
}
/**
* Get current configuration state for debugging
*/
public String getDebugInfo() {
return String.format("AudioAnalyzer Debug Info:\n" +
" Background Noise: %.1f\n" +
" Current Silence Threshold: %d\n" +
" Current Voice Threshold: %d\n" +
" Is Calibrated: %b\n" +
" Sensitivity Boosted: %b\n" +
" Dynamic Threshold: %d\n" +
" Timer Running: %b",
backgroundNoiseLevel,
getCurrentSilenceThreshold(),
getCurrentVoiceThreshold(),
isCalibrated,
sensitivityBoosted,
dynamicSilenceThreshold,
timerRunning
);
}
/**
* Calibration result class
*/
public static class CalibrationResult {
private final int silenceThreshold;
private final int voiceThreshold;
private final double backgroundNoise;
public CalibrationResult(int silenceThreshold, int voiceThreshold) {
this(silenceThreshold, voiceThreshold, 0);
}
public CalibrationResult(int silenceThreshold, int voiceThreshold, double backgroundNoise) {
this.silenceThreshold = silenceThreshold;
this.voiceThreshold = voiceThreshold;
this.backgroundNoise = backgroundNoise;
}
public int getSilenceThreshold() { return silenceThreshold; }
public int getVoiceThreshold() { return voiceThreshold; }
public double getBackgroundNoise() { return backgroundNoise; }
}
}
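// Usage sketch (illustrative): calibrating the analyzer against a short capture
// of background noise and then classifying live buffers. The buffer shapes and
// the calibration flow are example choices; the methods and accessors used here
// all appear elsewhere in this module.
class AudioAnalyzerUsageExample {
    static void run(AudioProcessingConfig config, byte[][] ambientBuffers, byte[] liveBuffer) {
        AudioAnalyzer analyzer = new AudioAnalyzer(config);
        analyzer.initializeAdaptiveSensitivity();
        // Calibrate on buffers recorded before any speech starts.
        AudioAnalysis[] samples = new AudioAnalysis[ambientBuffers.length];
        for (int i = 0; i < ambientBuffers.length; i++) {
            samples[i] = analyzer.analyzeBuffer(ambientBuffers[i], ambientBuffers[i].length);
        }
        AudioAnalyzer.CalibrationResult calibration = analyzer.calibrateBackgroundNoise(samples);
        System.out.println("Calibrated silence threshold: " + calibration.getSilenceThreshold());
        // Classify a live buffer and feed the outcome back into adaptive sensitivity.
        AudioAnalysis analysis = analyzer.analyzeBuffer(liveBuffer, liveBuffer.length);
        analyzer.updateAdaptiveSensitivity(!analysis.isSilent());
        System.out.println(analyzer.getDebugInfo());
    }
}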
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/service/AudioConverter.java
|
package ai.driftkit.audio.service;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.config.AudioProcessingConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import ws.schild.jave.*;
import ws.schild.jave.encode.AudioAttributes;
import ws.schild.jave.encode.EncodingAttributes;
import javax.sound.sampled.*;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Service for converting audio formats using Java libraries and FFmpeg fallback
*/
@Slf4j
@Service
public class AudioConverter {
@Autowired
private AudioProcessingConfig config;
/**
* Convert raw PCM audio data to MP3 format using ffmpeg
*/
public File convertRawToMp3WithFfmpeg(byte[] rawPcmData, String sessionPrefix) throws IOException, InterruptedException {
File debugDir = new File(config.getDebug().getOutputPath());
if (!debugDir.exists()) {
debugDir.mkdirs();
}
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss");
String timestamp = sdf.format(new Date());
File rawFile = new File(debugDir, sessionPrefix + "temp_raw_" + timestamp + ".pcm");
File mp3File = new File(debugDir, sessionPrefix + "audio_" + timestamp + ".mp3");
// Write raw PCM data to file
try (FileOutputStream fos = new FileOutputStream(rawFile)) {
fos.write(rawPcmData);
}
// Build ffmpeg command
ProcessBuilder pb = new ProcessBuilder(
"ffmpeg", "-y",
"-f", "s16be",
"-ar", String.valueOf(config.getSampleRate()),
"-ac", "1",
"-i", rawFile.getAbsolutePath(),
"-codec:a", "mp3",
"-b:a", "64k", // 64 kbps for smaller file size
"-ar", "16000", // Keep same sample rate
mp3File.getAbsolutePath()
);
Process process = pb.start();
int exitCode = process.waitFor();
// Clean up the temporary raw PCM file
if (!rawFile.delete()) {
log.warn("Failed to delete temporary raw PCM file: {}", rawFile.getAbsolutePath());
}
if (exitCode == 0 && mp3File.exists()) {
return mp3File;
} else {
throw new IOException("ffmpeg conversion failed with exit code: " + exitCode);
}
}
/**
* Convert raw PCM audio data to specified format using Java libraries.
* Falls back to FFmpeg if Java conversion fails.
*
* @param rawPcmData Raw PCM audio data
* @param sampleRate Sample rate of the audio
* @param format Target format (wav, mp3, etc.)
* @return Converted audio data
*/
public byte[] convertToFormat(byte[] rawPcmData, int sampleRate, String format)
throws IOException, InterruptedException {
String formatLower = format.toLowerCase();
try {
// Try Java-based conversion first
return convertWithJava(rawPcmData, sampleRate, formatLower);
} catch (Exception e) {
log.warn("Java-based conversion failed, falling back to FFmpeg", e);
// Fallback to FFmpeg
return convertWithFFmpeg(rawPcmData, sampleRate, formatLower);
}
}
/**
* Convert audio using Java libraries (Java Sound API + JAVE).
*/
private byte[] convertWithJava(byte[] rawPcmData, int sampleRate, String format)
throws IOException {
switch (format) {
case "wav":
return convertToWav(rawPcmData, sampleRate);
case "au":
return convertToAu(rawPcmData, sampleRate);
case "aiff":
return convertToAiff(rawPcmData, sampleRate);
case "mp3":
return convertWithJave(rawPcmData, sampleRate, format);
case "ogg":
return convertWithJave(rawPcmData, sampleRate, format);
case "flac":
return convertWithJave(rawPcmData, sampleRate, format);
default:
throw new UnsupportedOperationException(
"Java conversion not supported for format: " + format);
}
}
/**
* Convert audio using JAVE library.
*/
private byte[] convertWithJave(byte[] rawPcmData, int sampleRate, String format)
throws IOException {
Path tempDir = Files.createTempDirectory("jave-conversion");
Path inputWav = tempDir.resolve("input.wav");
Path outputFile = tempDir.resolve("output." + format);
try {
// First convert raw PCM to WAV (JAVE input format)
byte[] wavData = convertToWav(rawPcmData, sampleRate);
Files.write(inputWav, wavData);
// Set up JAVE conversion
MultimediaObject source = new MultimediaObject(inputWav.toFile());
// Configure audio attributes based on format
AudioAttributes audioAttributes = new AudioAttributes();
audioAttributes.setSamplingRate(sampleRate);
audioAttributes.setChannels(1); // Mono
switch (format.toLowerCase()) {
case "mp3":
audioAttributes.setCodec("libmp3lame");
audioAttributes.setBitRate(64000); // 64 kbps
break;
case "ogg":
audioAttributes.setCodec("libvorbis");
audioAttributes.setBitRate(128000); // 128 kbps
break;
case "flac":
audioAttributes.setCodec("flac");
// No bitrate for lossless
break;
default:
throw new IllegalArgumentException("Unsupported format for JAVE: " + format);
}
// Set encoding attributes
EncodingAttributes encodingAttributes = new EncodingAttributes();
encodingAttributes.setInputFormat("wav");
encodingAttributes.setOutputFormat(format);
encodingAttributes.setAudioAttributes(audioAttributes);
// Perform conversion
Encoder encoder = new Encoder();
encoder.encode(source, outputFile.toFile(), encodingAttributes);
if (!Files.exists(outputFile)) {
throw new IOException("JAVE conversion failed - output file not created");
}
return Files.readAllBytes(outputFile);
} catch (EncoderException e) {
throw new IOException("JAVE encoding failed", e);
} finally {
// Clean up temporary files
try {
Files.deleteIfExists(inputWav);
Files.deleteIfExists(outputFile);
Files.deleteIfExists(tempDir);
} catch (IOException e) {
log.warn("Failed to clean up JAVE temporary files", e);
}
}
}
/**
* Convert raw PCM to WAV format using Java Sound API.
*/
private byte[] convertToWav(byte[] rawPcmData, int sampleRate) throws IOException {
// Create audio format
AudioFormat audioFormat = new AudioFormat(
AudioFormat.Encoding.PCM_SIGNED,
sampleRate,
16, // bits per sample
1, // channels (mono)
2, // frame size (16 bits = 2 bytes)
sampleRate,
false // little endian
);
// Create audio input stream from raw data
ByteArrayInputStream rawInputStream = new ByteArrayInputStream(rawPcmData);
AudioInputStream audioInputStream = new AudioInputStream(
rawInputStream, audioFormat, rawPcmData.length / audioFormat.getFrameSize());
// Convert to WAV format
ByteArrayOutputStream wavOutputStream = new ByteArrayOutputStream();
AudioSystem.write(audioInputStream, AudioFileFormat.Type.WAVE, wavOutputStream);
audioInputStream.close();
return wavOutputStream.toByteArray();
}
/**
* Convert raw PCM to AU format using Java Sound API.
*/
private byte[] convertToAu(byte[] rawPcmData, int sampleRate) throws IOException {
AudioFormat audioFormat = new AudioFormat(
AudioFormat.Encoding.PCM_SIGNED,
sampleRate, 16, 1, 2, sampleRate, true // big endian for AU
);
ByteArrayInputStream rawInputStream = new ByteArrayInputStream(rawPcmData);
AudioInputStream audioInputStream = new AudioInputStream(
rawInputStream, audioFormat, rawPcmData.length / audioFormat.getFrameSize());
ByteArrayOutputStream auOutputStream = new ByteArrayOutputStream();
AudioSystem.write(audioInputStream, AudioFileFormat.Type.AU, auOutputStream);
audioInputStream.close();
return auOutputStream.toByteArray();
}
/**
* Convert raw PCM to AIFF format using Java Sound API.
*/
private byte[] convertToAiff(byte[] rawPcmData, int sampleRate) throws IOException {
AudioFormat audioFormat = new AudioFormat(
AudioFormat.Encoding.PCM_SIGNED,
sampleRate, 16, 1, 2, sampleRate, true // big endian for AIFF
);
ByteArrayInputStream rawInputStream = new ByteArrayInputStream(rawPcmData);
AudioInputStream audioInputStream = new AudioInputStream(
rawInputStream, audioFormat, rawPcmData.length / audioFormat.getFrameSize());
ByteArrayOutputStream aiffOutputStream = new ByteArrayOutputStream();
AudioSystem.write(audioInputStream, AudioFileFormat.Type.AIFF, aiffOutputStream);
audioInputStream.close();
return aiffOutputStream.toByteArray();
}
/**
* Fallback conversion using FFmpeg for formats not supported by Java.
*/
private byte[] convertWithFFmpeg(byte[] rawPcmData, int sampleRate, String format)
throws IOException, InterruptedException {
Path tempDir = Files.createTempDirectory("audio-conversion");
Path rawFile = tempDir.resolve("input.pcm");
Path convertedFile = tempDir.resolve("output." + format);
try {
// Write raw PCM data to temporary file
Files.write(rawFile, rawPcmData);
// Build ffmpeg command based on format
ProcessBuilder pb = buildFFmpegCommand(
rawFile.toString(),
convertedFile.toString(),
sampleRate,
format
);
Process process = pb.start();
int exitCode = process.waitFor();
if (exitCode != 0) {
throw new IOException("FFmpeg conversion failed with exit code: " + exitCode);
}
if (!Files.exists(convertedFile)) {
throw new IOException("Converted audio file was not created");
}
return Files.readAllBytes(convertedFile);
} finally {
// Clean up temporary files
try {
Files.deleteIfExists(rawFile);
Files.deleteIfExists(convertedFile);
Files.deleteIfExists(tempDir);
} catch (IOException e) {
log.warn("Failed to clean up temporary files", e);
}
}
}
private ProcessBuilder buildFFmpegCommand(
String inputFile,
String outputFile,
int sampleRate,
String format) {
return switch (format.toLowerCase()) {
case "wav" -> new ProcessBuilder(
"ffmpeg", "-y",
"-f", "s16le", // Little-endian 16-bit PCM
"-ar", String.valueOf(sampleRate),
"-ac", "1", // Mono
"-i", inputFile,
"-f", "wav",
outputFile
);
case "mp3" -> new ProcessBuilder(
"ffmpeg", "-y",
"-f", "s16le",
"-ar", String.valueOf(sampleRate),
"-ac", "1",
"-i", inputFile,
"-codec:a", "mp3",
"-b:a", "64k",
outputFile
);
case "flac" -> new ProcessBuilder(
"ffmpeg", "-y",
"-f", "s16le",
"-ar", String.valueOf(sampleRate),
"-ac", "1",
"-i", inputFile,
"-codec:a", "flac",
outputFile
);
case "ogg" -> new ProcessBuilder(
"ffmpeg", "-y",
"-f", "s16le",
"-ar", String.valueOf(sampleRate),
"-ac", "1",
"-i", inputFile,
"-codec:a", "libvorbis",
"-b:a", "128k",
outputFile
);
default -> throw new IllegalArgumentException("Unsupported audio format: " + format);
};
}
/**
* Get available conversion methods for a format.
*
* @param format Audio format
* @return Information about conversion method
*/
public ConversionInfo getConversionInfo(String format) {
String formatLower = format.toLowerCase();
boolean javaSupported = switch (formatLower) {
case "wav", "au", "aiff" -> true; // Java Sound API
case "mp3", "ogg", "flac" -> true; // JAVE library
default -> false;
};
boolean ffmpegSupported = switch (formatLower) {
case "wav", "mp3", "flac", "ogg", "aac", "m4a" -> true;
default -> false;
};
return new ConversionInfo(formatLower, javaSupported, ffmpegSupported);
}
/**
* Information about conversion capabilities for a format.
*/
public static class ConversionInfo {
private final String format;
private final boolean javaSupported;
private final boolean ffmpegSupported;
public ConversionInfo(String format, boolean javaSupported, boolean ffmpegSupported) {
this.format = format;
this.javaSupported = javaSupported;
this.ffmpegSupported = ffmpegSupported;
}
public String getFormat() { return format; }
public boolean isJavaSupported() { return javaSupported; }
public boolean isFFmpegSupported() { return ffmpegSupported; }
public boolean isSupported() { return javaSupported || ffmpegSupported; }
public String getPreferredMethod() {
if (javaSupported) return "Java libraries (JAVE/Sound API)";
if (ffmpegSupported) return "FFmpeg";
return "Not supported";
}
@Override
public String toString() {
return String.format("ConversionInfo{format='%s', java=%s, ffmpeg=%s, preferred='%s'}",
format, javaSupported, ffmpegSupported, getPreferredMethod());
}
}
/**
* Fast conversion to WAV format optimized for memory usage.
* This method is optimized for real-time processing.
*/
public byte[] convertToWavFast(byte[] rawPcmData, int sampleRate) {
try {
return convertToWav(rawPcmData, sampleRate);
} catch (IOException e) {
log.error("Fast WAV conversion failed", e);
throw new RuntimeException("WAV conversion failed", e);
}
}
/**
* Check if a format can be converted purely in Java without external dependencies.
*/
public boolean isPureJavaSupported(String format) {
return switch (format.toLowerCase()) {
case "wav", "au", "aiff" -> true; // Pure Java Sound API
default -> false; // JAVE and FFmpeg require native binaries
};
}
/**
* Get performance characteristics for a conversion method.
*/
public PerformanceInfo getPerformanceInfo(String format) {
ConversionInfo conversionInfo = getConversionInfo(format);
if (conversionInfo.isJavaSupported()) {
boolean pureJava = isPureJavaSupported(format);
return new PerformanceInfo(
pureJava ? "Fastest" : "Fast",
pureJava ? "Very Low" : "Low",
pureJava ? "None" : "Native libraries required"
);
} else if (conversionInfo.isFFmpegSupported()) {
return new PerformanceInfo("Slower", "High", "FFmpeg binary required");
} else {
return new PerformanceInfo("Not supported", "N/A", "Format not supported");
}
}
/**
* Performance characteristics for conversion methods.
*/
public static class PerformanceInfo {
private final String speed;
private final String resourceUsage;
private final String dependencies;
public PerformanceInfo(String speed, String resourceUsage, String dependencies) {
this.speed = speed;
this.resourceUsage = resourceUsage;
this.dependencies = dependencies;
}
public String getSpeed() { return speed; }
public String getResourceUsage() { return resourceUsage; }
public String getDependencies() { return dependencies; }
@Override
public String toString() {
return String.format("Performance{speed='%s', resources='%s', deps='%s'}",
speed, resourceUsage, dependencies);
}
}
}
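// Usage sketch (illustrative): converting a buffered PCM segment before handing
// it to a batch engine. The converter is a Spring @Service and is assumed to be
// injected; the 16 kHz sample rate and "wav" target are example values.
class AudioConverterUsageExample {
    static byte[] toWav(AudioConverter converter, byte[] rawPcm) throws Exception {
        AudioConverter.ConversionInfo info = converter.getConversionInfo("wav");
        System.out.println(info); // which path applies: Java Sound/JAVE or FFmpeg
        return converter.convertToFormat(rawPcm, 16000, "wav");
    }
}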
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/service/AudioSessionManager.java
|
package ai.driftkit.audio.service;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.config.AudioProcessingConfig;
import ai.driftkit.audio.engine.TranscriptionEngine;
import ai.driftkit.audio.engine.TranscriptionEngineFactory;
import ai.driftkit.audio.model.TranscriptionResult;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.stereotype.Service;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Consumer;
/**
* Audio session manager that supports multiple transcription engines
* and both batch and streaming processing modes.
*/
@Slf4j
@Service
public class AudioSessionManager implements DisposableBean {
private final AudioProcessingConfig config;
private final TranscriptionEngine engine;
private final AudioConverter audioConverter;
// For batch mode processing
private final ConcurrentMap<String, BatchAudioProcessor> batchProcessors = new ConcurrentHashMap<>();
// For streaming mode - callbacks are managed by the engine
private final ConcurrentMap<String, Consumer<TranscriptionResult>> streamingCallbacks = new ConcurrentHashMap<>();
public AudioSessionManager(
AudioProcessingConfig config,
TranscriptionEngineFactory engineFactory,
AudioConverter audioConverter) {
this.config = config;
this.audioConverter = audioConverter;
// Create engine based on configuration
this.engine = engineFactory.createEngine();
log.info("Enhanced audio session manager initialized with {} engine in {} mode",
engine.getName(), config.getProcessingMode());
}
/**
* Create a new audio processing session.
*
* @param sessionId Unique session identifier
* @param resultCallback Callback for transcription results
*/
public void createSession(String sessionId, Consumer<TranscriptionResult> resultCallback) {
if (hasSession(sessionId)) {
throw new IllegalArgumentException("Session already exists: " + sessionId);
}
switch (config.getProcessingMode()) {
case STREAMING:
// For streaming mode, start a streaming session with the engine
streamingCallbacks.put(sessionId, resultCallback);
engine.startStreamingSession(
sessionId,
config.getSampleRate(),
getLanguageCode(),
resultCallback
);
log.debug("Created streaming session: {}", sessionId);
break;
case BATCH:
// For batch mode, create a batch processor with its own AudioAnalyzer
AudioAnalyzer sessionAnalyzer = new AudioAnalyzer(config);
BatchAudioProcessor processor = new BatchAudioProcessor(
sessionId, config, sessionAnalyzer, audioConverter, engine, resultCallback);
batchProcessors.put(sessionId, processor);
log.debug("Created batch session: {}", sessionId);
break;
}
}
/**
* Process audio chunk for a session.
*
* @param sessionId Session identifier
* @param audioData Audio data to process
*/
public void processAudioChunk(String sessionId, byte[] audioData) {
switch (config.getProcessingMode()) {
case STREAMING:
// For streaming mode, send directly to engine
engine.sendStreamingAudio(sessionId, audioData);
break;
case BATCH:
// For batch mode, use the batch processor
BatchAudioProcessor processor = batchProcessors.get(sessionId);
if (processor == null) {
throw new IllegalArgumentException("No active session found: " + sessionId);
}
processor.processAudioChunk(audioData);
break;
}
}
/**
* Check if a session exists.
*
* @param sessionId Session identifier
* @return true if session exists
*/
public boolean hasSession(String sessionId) {
return batchProcessors.containsKey(sessionId) ||
engine.isStreamingSessionActive(sessionId);
}
/**
* Close a session.
*
* @param sessionId Session identifier
*/
public void closeSession(String sessionId) {
// Close streaming session if exists
if (engine.isStreamingSessionActive(sessionId)) {
engine.stopStreamingSession(sessionId);
streamingCallbacks.remove(sessionId);
log.debug("Closed streaming session: {}", sessionId);
}
// Close batch processor if exists
BatchAudioProcessor processor = batchProcessors.remove(sessionId);
if (processor != null) {
processor.close();
log.debug("Closed batch session: {}", sessionId);
}
}
/**
* Get all active session IDs.
*
* @return Set of active session IDs
*/
public Set<String> getActiveSessions() {
Set<String> sessions = new java.util.HashSet<>();
sessions.addAll(batchProcessors.keySet());
sessions.addAll(streamingCallbacks.keySet());
return sessions;
}
/**
* Close all active sessions.
*/
public void closeAllSessions() {
// Close all batch sessions
batchProcessors.forEach((id, processor) -> processor.close());
batchProcessors.clear();
// Close all streaming sessions
streamingCallbacks.keySet().forEach(engine::stopStreamingSession);
streamingCallbacks.clear();
log.info("All audio sessions closed");
}
@Override
public void destroy() {
closeAllSessions();
engine.shutdown();
log.info("Enhanced audio session manager shut down");
}
private String getLanguageCode() {
// Get language code based on engine type
switch (config.getEngine()) {
case ASSEMBLYAI:
return config.getAssemblyai().getLanguageCode().getValue();
case DEEPGRAM:
return config.getDeepgram().getLanguage().getValue();
default:
return "en"; // Default
}
}
/**
* Get session statistics (placeholder for future implementation).
*/
public SessionStats getSessionStats(String sessionId) {
// TODO: Implement session statistics
return new SessionStats();
}
public static class SessionStats {
// Placeholder for session statistics
private long chunksProcessed;
private long bytesProcessed;
private long transcriptionsReceived;
// Getters/setters would be here
}
}
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/service/BatchAudioProcessor.java
|
package ai.driftkit.audio.service;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.config.AudioProcessingConfig;
import ai.driftkit.audio.engine.TranscriptionEngine;
import ai.driftkit.audio.model.AudioAnalysis;
import ai.driftkit.audio.model.TranscriptionResult;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
/**
* Processor for batch mode audio transcription.
* Accumulates audio chunks based on VAD and sends complete segments for transcription.
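 *
 * <p>Minimal usage sketch (assumes a configured {@code engine}, {@code audioConverter} and
 * {@code config}, mirroring how the session manager wires this class; {@code pcmChunk} is a
 * placeholder for a raw PCM buffer):
 * <pre>{@code
 * BatchAudioProcessor processor = new BatchAudioProcessor(
 *         "session-1", config, new AudioAnalyzer(config), audioConverter, engine,
 *         result -> System.out.println(result.getText()));
 * processor.processAudioChunk(pcmChunk); // call repeatedly as audio arrives
 * processor.flush();                     // force out any buffered speech
 * processor.close();
 * }</pre>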
*/
@Slf4j
public class BatchAudioProcessor {
private final String sessionId;
private final AudioProcessingConfig config;
private final AudioAnalyzer audioAnalyzer;
private final AudioConverter audioConverter;
private final TranscriptionEngine engine;
private final Consumer<TranscriptionResult> resultCallback;
private final ByteArrayOutputStream audioBuffer = new ByteArrayOutputStream();
private final AtomicBoolean isProcessing = new AtomicBoolean(true);
private final AtomicLong lastSpeechTime = new AtomicLong(0);
private final AtomicLong totalChunksProcessed = new AtomicLong(0);
private boolean inSpeechSegment = false;
private long segmentStartTime = 0;
public BatchAudioProcessor(
String sessionId,
AudioProcessingConfig config,
AudioAnalyzer audioAnalyzer,
AudioConverter audioConverter,
TranscriptionEngine engine,
Consumer<TranscriptionResult> resultCallback) {
this.sessionId = sessionId;
this.config = config;
this.audioAnalyzer = audioAnalyzer;
this.audioConverter = audioConverter;
this.engine = engine;
this.resultCallback = resultCallback;
}
/**
* Process an audio chunk.
*/
public void processAudioChunk(byte[] audioData) {
if (!isProcessing.get()) {
log.warn("Processor is stopped, ignoring audio chunk for session {}", sessionId);
return;
}
totalChunksProcessed.incrementAndGet();
// Analyze audio for voice activity
AudioAnalysis analysis = audioAnalyzer.analyzeBuffer(audioData, audioData.length);
if (!analysis.isSilent()) {
handleSpeechDetected(audioData);
} else {
handleSilenceDetected();
}
// Debug output if enabled
if (config.getDebug().isEnabled()) {
saveDebugAudio(audioData);
}
}
private void handleSpeechDetected(byte[] audioData) {
lastSpeechTime.set(System.currentTimeMillis());
if (!inSpeechSegment) {
// Start new speech segment
inSpeechSegment = true;
segmentStartTime = System.currentTimeMillis();
log.debug("Speech started in session {}", sessionId);
}
// Add audio to buffer
try {
audioBuffer.write(audioData);
} catch (IOException e) {
log.error("Failed to buffer audio data", e);
}
}
private void handleSilenceDetected() {
if (inSpeechSegment) {
long silenceDuration = System.currentTimeMillis() - lastSpeechTime.get();
if (silenceDuration >= config.getVad().getSilenceDurationMs()) {
// End of speech segment detected
finalizeSpeechSegment();
}
}
}
private void finalizeSpeechSegment() {
inSpeechSegment = false;
byte[] audioData = audioBuffer.toByteArray();
audioBuffer.reset();
long segmentDuration = System.currentTimeMillis() - segmentStartTime;
log.debug("Speech segment ended in session {} after {}ms", sessionId, segmentDuration);
// Check minimum duration
if (segmentDuration < config.getMinChunkDurationSeconds() * 1000) {
log.debug("Segment too short ({}ms), discarding", segmentDuration);
return;
}
// Convert audio if needed
byte[] processedAudio = audioData;
if (engine.getConfiguration().isRequiresConversion()) {
try {
processedAudio = audioConverter.convertToFormat(
audioData,
config.getSampleRate(),
"wav"
);
} catch (Exception e) {
log.error("Failed to convert audio", e);
return;
}
}
// Send for transcription
final byte[] finalAudio = processedAudio;
engine.transcribeBatch(
finalAudio,
config.getSampleRate(),
getLanguageCode()
).thenAccept(result -> {
if (resultCallback != null) {
resultCallback.accept(result);
}
}).exceptionally(throwable -> {
log.error("Transcription failed for session {}", sessionId, throwable);
if (resultCallback != null) {
resultCallback.accept(TranscriptionResult.builder()
.error(true)
.errorMessage("Transcription failed: " + throwable.getMessage())
.timestamp(System.currentTimeMillis())
.build());
}
return null;
});
}
/**
* Force finalize any pending audio.
*/
public void flush() {
if (audioBuffer.size() > 0) {
finalizeSpeechSegment();
}
}
/**
* Close this processor.
*/
public void close() {
isProcessing.set(false);
flush();
log.info("Batch processor closed for session {} after processing {} chunks",
sessionId, totalChunksProcessed.get());
}
private String getLanguageCode() {
switch (config.getEngine()) {
case ASSEMBLYAI:
return config.getAssemblyai().getLanguageCode().getValue();
case DEEPGRAM:
return config.getDeepgram().getLanguage().getValue();
default:
return "en";
}
}
private void saveDebugAudio(byte[] audioData) {
try {
String filename = String.format("%s/session_%s_chunk_%d.raw",
config.getDebug().getOutputPath(), sessionId, totalChunksProcessed.get());
File file = new File(filename);
file.getParentFile().mkdirs();
try (FileOutputStream fos = new FileOutputStream(file)) {
fos.write(audioData);
}
} catch (IOException e) {
log.error("Failed to save debug audio", e);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-ai-audio/1.0.0/ai/driftkit/audio/utils/Retrier.java
|
package ai.driftkit.audio.utils;
import java.util.concurrent.Callable;
import java.util.function.Consumer;
/**
 * Utility class for retrying operations with a configurable retry count, delay, and backoff multiplier
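 *
 * <p>Illustrative example: retry a flaky call up to 5 times, starting with a 200 ms delay and
 * doubling it after each failure ({@code httpFetch} and {@code url} are placeholders):
 * <pre>{@code
 * String payload = Retrier.retry(() -> httpFetch(url), 5, 200, 2);
 * }</pre>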
*/
public class Retrier {
private static final int DEFAULT_TRIALS = 3;
private static final int DEFAULT_DELAY = 3000;
private Retrier() {
}
public static void retry(Runnable runnable) throws Exception {
retry(runnable, DEFAULT_DELAY);
}
public static <R> R retry(Callable<R> callable) throws Exception {
return retry(callable, DEFAULT_DELAY);
}
public static void retry(Runnable runnable, long delay) throws Exception {
retry(runnable, DEFAULT_TRIALS, delay / DEFAULT_TRIALS, 1);
}
public static <R> R retry(Callable<R> callable, long delay) throws Exception {
return retry(callable, DEFAULT_TRIALS, delay / DEFAULT_TRIALS, 1);
}
public static void retry(Runnable runnable, int trials, long delay) throws Exception {
retry(runnable, trials, delay, 1);
}
public static <R> R retry(Callable<R> callable, int trials, long delay) throws Exception {
return retry(callable, trials, delay, 1);
}
public static void retryQuietly(Runnable runnable, Consumer<Exception> log) {
try {
retry(runnable, DEFAULT_DELAY);
} catch (Exception e) {
log.accept(e);
}
}
public static <R> R retryQuietly(Callable<R> callable, Consumer<Exception> log) {
try {
return retry(callable, DEFAULT_DELAY);
} catch (Exception e) {
log.accept(e);
return null;
}
}
public static void retryQuietly(Runnable runnable, Consumer<Exception> log, int trials, long delay, int multiplier) {
try {
retry(runnable, trials, delay, multiplier);
} catch (Exception e) {
log.accept(e);
}
}
public static <R> R retryQuietly(Callable<R> callable, Consumer<Exception> log, int trials, long delay, int multiplier) {
try {
return retry(callable, trials, delay, multiplier);
} catch (Exception e) {
log.accept(e);
return null;
}
}
public static <R> R retry(Callable<R> callable, int trials, long delay, int multiplier) throws Exception {
for(int trial = 0; trial < trials; delay *= (long)multiplier) {
++trial;
try {
return callable.call();
} catch (Exception e) {
if (trial >= trials) {
throw e;
}
Thread.sleep(delay);
}
}
return null;
}
public static void retry(Runnable runnable, int trials, long delay, int multiplier) throws Exception {
for(int trial = 0; trial < trials; delay *= (long)multiplier) {
++trial;
try {
runnable.run();
return;
} catch (Exception e) {
if (trial >= trials) {
throw e;
}
Thread.sleep(delay);
}
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/core
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/core/config/AssemblyAIConfig.java
|
package ai.driftkit.audio.core.config;
import ai.driftkit.common.domain.Language;
import lombok.Data;
/**
* Configuration for AssemblyAI transcription service.
*/
@Data
public class AssemblyAIConfig {
/**
* AssemblyAI API key.
*/
private String apiKey;
/**
* Language code for transcription.
* Default: ENGLISH
*/
private Language languageCode = Language.ENGLISH;
}
|
0
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/core
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/core/config/CoreDeepgramConfig.java
|
package ai.driftkit.audio.core.config;
import ai.driftkit.common.domain.Language;
import lombok.Data;
/**
* Core configuration for Deepgram transcription service without Spring dependencies.
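 *
 * <p>Illustrative setup (setters are generated by Lombok's {@code @Data}; the environment
 * variable name is only an example):
 * <pre>{@code
 * CoreDeepgramConfig deepgram = new CoreDeepgramConfig();
 * deepgram.setApiKey(System.getenv("DEEPGRAM_API_KEY"));
 * deepgram.setModel("nova-2");
 * deepgram.setInterimResults(true);
 * }</pre>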
*/
@Data
public class CoreDeepgramConfig {
/**
* Deepgram API key.
*/
private String apiKey;
/**
* Language code for transcription.
* Default: ENGLISH
*/
private Language language = Language.ENGLISH;
/**
* Deepgram model to use.
* Options: "nova-2", "nova", "enhanced", "base"
* Default: "nova-2"
*/
private String model = "nova-2";
/**
* Whether to add punctuation to transcriptions.
* Default: true
*/
private boolean punctuate = true;
/**
* Whether to enable interim results for streaming.
* Default: true
*/
private boolean interimResults = true;
/**
* Whether to enable automatic language detection.
* Default: false
*/
private boolean detectLanguage = false;
/**
* Whether to enable speaker diarization.
* Default: false
*/
private boolean diarize = false;
/**
* Number of speakers to detect if diarization is enabled.
* Default: 2
*/
private int diarizeVersion = 2;
/**
* Whether to enable profanity filtering.
* Default: false
*/
private boolean profanityFilter = false;
/**
* Whether to enable redaction of sensitive information.
* Default: false
*/
private boolean redact = false;
/**
* Smart formatting options.
* Default: false
*/
private boolean smartFormat = false;
}
|
0
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/core
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/core/config/DeepgramConfig.java
|
package ai.driftkit.audio.core.config;
import ai.driftkit.common.domain.Language;
import lombok.Data;
/**
* Configuration properties for Deepgram transcription service.
*/
@Data
public class DeepgramConfig {
/**
* Deepgram API key.
*/
private String apiKey;
/**
* Language code for transcription.
* Default: ENGLISH
*/
private Language language = Language.ENGLISH;
/**
* Deepgram model to use.
* Options: "nova-2", "nova", "enhanced", "base"
* Default: "nova-2"
*/
private String model = "nova-2";
/**
* Whether to add punctuation to transcriptions.
* Default: true
*/
private boolean punctuate = true;
/**
* Whether to enable interim results for streaming.
* Default: true
*/
private boolean interimResults = true;
/**
* Whether to enable automatic language detection.
* Default: false
*/
private boolean detectLanguage = false;
/**
* Whether to enable speaker diarization.
* Default: false
*/
private boolean diarize = false;
/**
* Number of speakers to detect if diarization is enabled.
* Default: 2
*/
private int diarizeVersion = 2;
/**
* Whether to enable profanity filtering.
* Default: false
*/
private boolean profanityFilter = false;
/**
* Whether to enable redaction of sensitive information.
* Default: false
*/
private boolean redact = false;
/**
* Smart formatting options.
* Default: false
*/
private boolean smartFormat = false;
}
|
0
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/engine
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/engine/impl/DeepgramEngine.java
|
package ai.driftkit.audio.engine.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import ai.driftkit.audio.core.config.CoreAudioConfig;
import ai.driftkit.audio.engine.AbstractTranscriptionEngine;
import ai.driftkit.audio.engine.EngineConfiguration;
import ai.driftkit.audio.model.TranscriptionResult;
import ai.driftkit.audio.model.WordBuffer;
import ai.driftkit.audio.model.WordInfo;
import ai.driftkit.audio.model.SegmentResult;
import ai.driftkit.audio.model.deepgram.DeepgramResponse;
import ai.driftkit.audio.model.deepgram.DeepgramAlternative;
import ai.driftkit.audio.model.deepgram.DeepgramWord;
import okhttp3.*;
import okio.ByteString;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
/**
* Deepgram transcription engine implementation.
* Supports both batch and streaming transcription modes.
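 *
 * <p>Batch usage sketch (assumes a populated {@code CoreAudioConfig} and 16 kHz mono PCM in
 * {@code wavBytes}; both names are placeholders):
 * <pre>{@code
 * DeepgramEngine engine = new DeepgramEngine(config);
 * engine.initialize();
 * engine.transcribeBatch(wavBytes, 16000, "en")
 *       .thenAccept(result -> System.out.println(result.getText()));
 * }</pre>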
*/
@Slf4j
public class DeepgramEngine extends AbstractTranscriptionEngine {
private static final String ENGINE_NAME = "Deepgram";
private static final String DEEPGRAM_API_URL = "https://api.deepgram.com/v1/listen";
private static final String DEEPGRAM_WS_URL = "wss://api.deepgram.com/v1/listen";
private OkHttpClient httpClient;
private final ObjectMapper objectMapper = new ObjectMapper();
private final ScheduledExecutorService reconnectExecutor = Executors.newSingleThreadScheduledExecutor();
public DeepgramEngine(CoreAudioConfig config) {
super(config);
}
@Override
public String getName() {
return ENGINE_NAME;
}
@Override
public boolean supportsBatchMode() {
return true;
}
@Override
public boolean supportsStreamingMode() {
return true;
}
@Override
public void initialize() {
String apiKey = config.getDeepgram().getApiKey();
if (apiKey == null || apiKey.trim().isEmpty()) {
throw new IllegalStateException("Deepgram API key is not configured");
}
this.httpClient = new OkHttpClient.Builder()
.connectTimeout(30, TimeUnit.SECONDS)
.readTimeout(30, TimeUnit.SECONDS)
.writeTimeout(30, TimeUnit.SECONDS)
.build();
log.info("Deepgram engine initialized");
}
@Override
protected CompletableFuture<TranscriptionResult> doTranscribeBatch(
byte[] audioData, int sampleRate, String languageCode) {
return CompletableFuture.supplyAsync(() -> {
try {
String url = buildBatchUrl(sampleRate, languageCode);
RequestBody body = RequestBody.create(
audioData,
MediaType.parse("audio/wav")
);
Request request = new Request.Builder()
.url(url)
.header("Authorization", "Token " + config.getDeepgram().getApiKey())
.header("Content-Type", "audio/wav")
.post(body)
.build();
try (Response response = httpClient.newCall(request).execute()) {
if (!response.isSuccessful()) {
throw new IOException("Unexpected response: " + response);
}
String responseBody = response.body().string();
return parseDeepgramResponse(responseBody);
}
} catch (Exception e) {
log.error("Deepgram batch transcription failed", e);
return TranscriptionResult.builder()
.error(true)
.errorMessage("Transcription failed: " + e.getMessage())
.timestamp(System.currentTimeMillis())
.build();
}
});
}
@Override
protected StreamingSession createStreamingSession(
String sessionId,
int sampleRate,
String languageCode,
Consumer<TranscriptionResult> resultCallback) {
return new DeepgramStreamingSession(sessionId, sampleRate, languageCode, resultCallback);
}
@Override
protected void doShutdown() {
if (httpClient != null) {
httpClient.dispatcher().executorService().shutdown();
httpClient.connectionPool().evictAll();
}
if (reconnectExecutor != null && !reconnectExecutor.isShutdown()) {
reconnectExecutor.shutdown();
}
}
@Override
public EngineConfiguration getConfiguration() {
Map<String, String> requiredConfig = new HashMap<>();
requiredConfig.put("audio.processing.deepgram.api-key", "Deepgram API key");
Map<String, String> optionalConfig = new HashMap<>();
optionalConfig.put("audio.processing.deepgram.language", "Language code (default: en)");
optionalConfig.put("audio.processing.deepgram.model", "Model to use (default: nova-2)");
optionalConfig.put("audio.processing.deepgram.punctuate", "Add punctuation (default: true)");
optionalConfig.put("audio.processing.deepgram.interim-results", "Enable interim results for streaming (default: true)");
return EngineConfiguration.builder()
.engineType(ENGINE_NAME)
.requiredConfig(requiredConfig)
.optionalConfig(optionalConfig)
.processingMode(EngineConfiguration.ProcessingMode.BOTH)
.supportedFormats(EngineConfiguration.AudioFormat.builder()
.supportedSampleRates(new int[]{8000, 16000, 24000, 48000})
.supportedChannels(new int[]{1, 2})
.supportedBitsPerSample(new int[]{16})
.supportedEncodings(new String[]{"linear16", "flac", "mulaw", "amr", "opus"})
.build())
.maxStreamingChunkSize(8192) // 8KB chunks
.recommendedBufferSizeMs(100) // 100ms buffers
.requiresConversion(false) // Deepgram accepts raw PCM
.build();
}
private String buildBatchUrl(int sampleRate, String languageCode) {
StringBuilder url = new StringBuilder(DEEPGRAM_API_URL);
url.append("?encoding=linear16");
url.append("&sample_rate=").append(sampleRate);
String effectiveLanguage = languageCode != null ? languageCode : config.getDeepgram().getLanguage().getValue();
url.append("&language=").append(effectiveLanguage);
url.append("&model=").append(config.getDeepgram().getModel());
url.append("&punctuate=").append(config.getDeepgram().isPunctuate());
return url.toString();
}
private String buildStreamingUrl(int sampleRate, String languageCode) {
StringBuilder url = new StringBuilder(DEEPGRAM_WS_URL);
url.append("?encoding=linear16");
url.append("&sample_rate=").append(sampleRate);
String effectiveLanguage = languageCode != null ? languageCode : config.getDeepgram().getLanguage().getValue();
url.append("&language=").append(effectiveLanguage);
url.append("&model=").append(config.getDeepgram().getModel());
url.append("&punctuate=").append(config.getDeepgram().isPunctuate());
url.append("&interim_results=").append(config.getDeepgram().isInterimResults());
return url.toString();
}
private TranscriptionResult parseDeepgramResponse(String json) {
try {
DeepgramResponse response = objectMapper.readValue(json, DeepgramResponse.class);
// Handle streaming response format (has direct channel)
if (response.getChannel() != null) {
var channel = response.getChannel();
if (channel.getAlternatives() != null && !channel.getAlternatives().isEmpty()) {
var alternative = channel.getAlternatives().get(0);
String transcript = alternative.getTranscript();
Double confidence = alternative.getConfidence();
return TranscriptionResult.builder()
.text(transcript)
.confidence(confidence)
.language(response.getLanguage() != null ? response.getLanguage() : "en")
.timestamp(System.currentTimeMillis())
.error(false)
.metadata(response.toMap())
.build();
}
}
// Handle batch response format (has results.channels)
if (response.getResults() != null && response.getResults().getChannels() != null && !response.getResults().getChannels().isEmpty()) {
var channel = response.getResults().getChannels().get(0);
if (channel.getAlternatives() != null && !channel.getAlternatives().isEmpty()) {
var alternative = channel.getAlternatives().get(0);
String transcript = alternative.getTranscript();
Double confidence = alternative.getConfidence();
return TranscriptionResult.builder()
.text(transcript)
.confidence(confidence)
.language(response.getLanguage() != null ? response.getLanguage() : "en")
.timestamp(System.currentTimeMillis())
.error(false)
.metadata(response.toMap())
.build();
}
}
return TranscriptionResult.builder()
.error(true)
.errorMessage("No transcription results found")
.timestamp(System.currentTimeMillis())
.build();
} catch (Exception e) {
log.error("Failed to parse Deepgram response", e);
return TranscriptionResult.builder()
.error(true)
.errorMessage("Failed to parse response: " + e.getMessage())
.timestamp(System.currentTimeMillis())
.build();
}
}
/**
* WebSocket-based streaming session for Deepgram.
*/
private class DeepgramStreamingSession implements StreamingSession {
private final String sessionId;
private final int sampleRate;
private final String languageCode;
private final Consumer<TranscriptionResult> resultCallback;
private WebSocket webSocket;
private volatile boolean active = false;
private volatile boolean shouldReconnect = true;
private final WordBuffer wordBuffer = new WordBuffer();
private final AtomicInteger reconnectAttempts = new AtomicInteger(0);
private static final int MAX_RECONNECT_ATTEMPTS = 5;
private static final long RECONNECT_DELAY_MS = 1000;
private static final long MAX_RECONNECT_DELAY_MS = 30000;
DeepgramStreamingSession(String sessionId, int sampleRate, String languageCode,
Consumer<TranscriptionResult> resultCallback) {
this.sessionId = sessionId;
this.sampleRate = sampleRate;
this.languageCode = languageCode;
this.resultCallback = resultCallback;
connect();
}
private void connect() {
try {
String url = buildStreamingUrl(sampleRate, languageCode);
Request request = new Request.Builder()
.url(url)
.header("Authorization", "Token " + config.getDeepgram().getApiKey())
.build();
WebSocketListener listener = new DeepgramWebSocketListener();
webSocket = httpClient.newWebSocket(request, listener);
active = true;
log.debug("Deepgram streaming session {} started", sessionId);
} catch (Exception e) {
log.error("Failed to start Deepgram streaming session", e);
if (shouldReconnect) {
scheduleReconnect();
} else {
throw new RuntimeException("Failed to start streaming session", e);
}
}
}
private void scheduleReconnect() {
int attempts = reconnectAttempts.incrementAndGet();
if (attempts > MAX_RECONNECT_ATTEMPTS) {
log.error("Max reconnection attempts ({}) reached for session {}", MAX_RECONNECT_ATTEMPTS, sessionId);
active = false;
shouldReconnect = false;
resultCallback.accept(TranscriptionResult.builder()
.error(true)
.errorMessage("Max reconnection attempts reached")
.timestamp(System.currentTimeMillis())
.build());
return;
}
long delay = Math.min(RECONNECT_DELAY_MS * (1L << (attempts - 1)), MAX_RECONNECT_DELAY_MS);
log.info("Scheduling reconnection attempt {} for session {} in {}ms", attempts, sessionId, delay);
reconnectExecutor.schedule(() -> {
if (shouldReconnect && !active) {
log.info("Attempting to reconnect session {} (attempt {})", sessionId, attempts);
connect();
}
}, delay, TimeUnit.MILLISECONDS);
}
private void onConnectionSuccess() {
reconnectAttempts.set(0);
log.info("Deepgram streaming session {} reconnected successfully", sessionId);
}
private void onConnectionFailure(Throwable t) {
active = false;
log.error("Deepgram WebSocket connection failed for session {}: {}", sessionId, t.getMessage());
if (shouldReconnect) {
scheduleReconnect();
} else {
resultCallback.accept(TranscriptionResult.builder()
.error(true)
.errorMessage("WebSocket connection failed: " + t.getMessage())
.timestamp(System.currentTimeMillis())
.build());
}
}
@Override
public void sendAudio(byte[] audioData) {
if (webSocket != null && active) {
webSocket.send(ByteString.of(audioData));
}
}
@Override
public void close() {
shouldReconnect = false;
active = false;
if (webSocket != null) {
webSocket.close(1000, "Session closed");
log.debug("Deepgram streaming session {} closed", sessionId);
}
}
@Override
public boolean isActive() {
return active;
}
private class DeepgramWebSocketListener extends WebSocketListener {
@Override
public void onOpen(WebSocket webSocket, Response response) {
log.debug("Deepgram WebSocket opened for session {}", sessionId);
onConnectionSuccess();
}
@Override
public void onMessage(WebSocket webSocket, String text) {
try {
DeepgramResponse response = objectMapper.readValue(text, DeepgramResponse.class);
// Early exit if no channel or alternatives
if (response.getChannel() == null || response.getChannel().getAlternatives() == null
|| response.getChannel().getAlternatives().isEmpty()) {
return;
}
// Find the best alternative (highest confidence)
DeepgramAlternative bestAlternative = findBestAlternative(response.getChannel().getAlternatives());
// Convert words and update buffer
List<WordInfo> words = convertToWordInfoList(bestAlternative);
boolean isFinal = Boolean.TRUE.equals(response.getIsFinal());
SegmentResult segmentResult = wordBuffer.updateWords(words, isFinal, config.getDeepgram().isInterimResults());
// Create and send result only if we have new content
if (StringUtils.isNotBlank(segmentResult.getText())) {
TranscriptionResult result = createTranscriptionResult(response, bestAlternative, segmentResult, isFinal);
resultCallback.accept(result);
}
} catch (Exception e) {
log.error("Error processing Deepgram message", e);
}
}
private DeepgramAlternative findBestAlternative(List<DeepgramAlternative> alternatives) {
DeepgramAlternative best = alternatives.get(0);
for (DeepgramAlternative alt : alternatives) {
if (alt.getConfidence() == null) {
continue;
}
if (best.getConfidence() == null || alt.getConfidence() > best.getConfidence()) {
best = alt;
}
}
return best;
}
private List<WordInfo> convertToWordInfoList(DeepgramAlternative alternative) {
List<WordInfo> words = new ArrayList<>();
if (alternative.getWords() == null) {
return words;
}
for (DeepgramWord word : alternative.getWords()) {
words.add(WordInfo.builder()
.word(word.getWord())
.punctuatedWord(word.getPunctuatedWord())
.start(word.getStart())
.end(word.getEnd())
.confidence(word.getConfidence())
.language(word.getLanguage())
.build());
}
return words;
}
private TranscriptionResult createTranscriptionResult(DeepgramResponse response,
DeepgramAlternative alternative, SegmentResult segmentResult, boolean isFinal) {
return TranscriptionResult.builder()
.text(alternative.getTranscript()) // Original transcript from this response
.mergedTranscript(segmentResult.getText()) // Current segment only (since last final)
.words(segmentResult.getWords()) // Words for current segment only
.confidence(segmentResult.getConfidence())
.language(response.getLanguage() != null ? response.getLanguage() : "en")
.timestamp(System.currentTimeMillis())
.interim(!isFinal)
.error(false)
.metadata(response.toMap())
.build();
}
@Override
public void onClosing(WebSocket webSocket, int code, String reason) {
webSocket.close(1000, null);
active = false;
if (shouldReconnect && code != 1000) {
log.warn("Deepgram WebSocket closing unexpectedly for session {} with code {}: {}", sessionId, code, reason);
scheduleReconnect();
}
}
@Override
public void onFailure(WebSocket webSocket, Throwable t, Response response) {
onConnectionFailure(t);
}
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/model/WordBuffer.java
|
package ai.driftkit.audio.model;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* Buffer for managing and deduplicating words from streaming transcription results.
* Handles overlapping word updates and maintains a consistent timeline.
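 *
 * <p>Sketch of the intended update cycle ({@code interimWords} and {@code finalWords} stand in
 * for word lists parsed from streaming responses):
 * <pre>{@code
 * WordBuffer buffer = new WordBuffer();
 * SegmentResult interim = buffer.updateWords(interimWords, false, true); // interim update
 * SegmentResult last = buffer.updateWords(finalWords, true, true);       // final update closes the segment
 * System.out.println(last.getText());
 * }</pre>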
*/
@Slf4j
public class WordBuffer {
private final Map<String, WordEntry> wordMap = new ConcurrentHashMap<>();
private final List<WordEntry> currentSegmentWords = new ArrayList<>();
private final Object lock = new Object();
private double lastFinalEndTime = 0.0;
/**
* Add or update words from a new transcription result.
* Handles deduplication and merging of overlapping words.
* Returns the current segment transcript (since last final).
*/
public SegmentResult updateWords(List<WordInfo> newWords, boolean isFinal, boolean isInterimEnabled) {
synchronized (lock) {
// If this is a final result, first clean up old words
if (isInterimEnabled && isFinal) {
cleanupOldWords();
}
for (WordInfo newWord : newWords) {
// Only process words that are after the last final end time
if (isInterimEnabled && newWord.getEnd() <= lastFinalEndTime) {
continue;
}
String key = generateWordKey(newWord);
WordEntry existing = wordMap.get(key);
if (existing != null) {
// Update existing word if new version has better confidence or is final
if (isFinal || newWord.getConfidence() > existing.getConfidence()) {
existing.update(newWord, isFinal);
log.trace("Updated word: {} at {}-{} (confidence: {})",
newWord.getWord(), newWord.getStart(), newWord.getEnd(), newWord.getConfidence());
}
} else {
// Add new word
WordEntry entry = new WordEntry(newWord, isFinal);
wordMap.put(key, entry);
insertWordInOrder(entry);
log.trace("Added new word: {} at {}-{}",
newWord.getWord(), newWord.getStart(), newWord.getEnd());
}
}
// Clean up overlapping words if this is a final result
if (isFinal) {
if (isInterimEnabled) {
cleanupOverlaps();
}
// Update final state
String currentSegmentText = getCurrentSegmentTranscript();
if (!currentSegmentWords.isEmpty()) {
lastFinalEndTime = currentSegmentWords.stream()
.mapToDouble(WordEntry::getEnd)
.max().orElse(lastFinalEndTime);
}
return new SegmentResult(currentSegmentText, getOverallConfidence(), true, getCurrentWords());
} else {
// Return interim result
String currentSegmentText = getCurrentSegmentTranscript();
return new SegmentResult(currentSegmentText, getOverallConfidence(), false, getCurrentWords());
}
}
}
/**
* Get the current segment transcript (since last final).
*/
public String getCurrentSegmentTranscript() {
synchronized (lock) {
return currentSegmentWords.stream()
.map(w -> w.getPunctuatedWord() != null ? w.getPunctuatedWord() : w.getWord())
.collect(Collectors.joining(" "));
}
}
/**
* Get the current segment transcript using smart punctuation spacing.
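 * For example, the buffered punctuated words {@code "«Hello"}, {@code "world»"}, {@code "!"}
 * would be joined as {@code "«Hello world»!"} rather than {@code "«Hello world» !"}.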
*/
public String getCurrentSegmentPunctuatedTranscript() {
synchronized (lock) {
StringBuilder sb = new StringBuilder();
String lastPunctuation = "";
for (WordEntry word : currentSegmentWords) {
String punctuated = word.getPunctuatedWord() != null ? word.getPunctuatedWord() : word.getWord();
// Smart spacing: no space after opening quotes/brackets or before closing punctuation
if (!sb.isEmpty() && !lastPunctuation.matches("[(\\[\"'«]")
&& !punctuated.matches("^[.,;:!?)\\]\"'»].*")) {
sb.append(" ");
}
sb.append(punctuated);
// Track last character for smart spacing
if (!punctuated.isEmpty()) {
lastPunctuation = punctuated.substring(punctuated.length() - 1);
}
}
return sb.toString();
}
}
/**
* Get current segment words in chronological order.
*/
public List<WordInfo> getCurrentWords() {
synchronized (lock) {
return currentSegmentWords.stream()
.map(WordEntry::toWordInfo)
.collect(Collectors.toList());
}
}
/**
* Get the overall confidence of the current segment.
*/
public double getOverallConfidence() {
synchronized (lock) {
if (currentSegmentWords.isEmpty()) return 0.0;
double sum = currentSegmentWords.stream()
.mapToDouble(WordEntry::getConfidence)
.sum();
return sum / currentSegmentWords.size();
}
}
/**
* Clear all words from the buffer.
*/
public void clear() {
synchronized (lock) {
wordMap.clear();
currentSegmentWords.clear();
lastFinalEndTime = 0.0;
}
}
/**
* Clean up words that are before the last final result.
*/
private void cleanupOldWords() {
// Remove words that ended before the last final time
currentSegmentWords.removeIf(word -> word.getEnd() <= lastFinalEndTime);
wordMap.entrySet().removeIf(entry -> entry.getValue().getEnd() <= lastFinalEndTime);
}
private String generateWordKey(WordInfo word) {
// Key based on approximate time position and word text
// This allows for small time adjustments while identifying the same word
long timeSlot = Math.round(word.getStart() * 10); // 100ms slots
return timeSlot + "_" + word.getWord().toLowerCase();
}
private void insertWordInOrder(WordEntry entry) {
// Binary search to find insertion point
int index = Collections.binarySearch(currentSegmentWords, entry,
Comparator.comparing(WordEntry::getStart));
if (index < 0) {
index = -index - 1;
}
currentSegmentWords.add(index, entry);
}
private void cleanupOverlaps() {
// Remove words that significantly overlap with higher confidence words
List<WordEntry> toRemove = new ArrayList<>();
for (int i = 0; i < currentSegmentWords.size() - 1; i++) {
WordEntry current = currentSegmentWords.get(i);
WordEntry next = currentSegmentWords.get(i + 1);
// Check for significant overlap
if (current.getEnd() > next.getStart() + 0.1) { // 100ms overlap threshold
// Keep the word with higher confidence or the final one
if (next.isFinal() && !current.isFinal()) {
toRemove.add(current);
} else if (current.isFinal() && !next.isFinal()) {
toRemove.add(next);
} else if (next.getConfidence() > current.getConfidence()) {
toRemove.add(current);
} else {
toRemove.add(next);
}
}
}
// Remove overlapping words
for (WordEntry entry : toRemove) {
currentSegmentWords.remove(entry);
wordMap.values().removeIf(e -> e.equals(entry));
}
}
@Data
private static class WordEntry {
private String word;
private String punctuatedWord;
private double start;
private double end;
private double confidence;
private String language;
private boolean isFinal;
private long lastUpdated;
WordEntry(WordInfo info, boolean isFinal) {
update(info, isFinal);
}
void update(WordInfo info, boolean isFinal) {
this.word = info.getWord();
this.punctuatedWord = info.getPunctuatedWord();
this.start = info.getStart();
this.end = info.getEnd();
this.confidence = info.getConfidence();
this.language = info.getLanguage();
this.isFinal = this.isFinal || isFinal;
this.lastUpdated = System.currentTimeMillis();
}
WordInfo toWordInfo() {
return WordInfo.builder()
.word(word)
.punctuatedWord(punctuatedWord)
.start(start)
.end(end)
.confidence(confidence)
.language(language)
.build();
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/model
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/model/deepgram/DeepgramResponse.java
|
package ai.driftkit.audio.model.deepgram;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class DeepgramResponse {
@JsonProperty("metadata")
private DeepgramMetadata metadata;
@JsonProperty("results")
private DeepgramResults results;
@JsonProperty("language")
private String language;
@JsonProperty("is_final")
private Boolean isFinal;
@JsonProperty("speech_final")
private Boolean speechFinal;
@JsonProperty("channel_index")
private List<Integer> channelIndex;
@JsonProperty("duration")
private Double duration;
@JsonProperty("start")
private Double start;
@JsonProperty("type")
private String type;
@JsonProperty("channel")
private DeepgramChannel channel;
@JsonProperty("from_finalize")
private Boolean fromFinalize;
@JsonProperty("transaction_key")
private String transactionKey;
public Map<String, Object> toMap() {
// Map.of() rejects null values, so build the map defensively for fields
// that are absent in some response variants (batch vs. streaming).
Map<String, Object> map = new HashMap<>();
if (metadata != null) {
map.put("metadata", metadata);
}
map.put("results", results == null ? "" : results);
map.put("language", language != null ? language : "en");
map.put("is_final", isFinal != null ? isFinal : false);
map.put("speech_final", speechFinal != null ? speechFinal : false);
if (channelIndex != null) {
map.put("channel_index", channelIndex);
}
if (duration != null) {
map.put("duration", duration);
}
if (start != null) {
map.put("start", start);
}
if (type != null) {
map.put("type", type);
}
return map;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio
|
java-sources/ai/driftkit/driftkit-audio-core/0.8.1/ai/driftkit/audio/processor/AudioAnalyzer.java
|
package ai.driftkit.audio.processor;
import ai.driftkit.audio.core.config.CoreAudioConfig;
import ai.driftkit.audio.model.AudioAnalysis;
/**
* Service for analyzing audio buffers and detecting voice activity
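 *
 * <p>Usage sketch ({@code pcm} is a 16-bit PCM buffer and {@code calibrationSamples} an
 * {@code AudioAnalysis[]} captured during a quiet period; both are placeholders):
 * <pre>{@code
 * AudioAnalyzer analyzer = new AudioAnalyzer(config);
 * analyzer.initializeAdaptiveSensitivity();
 * analyzer.calibrateBackgroundNoise(calibrationSamples); // optional noise calibration
 * AudioAnalysis analysis = analyzer.analyzeBuffer(pcm, pcm.length);
 * if (!analysis.isSilent()) {
 *     // voice (or at least non-silence) detected
 * }
 * }</pre>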
*/
public class AudioAnalyzer {
private final CoreAudioConfig config;
// Adaptive sensitivity fields
private volatile long lastVoiceDetectedTime = System.currentTimeMillis();
private volatile boolean sensitivityBoosted = false;
private volatile int currentSilenceThreshold;
private volatile int currentVoiceThreshold;
// Silence reset timer fields
private volatile long lastSoundDetectedTime = System.currentTimeMillis();
private volatile int dynamicSilenceThreshold = -1;
private volatile boolean timerRunning = false;
private final Object thresholdLock = new Object();
// Calibration support
private double backgroundNoiseLevel = 0;
private boolean isCalibrated = false;
public AudioAnalyzer(CoreAudioConfig config) {
this.config = config;
}
/**
* Calibrate background noise level
* @param samples Array of AudioAnalysis samples from calibration phase
* @return Adjusted thresholds based on background noise
*/
public CalibrationResult calibrateBackgroundNoise(AudioAnalysis[] samples) {
// Default thresholds based on VAD config
double defaultSilenceThreshold = config.getVad().getThreshold() * 100; // Convert to 0-100 scale
double defaultVoiceThreshold = defaultSilenceThreshold * 2;
if (samples == null || samples.length == 0) {
return new CalibrationResult((int)defaultSilenceThreshold, (int)defaultVoiceThreshold);
}
// Calculate average background noise
double totalAmplitude = 0;
for (AudioAnalysis sample : samples) {
totalAmplitude += sample.getAmplitude();
}
backgroundNoiseLevel = totalAmplitude / samples.length;
// Calculate adjusted thresholds
int adjustedSilenceThreshold = (int)(backgroundNoiseLevel * 150); // 1.5x background
int adjustedVoiceThreshold = (int)(backgroundNoiseLevel * 250); // 2.5x background
// Use adjusted thresholds if they're higher than configured
int finalSilenceThreshold = Math.max(adjustedSilenceThreshold, (int)defaultSilenceThreshold);
int finalVoiceThreshold = Math.max(adjustedVoiceThreshold, (int)defaultVoiceThreshold);
// Update current thresholds
currentSilenceThreshold = finalSilenceThreshold;
currentVoiceThreshold = finalVoiceThreshold;
isCalibrated = true;
return new CalibrationResult(finalSilenceThreshold, finalVoiceThreshold, backgroundNoiseLevel);
}
/**
* Analyze audio buffer for voice activity and silence detection
*/
public AudioAnalysis analyzeBuffer(byte[] buffer, int length) {
// Convert bytes to 16-bit samples and calculate RMS (Root Mean Square)
int sampleCount = length / 2; // 16-bit samples
if (sampleCount == 0) {
// Guard against an empty buffer to avoid division by zero below; treat it as silence
return new AudioAnalysis(true, 0);
}
long sum = 0;
for (int i = 0; i < length - 1; i += 2) {
// Convert two bytes to a 16-bit sample (big-endian)
short sample = (short)((buffer[i] << 8) | (buffer[i + 1] & 0xFF));
sum += sample * sample;
}
double rms = Math.sqrt((double)sum / sampleCount);
boolean isSilent = rms < getCurrentSilenceThreshold();
// Update sound detection time for silence reset timer
updateSoundDetectionTime(rms);
return new AudioAnalysis(isSilent, rms);
}
/**
* Analyze entire chunk for voice activity
*/
public boolean analyzeChunkForVoice(byte[] chunkData) {
// Analyze the entire chunk in segments to detect voice activity
int segmentSize = config.getSampleRate() * 2; // 1 second segments
int numSegments = chunkData.length / segmentSize;
int voiceSegments = 0;
double maxSegmentAmplitude = 0;
for (int i = 0; i < numSegments; i++) {
int start = i * segmentSize;
int length = Math.min(segmentSize, chunkData.length - start);
if (length > 0) {
byte[] segment = new byte[length];
System.arraycopy(chunkData, start, segment, 0, length);
AudioAnalysis analysis = analyzeBuffer(segment, length);
if (analysis.getAmplitude() > maxSegmentAmplitude) {
maxSegmentAmplitude = analysis.getAmplitude();
}
if (analysis.getAmplitude() > getCurrentVoiceThreshold()) {
voiceSegments++;
}
}
}
// Calculate average amplitude across segments
double avgAmplitude = 0;
if (numSegments > 0) {
double totalAmplitude = 0;
for (int i = 0; i < numSegments; i++) {
int start = i * segmentSize;
int length = Math.min(segmentSize, chunkData.length - start);
if (length > 0) {
byte[] segment = new byte[length];
System.arraycopy(chunkData, start, segment, 0, length);
AudioAnalysis analysis = analyzeBuffer(segment, length);
totalAmplitude += analysis.getAmplitude();
}
}
avgAmplitude = totalAmplitude / numSegments;
}
// Consider the chunk to contain voice if any of the following holds:
// 1. At least one segment crosses the voice threshold, OR
// 2. The maximum segment amplitude is above 70% of the threshold (more lenient), OR
// 3. The average amplitude across segments is above 50% of the threshold
boolean hasVoice = voiceSegments > 0 ||
maxSegmentAmplitude > (getCurrentVoiceThreshold() * 0.7) ||
avgAmplitude > (getCurrentVoiceThreshold() * 0.5);
return hasVoice;
}
/**
* Initialize adaptive sensitivity settings
*/
public void initializeAdaptiveSensitivity() {
int baseThreshold = (int)(config.getVad().getThreshold() * 100);
currentSilenceThreshold = baseThreshold;
currentVoiceThreshold = baseThreshold * 2;
lastVoiceDetectedTime = System.currentTimeMillis();
sensitivityBoosted = false;
}
/**
* Update adaptive sensitivity based on voice activity
*/
public void updateAdaptiveSensitivity(boolean voiceDetected) {
long currentTime = System.currentTimeMillis();
if (voiceDetected) {
lastVoiceDetectedTime = currentTime;
if (sensitivityBoosted) {
// Reset to normal sensitivity when voice is detected
int baseThreshold = (int)(config.getVad().getThreshold() * 100);
currentSilenceThreshold = baseThreshold;
currentVoiceThreshold = baseThreshold * 2;
sensitivityBoosted = false;
}
} else {
// Check if we should boost sensitivity after 5 seconds of silence
long silenceDuration = currentTime - lastVoiceDetectedTime;
if (!sensitivityBoosted && silenceDuration > 5000) {
currentSilenceThreshold = currentSilenceThreshold / 2;
currentVoiceThreshold = currentVoiceThreshold / 2;
sensitivityBoosted = true;
}
}
}
/**
* Get current silence threshold (may be adapted)
*/
public int getCurrentSilenceThreshold() {
if (!config.getVad().isEnabled()) {
return 0;
}
return currentSilenceThreshold > 0 ? currentSilenceThreshold : (int)(config.getVad().getThreshold() * 100);
}
/**
* Get current voice activity threshold (may be adapted)
*/
public int getCurrentVoiceThreshold() {
return currentVoiceThreshold > 0 ? currentVoiceThreshold : (int)(config.getVad().getThreshold() * 200);
}
/**
* Update last sound detected time when analyzing audio
*/
private void updateSoundDetectionTime(double amplitude) {
// If sound is detected (not silence), update the timer
if (amplitude > getCurrentSilenceThreshold()) {
lastSoundDetectedTime = System.currentTimeMillis();
}
}
/**
* Set dynamic silence threshold (for background noise adaptation)
*/
public void setDynamicSilenceThreshold(int threshold) {
this.currentSilenceThreshold = threshold;
}
/**
* Set dynamic voice threshold (for background noise adaptation)
*/
public void setDynamicVoiceThreshold(int threshold) {
this.currentVoiceThreshold = threshold;
}
/**
* Decrease silence threshold and start 15-second timer to reset
* @return new threshold value
*/
public int decreaseSilenceThreshold() {
synchronized (thresholdLock) {
// Set to a very low threshold
int newThreshold = 15;
dynamicSilenceThreshold = newThreshold;
currentSilenceThreshold = newThreshold;
// Reset the timer
lastSoundDetectedTime = System.currentTimeMillis();
// Start timer thread if not already running
startSilenceResetTimer();
return newThreshold;
}
}
/**
* Start a timer that resets silence threshold after 15 seconds of silence
*/
private void startSilenceResetTimer() {
if (timerRunning) {
return; // Timer already running
}
timerRunning = true;
new Thread(() -> {
while (timerRunning) {
try {
Thread.sleep(1000); // Check every second
long currentTime = System.currentTimeMillis();
long silenceDuration = currentTime - lastSoundDetectedTime;
// Check if 15 seconds of silence have passed
if (silenceDuration >= 15000) {
synchronized (thresholdLock) {
dynamicSilenceThreshold = 0;
currentSilenceThreshold = 0;
timerRunning = false;
break;
}
}
} catch (InterruptedException e) {
break;
}
}
timerRunning = false;
}).start();
}
/**
* Reset all thresholds to default values
*/
public void resetThresholds() {
synchronized (thresholdLock) {
int baseThreshold = (int)(config.getVad().getThreshold() * 100);
currentSilenceThreshold = baseThreshold;
currentVoiceThreshold = baseThreshold * 2;
dynamicSilenceThreshold = -1;
sensitivityBoosted = false;
isCalibrated = false;
timerRunning = false;
}
}
/**
* Get current configuration state for debugging
*/
public String getDebugInfo() {
return String.format("AudioAnalyzer Debug Info:\n" +
" Background Noise: %.1f\n" +
" Current Silence Threshold: %d\n" +
" Current Voice Threshold: %d\n" +
" Is Calibrated: %b\n" +
" Sensitivity Boosted: %b\n" +
" Dynamic Threshold: %d\n" +
" Timer Running: %b",
backgroundNoiseLevel,
getCurrentSilenceThreshold(),
getCurrentVoiceThreshold(),
isCalibrated,
sensitivityBoosted,
dynamicSilenceThreshold,
timerRunning
);
}
/**
* Calibration result class
*/
public static class CalibrationResult {
private final int silenceThreshold;
private final int voiceThreshold;
private final double backgroundNoise;
public CalibrationResult(int silenceThreshold, int voiceThreshold) {
this(silenceThreshold, voiceThreshold, 0);
}
public CalibrationResult(int silenceThreshold, int voiceThreshold, double backgroundNoise) {
this.silenceThreshold = silenceThreshold;
this.voiceThreshold = voiceThreshold;
this.backgroundNoise = backgroundNoise;
}
public int getSilenceThreshold() { return silenceThreshold; }
public int getVoiceThreshold() { return voiceThreshold; }
public double getBackgroundNoise() { return backgroundNoise; }
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/client/AiClient.java
|
package ai.driftkit.chat.framework.ai.client;
import ai.driftkit.chat.framework.ai.domain.*;
import ai.driftkit.chat.framework.ai.domain.Chat.ChatRestResponse;
import ai.driftkit.chat.framework.ai.domain.MessageTask.MessageIdResponse;
import ai.driftkit.chat.framework.ai.domain.MessageTask.MessageRestResponse;
import ai.driftkit.common.domain.*;
import lombok.SneakyThrows;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@FeignClient(name = "aiClient", url = "${ai-props.host:}", configuration = FeignConfig.class)
public interface AiClient {
@GetMapping("/data/v1.0/admin/llm/chat/{chatId}")
List<HistoryMessage> getMessages(@PathVariable("chatId") String chatId);
@PostMapping("/data/v1.0/admin/llm/chat")
ChatRestResponse createChat(@RequestBody ChatRequest chatRequest);
@GetMapping("/data/v1.0/admin/llm/message/{messageId}")
MessageRestResponse getMessage(@PathVariable("messageId") String messageId);
@PostMapping("/data/v1.0/admin/llm/prompt/message")
MessageIdResponse sendPromptMessageAsync(@RequestBody PromptRequest promptRequest);
@PostMapping("/data/v1.0/admin/llm/prompt/message/sync")
MessageRestResponse sendPromptMessage(@RequestBody PromptRequest promptRequest);
@PostMapping("/data/v1.0/admin/llm/message")
MessageRestResponse sendDirectMessageAsync(@RequestBody MessageRequest messageRequest);
@GetMapping("/data/v1.0/admin/llm/image/{imageTaskId}/resource/0")
byte[] getImage(@PathVariable("imageTaskId") String imageTaskId);
@GetMapping("/data/v1.0/admin/dictionary/")
RestResponse<List<DictionaryItem>> getDictionaries();
@PostMapping("/data/v1.0/admin/dictionary/items")
RestResponse<List<DictionaryItem>> saveDictionaries(@RequestBody List<DictionaryItem> items);
@PostMapping("/data/v1.0/admin/dictionary/group/")
RestResponse<DictionaryGroup> saveDictionaryGroup(@RequestBody DictionaryGroup group);
@GetMapping("/data/v1.0/admin/dictionary/group/{groupId}")
RestResponse<DictionaryGroup> getDictionaryGroup(@PathVariable String groupId);
@PostMapping("/data/v1.0/admin/prompt/create-if-not-exists")
RestResponse<Prompt> createPromptIfNotExists(@RequestBody CreatePromptRequest createPromptRequest);
@SneakyThrows
default MessageRestResponse sendDirectMessageAndWait(@RequestBody MessageRequest promptRequest) {
MessageRestResponse messageIdResponse = sendDirectMessageAsync(promptRequest);
if (!messageIdResponse.isSuccess()) {
return new MessageRestResponse(false, null);
}
String messageId = messageIdResponse.getData().getMessageId();
MessageRestResponse message = getMessage(messageId);
int count = 300;
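// Poll roughly once per second for the completed result, giving up after ~300 attempts (~5 minutes)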
while (message != null && message.getData().getResult() == null && count-- > 0) {
message = getMessage(messageId);
Thread.sleep(1000);
}
return message;
}
@SneakyThrows
default MessageRestResponse sendPromptMessageAndWait(@RequestBody PromptRequest promptRequest) {
MessageIdResponse messageIdResponse = sendPromptMessageAsync(promptRequest);
if (!messageIdResponse.isSuccess()) {
return new MessageRestResponse(false, null);
}
String messageId = messageIdResponse.getData().getMessageId();
MessageRestResponse message = getMessage(messageId);
int count = 300;
while (message != null && message.getData().getResult() == null && count-- > 0) {
message = getMessage(messageId);
Thread.sleep(1000);
}
return message;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/client/FeignConfig.java
|
package ai.driftkit.chat.framework.ai.client;
import feign.RequestInterceptor;
import feign.codec.Encoder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.http.HttpMessageConverters;
import org.springframework.cloud.openfeign.support.SpringEncoder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
@Configuration
@Slf4j
public class FeignConfig {
@Value("${ai-props.username:}")
private String username;
@Value("${ai-props.password:}")
private String password;
@Autowired
private ObjectFactory<HttpMessageConverters> messageConverters;
@Bean
public RequestInterceptor basicAuthRequestInterceptor() {
return requestTemplate -> {
if (username != null && !username.isEmpty() && password != null && !password.isEmpty()) {
String auth = username + ":" + password;
byte[] encodedAuth = Base64.getEncoder().encode(auth.getBytes(StandardCharsets.UTF_8));
String authHeader = "Basic " + new String(encodedAuth);
requestTemplate.header("Authorization", authHeader);
} else {
log.warn("AI client credentials not configured. Set ai-props.username and ai-props.password");
}
requestTemplate.header("Content-Type", "application/json");
requestTemplate.header("Accept", "application/json");
};
}
@Bean("aiClientEncoder")
@Primary
public Encoder aiClientEncoder() {
return new SpringEncoder(this.messageConverters);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/domain/AIFunctionSchema.java
|
package ai.driftkit.chat.framework.ai.domain;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.swagger.v3.oas.annotations.media.SchemaProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.util.StringUtils;
import java.io.Serializable;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class AIFunctionSchema implements Serializable {
private static final Map<Class<?>, AIFunctionSchema> schemaCache = new ConcurrentHashMap<>();
private static final ThreadLocal<Set<Class<?>>> processingClasses = ThreadLocal.withInitial(HashSet::new);
private static final ObjectMapper MAPPER = new ObjectMapper();
String schemaName;
String description;
List<AIFunctionProperty> properties;
boolean isArray;
boolean composable;
public AIFunctionSchema(List<AIFunctionProperty> properties) {
this.properties = properties;
}
public AIFunctionSchema(String schemaName, List<AIFunctionProperty> properties) {
this(properties);
this.schemaName = schemaName;
}
public static AIFunctionSchema fromClass(Class<?> clazz) {
return fromClass(clazz, SchemaGenerationStrategy.RECURSIVE);
}
public static AIFunctionSchema fromClass(Class<?> clazz, SchemaGenerationStrategy strategy) {
if (strategy == SchemaGenerationStrategy.JACKSON) {
return fromClassUsingJackson(clazz);
} else {
return fromClassRecursive(clazz, null);
}
}
private static AIFunctionSchema fromClassUsingJackson(Class<?> clazz) {
if (schemaCache.containsKey(clazz)) {
return schemaCache.get(clazz);
}
try {
AIFunctionSchema schema = fromClassRecursive(clazz, null);
schemaCache.put(clazz, schema);
return schema;
} catch (Exception e) {
throw new RuntimeException("Failed to generate schema for class: " + clazz.getName(), e);
}
}
private static AIFunctionSchema fromClassRecursive(Class<?> clazz, Type genericType) {
if (schemaCache.containsKey(clazz)) {
return schemaCache.get(clazz);
}
Set<Class<?>> currentlyProcessing = processingClasses.get();
if (currentlyProcessing.contains(clazz)) {
return new AIFunctionSchema(clazz.getSimpleName(), new ArrayList<>());
}
currentlyProcessing.add(clazz);
try {
List<AIFunctionProperty> properties = new ArrayList<>();
String schemaName = clazz.getSimpleName();
SchemaName schemaNameAnnotation = clazz.getAnnotation(SchemaName.class);
if (schemaNameAnnotation != null && StringUtils.hasText(schemaNameAnnotation.value())) {
schemaName = schemaNameAnnotation.value();
}
String description = null;
SchemaDescription descriptionAnnotation = clazz.getAnnotation(SchemaDescription.class);
if (descriptionAnnotation != null && StringUtils.hasText(descriptionAnnotation.value())) {
description = descriptionAnnotation.value();
}
Class<?> currentClass = clazz;
while (currentClass != null && currentClass != Object.class) {
for (Field field : currentClass.getDeclaredFields()) {
if (Modifier.isStatic(field.getModifiers()) ||
Modifier.isTransient(field.getModifiers()) ||
field.isSynthetic()) {
continue;
}
if (field.isAnnotationPresent(JsonIgnore.class) ||
field.isAnnotationPresent(SchemaIgnore.class)) {
continue;
}
field.setAccessible(true);
AIFunctionProperty property = createPropertyFromField(field);
if (property != null) {
properties.add(property);
}
}
currentClass = currentClass.getSuperclass();
}
AIFunctionSchema schema = new AIFunctionSchema(schemaName, properties);
if (description != null) {
schema.setDescription(description);
}
SchemaArray schemaArrayAnnotation = clazz.getAnnotation(SchemaArray.class);
if (schemaArrayAnnotation != null) {
schema.setArray(true);
}
schemaCache.put(clazz, schema);
return schema;
} finally {
currentlyProcessing.remove(clazz);
}
}
private static AIFunctionProperty createPropertyFromField(Field field) {
String name = field.getName();
Class<?> type = field.getType();
Type genericType = field.getGenericType();
SchemaProperty propertyAnnotation = field.getAnnotation(SchemaProperty.class);
AIFunctionProperty.AIFunctionPropertyBuilder builder = AIFunctionProperty.builder()
.name(name)
.nameId(name);
if (propertyAnnotation != null) {
if (StringUtils.hasText(propertyAnnotation.nameId())) {
builder.nameId(propertyAnnotation.nameId());
}
if (StringUtils.hasText(propertyAnnotation.dataNameId())) {
builder.dataNameId(propertyAnnotation.dataNameId());
}
if (StringUtils.hasText(propertyAnnotation.description())) {
builder.description(propertyAnnotation.description());
} else {
builder.description("Property " + name);
}
if (StringUtils.hasText(propertyAnnotation.defaultValue())) {
builder.defaultValue(propertyAnnotation.defaultValue());
}
if (propertyAnnotation.minValue() != Integer.MIN_VALUE) {
builder.minValue(propertyAnnotation.minValue());
}
if (propertyAnnotation.maxValue() != Integer.MAX_VALUE) {
builder.maxValue(propertyAnnotation.maxValue());
}
if (propertyAnnotation.minLength() > 0) {
builder.minLength(propertyAnnotation.minLength());
}
if (propertyAnnotation.maxLength() > 0) {
builder.maxLength(propertyAnnotation.maxLength());
}
builder.isRequired(propertyAnnotation.required());
builder.isMultiSelect(propertyAnnotation.multiSelect());
builder.isArray(propertyAnnotation.array());
builder.valueAsNameId(propertyAnnotation.valueAsNameId());
if (propertyAnnotation.values() != null && propertyAnnotation.values().length > 0) {
builder.type(PropertyType.ENUM);
builder.values(List.of(propertyAnnotation.values()));
} else if (propertyAnnotation.type() != Void.class) {
if (propertyAnnotation.type().isEnum()) {
setEnumValues(propertyAnnotation.type(), builder);
}
}
} else {
builder.description("Property " + name);
}
if (type.isArray()) {
builder.isArray(true);
Class<?> componentType = type.getComponentType();
if (isSimpleType(componentType)) {
builder.type(mapClassToPropertyType(componentType));
} else {
builder.type(PropertyType.OBJECT);
builder.nestedSchema(fromClassRecursive(componentType, null));
}
}
if (type.isEnum() && CollectionUtils.isEmpty(builder.values)) {
setEnumValues(type, builder);
}
// For non-parameterized fields the raw Type is the field's Class itself; check that class,
// not the class of the Type implementation object (which is never an enum).
if (genericType instanceof Class<?> genericTypeClass && genericTypeClass.isEnum()
&& CollectionUtils.isEmpty(builder.values)) {
setEnumValues(genericTypeClass, builder);
}
else if (Collection.class.isAssignableFrom(type)) {
builder.isArray(true);
if (genericType instanceof ParameterizedType paramType) {
Type actualTypeArg = paramType.getActualTypeArguments()[0];
if (actualTypeArg instanceof Class<?> genericClass) {
if (isSimpleType(genericClass)) {
builder.type(mapClassToPropertyType(genericClass));
} else {
builder.type(PropertyType.OBJECT);
builder.nestedSchema(fromClassRecursive(genericClass, null));
}
} else if (actualTypeArg instanceof ParameterizedType nestedParamType) {
Class<?> rawType = (Class<?>) nestedParamType.getRawType();
if (Collection.class.isAssignableFrom(rawType)) {
builder.type(PropertyType.ARRAY);
Type nestedTypeArg = nestedParamType.getActualTypeArguments()[0];
if (nestedTypeArg instanceof Class<?> nestedClass) {
AIFunctionSchema nestedSchema = new AIFunctionSchema();
nestedSchema.setArray(true);
if (isSimpleType(nestedClass)) {
nestedSchema.setSchemaName(nestedClass.getSimpleName() + "Array");
AIFunctionProperty itemProp = AIFunctionProperty.builder()
.name("item")
.type(mapClassToPropertyType(nestedClass))
.build();
nestedSchema.setProperties(List.of(itemProp));
} else {
nestedSchema.setSchemaName(nestedClass.getSimpleName() + "Array");
nestedSchema.setProperties(fromClassRecursive(nestedClass, null).getProperties());
}
builder.nestedSchema(nestedSchema);
}
} else {
builder.type(PropertyType.OBJECT);
builder.nestedSchema(new AIFunctionSchema(rawType.getSimpleName(), new ArrayList<>()));
}
}
} else {
builder.type(PropertyType.OBJECT);
}
}
else if (Map.class.isAssignableFrom(type)) {
builder.type(PropertyType.MAP);
if (genericType instanceof ParameterizedType paramType) {
Type keyType = paramType.getActualTypeArguments()[0];
Type valueType = paramType.getActualTypeArguments()[1];
if (valueType instanceof Class<?> valueClass && !isSimpleType(valueClass)) {
AIFunctionSchema valueSchema = fromClassRecursive(valueClass, null);
builder.nestedSchema(valueSchema);
}
Map<String, Object> additionalProps = new HashMap<>();
additionalProps.put("keyType", keyType.getTypeName());
additionalProps.put("valueType", valueType.getTypeName());
builder.additionalProperties(additionalProps);
}
}
else if (isSimpleType(type)) {
if (builder.type == null) {
builder.type(mapClassToPropertyType(type));
if (type.isEnum()) {
List<String> enumValues = new ArrayList<>();
for (Object enumConstant : type.getEnumConstants()) {
enumValues.add(enumConstant.toString());
}
builder.values(enumValues);
SchemaEnumValues enumValuesAnnotation = field.getAnnotation(SchemaEnumValues.class);
if (enumValuesAnnotation != null && enumValuesAnnotation.value().length > 0) {
builder.values(Arrays.asList(enumValuesAnnotation.value()));
}
}
}
}
else {
builder.type(PropertyType.OBJECT);
builder.nestedSchema(fromClassRecursive(type, genericType));
}
return builder.build();
}
private static void setEnumValues(Class<?> type, AIFunctionProperty.AIFunctionPropertyBuilder builder) {
builder.type(PropertyType.ENUM);
Object[] enumConstants = type.getEnumConstants();
List<String> enumValues = new ArrayList<>();
for (Object constant : enumConstants) {
enumValues.add(constant.toString());
}
builder.values(enumValues);
}
private static boolean isSimpleType(Class<?> type) {
return type.isPrimitive() ||
type.equals(String.class) ||
type.equals(Integer.class) ||
type.equals(Double.class) ||
type.equals(Float.class) ||
type.equals(Boolean.class) ||
type.equals(Long.class) ||
type.equals(Date.class) ||
type.equals(UUID.class) ||
type.isEnum();
}
private static PropertyType mapClassToPropertyType(Class<?> clazz) {
if (clazz.equals(String.class) || clazz.equals(UUID.class) || clazz.equals(Date.class)) {
return PropertyType.STRING;
} else if (clazz.equals(Integer.class) || clazz.equals(int.class) ||
clazz.equals(Long.class) || clazz.equals(long.class) ||
clazz.equals(Short.class) || clazz.equals(short.class) ||
clazz.equals(Byte.class) || clazz.equals(byte.class)) {
return PropertyType.INTEGER;
} else if (clazz.equals(Double.class) || clazz.equals(double.class) ||
clazz.equals(Float.class) || clazz.equals(float.class)) {
return PropertyType.DOUBLE;
} else if (clazz.equals(Boolean.class) || clazz.equals(boolean.class)) {
return PropertyType.BOOLEAN;
} else if (clazz.isEnum()) {
return PropertyType.ENUM;
} else if (Collection.class.isAssignableFrom(clazz)) {
return PropertyType.ARRAY;
} else if (Map.class.isAssignableFrom(clazz)) {
return PropertyType.MAP;
} else {
return PropertyType.OBJECT;
}
}
public static void clearCache() {
schemaCache.clear();
}
public enum SchemaGenerationStrategy {
RECURSIVE,
JACKSON
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class AIFunctionProperty implements Serializable {
public static AIFunctionPropertyBuilder builder() {
return new AIFunctionPropertyBuilder();
}
public static class AIFunctionPropertyBuilder {
private String name;
private String nameId;
private List<String> nameIds;
private String description;
private PropertyType type;
private String dataNameId;
private String defaultValue;
private Integer maxValue;
private Integer minValue;
private boolean isRequired;
private List<String> values;
private Integer minLength;
private Integer maxLength;
private boolean isArray;
private boolean valueAsNameId;
private boolean isMultiSelect;
private AIFunctionSchema nestedSchema;
private Map<String, Object> additionalProperties;
public AIFunctionPropertyBuilder name(String name) {
this.name = name;
return this;
}
public AIFunctionPropertyBuilder nameId(String nameId) {
this.nameId = nameId;
return this;
}
public AIFunctionPropertyBuilder nameIds(List<String> nameIds) {
this.nameIds = nameIds;
return this;
}
public AIFunctionPropertyBuilder description(String description) {
this.description = description;
return this;
}
public AIFunctionPropertyBuilder type(PropertyType type) {
this.type = type;
return this;
}
public AIFunctionPropertyBuilder dataNameId(String dataNameId) {
this.dataNameId = dataNameId;
return this;
}
public AIFunctionPropertyBuilder defaultValue(String defaultValue) {
this.defaultValue = defaultValue;
return this;
}
public AIFunctionPropertyBuilder maxValue(Integer maxValue) {
this.maxValue = maxValue;
return this;
}
public AIFunctionPropertyBuilder minValue(Integer minValue) {
this.minValue = minValue;
return this;
}
public AIFunctionPropertyBuilder isRequired(boolean isRequired) {
this.isRequired = isRequired;
return this;
}
public AIFunctionPropertyBuilder values(List<String> values) {
this.values = values;
return this;
}
public AIFunctionPropertyBuilder minLength(Integer minLength) {
this.minLength = minLength;
return this;
}
public AIFunctionPropertyBuilder maxLength(Integer maxLength) {
this.maxLength = maxLength;
return this;
}
public AIFunctionPropertyBuilder isArray(boolean isArray) {
this.isArray = isArray;
return this;
}
public AIFunctionPropertyBuilder valueAsNameId(boolean valueAsNameId) {
this.valueAsNameId = valueAsNameId;
return this;
}
public AIFunctionPropertyBuilder isMultiSelect(boolean isMultiSelect) {
this.isMultiSelect = isMultiSelect;
return this;
}
public AIFunctionPropertyBuilder nestedSchema(AIFunctionSchema nestedSchema) {
this.nestedSchema = nestedSchema;
return this;
}
public AIFunctionPropertyBuilder additionalProperties(Map<String, Object> additionalProperties) {
this.additionalProperties = additionalProperties;
return this;
}
public AIFunctionProperty build() {
AIFunctionProperty property = new AIFunctionProperty();
property.name = this.name;
property.nameId = this.nameId;
property.nameIds = this.nameIds;
property.description = this.description;
property.type = this.type;
property.dataNameId = this.dataNameId;
property.defaultValue = this.defaultValue;
property.maxValue = this.maxValue;
property.minValue = this.minValue;
property.isRequired = this.isRequired;
property.values = this.values;
property.minLength = this.minLength;
property.maxLength = this.maxLength;
property.isArray = this.isArray;
property.valueAsNameId = this.valueAsNameId;
property.isMultiSelect = this.isMultiSelect;
property.nestedSchema = this.nestedSchema;
property.additionalProperties = this.additionalProperties;
return property;
}
}
private String name;
private String nameId;
private List<String> nameIds;
private String description;
private PropertyType type;
private String dataNameId;
private String defaultValue;
private Integer maxValue;
private Integer minValue;
private boolean isRequired;
private List<String> values;
private Integer minLength;
private Integer maxLength;
private boolean isArray;
private boolean valueAsNameId;
private boolean isMultiSelect;
private AIFunctionSchema nestedSchema;
private Map<String, Object> additionalProperties;
@JsonGetter
public PropertyType getType() {
if (CollectionUtils.isNotEmpty(values)) {
return PropertyType.ENUM;
}
return type;
}
public void addAdditionalProperty(String key, Object value) {
if (additionalProperties == null) {
additionalProperties = new HashMap<>();
}
additionalProperties.put(key, value);
}
}
public enum PropertyType {
STRING,
INTEGER,
DOUBLE,
BOOLEAN,
LITERAL,
ENUM,
OBJECT,
ARRAY_OBJECT,
ARRAY,
MAP
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface SchemaName {
String value();
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface SchemaDescription {
String value();
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface SchemaArray {
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface SchemaIgnore {
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface SchemaProperty {
String nameId() default "";
String description() default "";
String defaultValue() default "";
String dataNameId() default "";
int minValue() default Integer.MIN_VALUE;
int maxValue() default Integer.MAX_VALUE;
int minLength() default 0;
int maxLength() default 0;
boolean required() default false;
boolean multiSelect() default false;
boolean array() default false;
boolean valueAsNameId() default false;
Class<?> type() default Void.class;
String[] values() default {};
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface SchemaEnumValues {
String[] value();
}
}
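/*
 * Illustrative usage sketch: combining the annotations declared above on a plain POJO so the
 * reflective generator can build an AIFunctionSchema from it. The OrderRequest class is
 * hypothetical, and the public fromClass(...) entry point is assumed; only the private
 * fromClassRecursive(...) helper is visible in this file.
 *
 *   @SchemaName("order_request")
 *   @SchemaDescription("Data required to place an order")
 *   public class OrderRequest {
 *       @SchemaProperty(description = "Customer email", required = true, maxLength = 255)
 *       private String email;
 *
 *       @SchemaProperty(description = "Number of items", minValue = 1, maxValue = 100)
 *       private Integer quantity;
 *
 *       @SchemaProperty(description = "Shipping option", values = {"STANDARD", "EXPRESS"})
 *       private String shipping;            // exposed as PropertyType.ENUM because values are set
 *
 *       @SchemaIgnore
 *       private String internalTraceId;     // skipped by the generator, like @JsonIgnore fields
 *   }
 *
 *   // AIFunctionSchema schema = fromClass(OrderRequest.class);   // assumed public entry point
 */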
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/domain/Chat.java
|
package ai.driftkit.chat.framework.ai.domain;
import ai.driftkit.common.domain.RestResponse;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class Chat {
private String chatId;
public static class ChatRestResponse extends RestResponse<Chat> {
public ChatRestResponse() {
super();
}
public ChatRestResponse(boolean success, Chat data) {
super(success, data);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/domain/HistoryMessage.java
|
package ai.driftkit.chat.framework.ai.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@JsonInclude(Include.NON_NULL)
public class HistoryMessage {
String messageId;
String message;
ChatMessageType type;
MessageType messageType = MessageType.TEXT;
String imageTaskId;
String workflow;
long createdTime;
Long responseTime;
@Builder
public HistoryMessage(
String messageId,
String message,
ChatMessageType type,
MessageType messageType,
String imageTaskId,
String workflow,
long createdTime,
Long responseTime
) {
this.messageId = messageId;
this.message = message;
this.type = type;
this.messageType = messageType;
this.imageTaskId = imageTaskId;
this.workflow = workflow;
this.createdTime = createdTime;
this.responseTime = responseTime;
}
public enum MessageType {
IMAGE,
TEXT
}
public enum ChatMessageType {
SYSTEM,
USER,
AI
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/domain/MessageRequest.java
|
package ai.driftkit.chat.framework.ai.domain;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.client.ResponseFormat;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class MessageRequest {
private String message;
private Language language;
private String chatId;
private String workflow;
private Boolean jsonResponse;
private String systemMessage;
private Map<String, String> variables;
private Boolean logprobs;
private Integer topLogprobs;
private String model;
private Double temperature;
private String purpose;
private List<String> imageBase64;
private String imageMimeType;
private ResponseFormat responseFormat;
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/domain/MessageTask.java
|
package ai.driftkit.chat.framework.ai.domain;
import ai.driftkit.common.domain.RestResponse;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class MessageTask {
private String messageId;
private String result;
private String checkerPromptId;
private String imageTaskId;
private CheckerResponse checkerResponse;
public static class MessageRestResponse extends RestResponse<MessageTask> {
public MessageRestResponse() {
super();
}
public MessageRestResponse(boolean success, MessageTask data) {
super(success, data);
}
}
public static class MessageIdResponse extends RestResponse<MessageTask.MessageId> {
public MessageIdResponse() {
super();
}
public MessageIdResponse(boolean success, MessageTask.MessageId data) {
super(success, data);
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class MessageId {
private String messageId;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(Include.NON_NULL)
public static class CheckerResponse {
String id;
String checkerMessage;
String checkerPromptId;
String messageTaskId;
Object correctMessage;
List<Fix> fixes;
long createdTime;
long resultTime;
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class Fix {
String wrongStatement;
String fixedStatement;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/ai/utils/AIUtils.java
|
package ai.driftkit.chat.framework.ai.utils;
import ai.driftkit.chat.framework.ai.domain.MessageTask.MessageRestResponse;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class AIUtils {
public static final String REASONING_LITE = "reasoning-lite";
public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
public static <T> T parseModelResponse(MessageRestResponse msg, Class<T> clazz) throws JsonProcessingException {
try {
String result = msg.getData().getResult();
return OBJECT_MAPPER.readValue(result, clazz);
} catch (Exception e) {
log.error("[generate] Couldn't parse incoming json [{}]", msg, e);
throw e;
}
}
}
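/*
 * Illustrative usage sketch for parseModelResponse: the JSON result carried inside a
 * MessageRestResponse is bound to a caller-supplied DTO. The Summary record is hypothetical;
 * any Jackson-compatible class can be used as the target.
 *
 *   public record Summary(String title, java.util.List<String> bullets) {}
 *
 *   MessageRestResponse response = ...; // obtained from the assistant API
 *   try {
 *       Summary summary = AIUtils.parseModelResponse(response, Summary.class);
 *   } catch (JsonProcessingException e) {
 *       // the raw response has already been logged by parseModelResponse; choose a fallback here
 *   }
 */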
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/annotations/AsyncStep.java
|
package ai.driftkit.chat.framework.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation for asynchronous workflow step methods.
* This is used to identify methods that handle the asynchronous part of a workflow step.
* These methods will be called after the main step method returns an AsyncTaskEvent.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface AsyncStep {
/**
* The ID of the step that this async method is associated with.
* This must match the ID of a step defined with @WorkflowStep.
*/
String forStep();
/**
* Human-readable description of the async step.
*/
String description() default "";
/**
* The ID of the schema for input data.
*
* Note: If inputClass is specified, this is ignored.
*/
String inputSchemaId() default "";
/**
* The ID of the schema for output data.
*
* Note: If outputClass is specified, this is ignored.
*/
String outputSchemaId() default "";
/**
* The class to use as input schema.
* This class will be automatically converted to an AIFunctionSchema.
* If specified, takes precedence over inputSchemaId.
*/
Class<?> inputClass() default void.class;
/**
* The classes to use as possible input schemas.
* These classes will be automatically converted to AIFunctionSchema objects.
* If specified, takes precedence over inputSchemaId and inputClass.
*/
Class<?>[] inputClasses() default {};
/**
* The class to use as output schema.
* This class will be automatically converted to an AIFunctionSchema.
* If specified, takes precedence over outputSchemaId.
*/
Class<?> outputClass() default void.class;
/**
* The classes to use as possible output schemas.
* These classes will be automatically converted to AIFunctionSchema objects.
* If specified, takes precedence over outputSchemaId and outputClass.
*/
Class<?>[] nextClasses() default {};
}
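/*
 * Illustrative usage sketch: @AsyncStep pairs with a @WorkflowStep method that shares the same
 * step ID. The synchronous method returns an AsyncTaskEvent to start the task; the async method
 * performs the long-running work. Class, method and schema names are hypothetical, and the exact
 * parameter list the framework injects into the async method is assumed, not defined here.
 *
 *   @WorkflowStep(id = "generateReport", async = true, inputClass = ReportRequest.class)
 *   public StepEvent generateReport(ReportRequest input) {
 *       return AsyncTaskEvent.create("generateReport",
 *               Map.of("format", input.getFormat()),
 *               Map.of("message", "Report generation started"),
 *               null, null);
 *   }
 *
 *   @AsyncStep(forStep = "generateReport", outputClass = ReportResult.class)
 *   public StepEvent generateReportAsync(Map<String, Object> taskArgs) {
 *       // do the slow work here, then report completion
 *       return StepEvent.withMessage("Report ready");
 *   }
 */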
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/annotations/OnInvocationsLimit.java
|
package ai.driftkit.chat.framework.annotations;
/**
* Enum defining actions to take when a step's invocation limit is reached
*/
public enum OnInvocationsLimit {
/**
* Throw an error when the limit is reached
*/
ERROR,
/**
* Stop the workflow and return the current result
*/
STOP,
/**
* Continue to the next step
*/
CONTINUE
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/annotations/SchemaClass.java
|
package ai.driftkit.chat.framework.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation to mark a class as a schema class for workflow steps.
* The framework will automatically convert this class to AIFunctionSchema.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface SchemaClass {
/**
* The ID of the schema. If not provided, the class name will be used.
* @return schema ID
*/
String id() default "";
/**
* Description of the schema class
* @return description
*/
String description() default "";
/**
* Flag indicating this schema is composable and should be broken into separate questions
* when processed in the workflow. When true, each field becomes a separate form/question.
* @return true if schema should be handled as composable, false otherwise
*/
boolean composable() default false;
}
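/*
 * Illustrative usage sketch: a composable schema class is meant to be split into one question
 * per field when the workflow collects input. The TravelPreferences class and its fields are
 * hypothetical.
 *
 *   @SchemaClass(id = "travelPreferences",
 *                description = "Preferences collected before booking",
 *                composable = true)
 *   public class TravelPreferences {
 *       private String destination;   // presented to the user as its own question
 *       private String travelDates;   // presented to the user as its own question
 *   }
 */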
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/annotations/StepInfo.java
|
package ai.driftkit.chat.framework.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation to provide additional information about a workflow step.
* This is used to document steps and their parameters.
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface StepInfo {
/**
* Detailed description of the step
*/
String description() default "";
/**
* Maximum number of times this step can be invoked
*/
int invocationsLimit() default Integer.MAX_VALUE;
/**
* What to do when invocations limit is reached
*/
OnInvocationsLimit onInvocationsLimit() default OnInvocationsLimit.ERROR;
/**
* Whether this step can be executed asynchronously
*/
boolean async() default false;
/**
* Whether user input is required for this step
*/
boolean userInputRequired() default false;
}
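/*
 * Illustrative usage sketch: @StepInfo supplements a step method with documentation and an
 * invocation limit; here the step may run at most three times, after which the workflow stops
 * instead of raising an error. The method and its parameter type are hypothetical.
 *
 *   @WorkflowStep(id = "askClarifyingQuestion", requiresUserInput = true)
 *   @StepInfo(description = "Asks the user to clarify ambiguous input",
 *             invocationsLimit = 3,
 *             onInvocationsLimit = OnInvocationsLimit.STOP,
 *             userInputRequired = true)
 *   public StepEvent askClarifyingQuestion(UserReply reply) { ... }
 */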
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/annotations/WorkflowStep.java
|
package ai.driftkit.chat.framework.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation for workflow step methods.
* This is used to identify and register methods as workflow steps.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface WorkflowStep {
int index() default 1;
/**
* The ID of the step.
* If not specified:
* 1. First tries to use the inputClass simple name
* 2. Falls back to the method name if inputClass is not specified
*/
String id() default "";
/**
* Human-readable description of the step.
*/
String description() default "";
/**
* Indicates if this step requires user input.
*/
boolean requiresUserInput() default false;
/**
* Indicates if this step should be executed asynchronously.
*/
boolean async() default false;
/**
* The ID of the schema for input data.
* This is only relevant if requiresUserInput is true.
*
* Note: If inputClass is specified, this is ignored.
*/
String inputSchemaId() default "";
/**
* The ID of the schema for output data.
*
* Note: If outputClass is specified, this is ignored.
*/
String outputSchemaId() default "";
/**
* The class to use as input schema.
* This class will be automatically converted to an AIFunctionSchema.
* If specified, takes precedence over inputSchemaId.
*/
Class<?> inputClass() default void.class;
/**
* The classes to use as possible input schemas.
* These classes will be automatically converted to AIFunctionSchema objects.
* If specified, takes precedence over inputSchemaId and inputClass.
*/
Class<?>[] inputClasses() default {};
/**
* The classes to use as possible output schemas (direct outputs of this step).
* These classes will be automatically converted to AIFunctionSchema objects.
*/
Class<?>[] outputClasses() default {};
/**
* The classes to use as possible next step input schemas.
* These classes will be automatically converted to AIFunctionSchema objects.
* If specified, takes precedence over outputSchemaId.
*/
Class<?>[] nextClasses() default {};
/**
* Possible next steps this step can transition to.
* If empty, the framework will use the next step in the workflow definition order.
*/
String[] nextSteps() default {};
/**
* Condition to evaluate before transitioning to next step.
* Expression language can be used to reference input/output objects.
*/
String condition() default "";
/**
* Step to execute if condition evaluates to true.
*/
String onTrue() default "";
/**
* Step to execute if condition evaluates to false.
*/
String onFalse() default "";
}
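/*
 * Illustrative usage sketch covering the common attribute combinations: deriving the step ID
 * from inputClass, declaring the schema expected by the next step, and a conditional transition.
 * The classes, step IDs and the condition expression are hypothetical.
 *
 *   @WorkflowStep(index = 1,
 *                 description = "Collects the user's request",
 *                 requiresUserInput = true,
 *                 inputClass = UserRequest.class,        // step ID defaults to "UserRequest"
 *                 nextClasses = {Confirmation.class},
 *                 condition = "output.amount > 1000",
 *                 onTrue = "managerApproval",
 *                 onFalse = "autoApprove")
 *   public StepEvent collectRequest(UserRequest input) {
 *       return StepEvent.of(new Confirmation(input.getAmount()), Confirmation.class);
 *   }
 */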
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/controller/AssistantController.java
|
package ai.driftkit.chat.framework.controller;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.dto.PageableResponseWithChat;
import ai.driftkit.chat.framework.dto.PageableResponseWithChatMessage;
import ai.driftkit.chat.framework.model.ChatDomain.ChatMessage;
import ai.driftkit.chat.framework.model.ChatDomain.ChatRequest;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import ai.driftkit.chat.framework.model.ChatDomain.MessageType;
import ai.driftkit.chat.framework.model.ChatMessageTask;
import ai.driftkit.chat.framework.model.ChatMessageTaskConverter;
import ai.driftkit.chat.framework.model.ChatSession;
import ai.driftkit.chat.framework.model.StepDefinition;
import ai.driftkit.chat.framework.repository.ChatMessageRepository;
import ai.driftkit.chat.framework.service.AsyncResponseTracker;
import ai.driftkit.chat.framework.service.ChatSessionService;
import ai.driftkit.chat.framework.service.ChatWorkflowService;
import ai.driftkit.chat.framework.workflow.AnnotatedWorkflow;
import ai.driftkit.chat.framework.workflow.WorkflowRegistry;
import jakarta.servlet.http.HttpServletRequest;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.server.ResponseStatusException;
import jakarta.validation.Valid;
import java.net.URLDecoder;
import java.nio.charset.Charset;
import java.util.*;
@Slf4j
@RestController("legacyAssistantController")
@RequiredArgsConstructor
@RequestMapping("/public/api1.0/ai/assistant/v2/")
@Validated
public class AssistantController {
private final ChatWorkflowService chatWorkflowService;
private final AsyncResponseTracker asyncResponseTracker;
private final ChatSessionService chatService;
private final ChatMessageRepository messageRepository;
@PostMapping("/chat")
public ChatResponseWithTasks chat(@Valid @RequestBody ChatRequest request,
@RequestParam(required = false) String userId) {
userId = decode(userId);
log.info("Processing chat request for session: {}, user: {}", request.getChatId(), userId);
try {
if (StringUtils.isEmpty(request.getUserId())) {
if (StringUtils.isNotBlank(userId)) {
request.setUserId(userId);
} else {
userId = request.getPropertiesMap().getOrDefault("userId", "anonymous");
request.setUserId(userId);
}
}
// Ensure chat exists
chatService.getOrCreateChat(request.getChatId(), userId, request.getMessage());
// Process the request using workflow service (which will handle session creation if needed)
ChatResponse response = chatWorkflowService.processChat(request);
// Convert the request and response to message tasks
List<ChatMessageTask> requestTasks = ChatMessageTaskConverter.convert(request);
List<ChatMessageTask> responseTasks = ChatMessageTaskConverter.convert(response);
// Create a structured response with both the original response and the message tasks
return new ChatResponseWithTasks(response, requestTasks, responseTasks);
} catch (Exception e) {
log.error("Error processing chat request", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error processing chat request: " + e.getMessage(),
e
);
}
}
private static String decode(String userId) {
if (StringUtils.isBlank(userId)) {
return null;
}
return URLDecoder.decode(userId, Charset.defaultCharset());
}
// Poll for response updates
@GetMapping("/chat/response/{responseId}")
public ChatResponseWithTasks getChatResponse(
@PathVariable String responseId,
@RequestParam(required = false) String userId
) {
userId = decode(userId);
log.info("Getting chat response for ID: {}, user: {}", responseId, userId);
try {
Optional<ChatResponse> response = asyncResponseTracker.getResponse(responseId);
// If userId is provided, verify ownership
if (response.isPresent() && StringUtils.isNotBlank(userId)) {
ChatResponse chatResponse = response.get();
String responseUserId = chatResponse.getUserId();
// If the response has a userId that doesn't match, return forbidden
if (StringUtils.isNotBlank(responseUserId) && !responseUserId.equals(userId)) {
log.warn("User {} attempted to access response {} owned by {}",
userId, responseId, responseUserId);
throw new RuntimeException("Forbidden for [%s] [%s]".formatted(userId, responseId));
}
}
if (response.isEmpty()) {
return new ChatResponseWithTasks();
}
List<ChatMessageTask> responseTasks = ChatMessageTaskConverter.convert(response.get());
return new ChatResponseWithTasks(response.get(), null, responseTasks);
} catch (ResponseStatusException e) {
// Propagate status exceptions (e.g. FORBIDDEN from the ownership check) without rewrapping as 500
throw e;
} catch (Exception e) {
log.error("Error retrieving chat response", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error retrieving chat response: " + e.getMessage(),
e
);
}
}
// Chat history with pagination
@GetMapping("/chat/history")
public List<ChatMessageTask> history(
final @NotNull HttpServletRequest request,
@RequestParam String chatId,
@RequestParam(required = false) String userId,
@RequestParam(required = false, defaultValue = "0") int page,
@RequestParam(required = false, defaultValue = "1000") int limit,
@RequestParam(required = false, defaultValue = "asc") String sort,
@RequestParam(required = false, defaultValue = "true") boolean showSchema,
@RequestParam(required = false, defaultValue = "false") Boolean context) {
userId = decode(userId);
log.info("Retrieving chat history for session: {}, user: {}, page: {}, limit: {}, sort: {}",
chatId, userId, page, limit, sort);
try {
// Create pageable object with sort by timestamp
Pageable pageable = createPageable(page, limit, sort, "timestamp");
// Verify user has access to the chat if userId provided
verifyUserChatAccess(chatId, userId);
// Get chat history from repository with pagination
Page<ChatMessage> historyPage = messageRepository.findByChatIdOrderByTimestampDesc(chatId, pageable);
// If no history found, return empty page (don't try to get history from other chats)
if (historyPage.isEmpty()) {
log.info("No history found for chat: {}", chatId);
return new ArrayList<>();
}
List<ChatMessage> content = historyPage.getContent().stream()
.filter(e -> BooleanUtils.isTrue(context) || e.getType() != MessageType.CONTEXT)
.toList();
if (BooleanUtils.isNotTrue(showSchema)) {
content = content.stream()
.map(SerializationUtils::clone)
.peek(e -> {
if (e instanceof ChatResponse response) {
// Clear schemas to reduce payload size
response.setNextSchemaAsSchema(null);
}
})
.toList();
}
// Convert messages to MessageTask format and return only the tasks
return ChatMessageTaskConverter.convertAll(content);
} catch (ResponseStatusException e) {
// Re-throw existing status exceptions
throw e;
} catch (Exception e) {
log.error("Error retrieving chat history", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error retrieving chat history: " + e.getMessage(),
e
);
}
}
// Chat history with pagination
@GetMapping("/chat/history/pageable")
public PageableResponseWithChatMessage historyPageable(
final @NotNull HttpServletRequest request,
@RequestParam String chatId,
@RequestParam(required = false) String userId,
@RequestParam(required = false, defaultValue = "0") int page,
@RequestParam(required = false, defaultValue = "10") int limit,
@RequestParam(required = false, defaultValue = "asc") String sort) {
userId = decode(userId);
log.info("Retrieving chat history for session: {}, user: {}, page: {}, limit: {}, sort: {}",
chatId, userId, page, limit, sort);
try {
// Create pageable object with sort by timestamp
Pageable pageable = createPageable(page, limit, sort, "timestamp");
// Verify user has access to the chat if userId provided
verifyUserChatAccess(chatId, userId);
// Get chat history from repository with pagination
Page<ChatMessage> historyPage = messageRepository.findByChatIdOrderByTimestampDesc(chatId, pageable);
// If no history found, return empty page (don't try to get history from other chats)
if (historyPage.isEmpty()) {
log.info("No history found for chat: {}", chatId);
}
return new PageableResponseWithChatMessage(request, historyPage);
} catch (ResponseStatusException e) {
throw e;
} catch (Exception e) {
log.error("Error retrieving chat history", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error retrieving chat history: " + e.getMessage(),
e
);
}
}
// List available chats with pagination
@GetMapping("/chat/list")
public PageableResponseWithChat getChats(
final @NotNull HttpServletRequest request,
@RequestParam(required = false) String userId,
@RequestParam(required = false, defaultValue = "0") int page,
@RequestParam(required = false, defaultValue = "100") int limit,
@RequestParam(required = false, defaultValue = "desc") String sort) {
userId = decode(userId);
log.info("Listing chats for user: {}, page: {}, limit: {}, sort: {}", userId, page, limit, sort);
try {
// Create pageable object with sort by lastMessageTime
Pageable pageable = createPageable(page, limit, sort, "lastMessageTime");
// Get paginated chats only for the specified user
// If userId is not provided, return an empty page
Page<ChatSession> chatsPage;
if (StringUtils.isNotBlank(userId)) {
chatsPage = chatService.listChatsForUser(userId, pageable);
} else {
// Return empty page if no userId provided
chatsPage = Page.empty(pageable);
}
return new PageableResponseWithChat(request, chatsPage);
} catch (Exception e) {
log.error("Error listing chats", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error listing chats: " + e.getMessage(),
e
);
}
}
// Create a new chat
@PostMapping("/chat/create")
public ChatInfo createChat(
@RequestParam(required = false) String userId,
@RequestParam(required = false) String name
) {
userId = decode(userId);
userId = StringUtils.isNotBlank(userId) ? userId : "anonymous";
log.info("Creating new chat for user: {}", userId);
try {
ChatSession chat = chatService.createChat(userId, name);
return new ChatInfo(
chat.getChatId(),
chat.getLastMessageTime(),
chat.getDescription(),
chat.getUserId(),
chat.getName()
);
} catch (Exception e) {
log.error("Error creating new chat", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error creating new chat: " + e.getMessage(),
e
);
}
}
// Archive a chat
@PostMapping("/chat/{chatId}/archive")
public ResponseEntity<Void> archiveChat(
@PathVariable String chatId,
@RequestParam(required = false) String userId) {
userId = decode(userId);
log.info("Archiving chat: {}, user: {}", chatId, userId);
try {
// Verify user has access to the chat if userId provided
verifyUserChatAccess(chatId, userId);
chatService.archiveChat(chatId);
return ResponseEntity.ok().build();
} catch (ResponseStatusException e) {
// Return as ResponseEntity for consistency
return ResponseEntity.status(e.getStatusCode()).build();
} catch (Exception e) {
log.error("Error archiving chat", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error archiving chat: " + e.getMessage(),
e
);
}
}
// Schemas endpoint - list all available schemas
@GetMapping("/schemas")
public SchemaResponse schemas() {
log.info("Retrieving schemas for all workflows");
try {
// Get all unique schemas from registered workflows
Set<AIFunctionSchema> schemas = new HashSet<>();
Map<String, String> messageIds = new HashMap<>();
// Collect schemas from registered AnnotatedWorkflows in WorkflowRegistry
for (AnnotatedWorkflow workflow : WorkflowRegistry.getAllWorkflows()) {
for (StepDefinition step : workflow.getStepDefinitions()) {
if (step.getInputSchemas() != null) {
schemas.addAll(step.getInputSchemas());
}
if (step.getOutputSchemas() != null) {
schemas.addAll(step.getOutputSchemas());
}
}
}
return new SchemaResponse(new ArrayList<>(schemas), messageIds);
} catch (Exception e) {
log.error("Error retrieving schemas", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error retrieving schemas: " + e.getMessage(),
e
);
}
}
/**
* Chat information for the chat list
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class ChatInfo {
private String chatId;
private Long lastMessageTime;
private String lastMessage;
private String userId;
private String name;
}
/**
* Schema response for the schemas endpoint
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class SchemaResponse {
private List<AIFunctionSchema> schemas;
private Map<String, String> messageIds;
}
/**
* Helper methods for common operations
*/
/**
* Creates a PageRequest object with the provided pagination parameters
* @param page Page number (zero-based)
* @param limit Number of items per page
* @param sort Sort direction ("asc" or "desc")
* @param sortBy Field to sort by
* @return Configured Pageable object
*/
private Pageable createPageable(int page, int limit, String sort, String sortBy) {
Sort.Direction sortDirection = "asc".equalsIgnoreCase(sort)
? Sort.Direction.ASC
: Sort.Direction.DESC;
return PageRequest.of(page, limit, Sort.by(sortDirection, sortBy));
}
/**
* Verifies that the user has access to the chat
* @param chatId Chat ID to verify access to
* @param userId User ID to check
* @throws ResponseStatusException if user is not authorized to access the chat
*/
private void verifyUserChatAccess(String chatId, String userId) {
if (StringUtils.isNotBlank(userId)) {
Optional<ChatSession> chatOpt = chatService.getChat(chatId);
if (chatOpt.isPresent() && !userId.equals(chatOpt.get().getUserId())) {
log.warn("User {} attempted to access chat {} owned by {}",
userId, chatId, chatOpt.get().getUserId());
throw new ResponseStatusException(HttpStatus.FORBIDDEN, "User not authorized to access this chat");
}
}
}
/**
* Get the first step schema for a workflow by ID
* This endpoint allows the frontend to understand how to initialize the first step for each workflow
*
* @param workflowId The ID of the workflow
* @return First step schema response containing the schema(s) for the first step
*/
@GetMapping("/workflow/first-schema/{workflowId}")
public FirstStepSchemaResponse getFirstStepSchema(@PathVariable String workflowId) {
log.info("Getting first step schema for workflow: {}", workflowId);
try {
// Get the workflow by ID
Optional<AnnotatedWorkflow> workflowOpt = WorkflowRegistry.getWorkflow(workflowId);
if (workflowOpt.isEmpty()) {
log.warn("Workflow not found with ID: {}", workflowId);
throw new ResponseStatusException(
HttpStatus.NOT_FOUND,
"Workflow not found with ID: " + workflowId
);
}
AnnotatedWorkflow workflow = workflowOpt.get();
// Get all steps in the workflow
List<StepDefinition> steps = workflow.getStepDefinitions();
if (steps.isEmpty()) {
log.warn("No steps found for workflow: {}", workflowId);
throw new ResponseStatusException(
HttpStatus.NOT_FOUND,
"No steps found for workflow: " + workflowId
);
}
// The first step is the first in the sorted list (sorted by index)
StepDefinition firstStep = steps.get(0);
// Get the input schemas for the first step
List<AIFunctionSchema> inputSchemas = firstStep.getInputSchemas();
if (inputSchemas == null || inputSchemas.isEmpty()) {
log.warn("No input schemas found for the first step of workflow: {}", workflowId);
return new FirstStepSchemaResponse(workflowId, firstStep.getId(), Collections.emptyList());
}
return new FirstStepSchemaResponse(workflowId, firstStep.getId(), inputSchemas);
} catch (ResponseStatusException e) {
// Re-throw existing status exceptions
throw e;
} catch (Exception e) {
log.error("Error retrieving first step schema", e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR,
"Error retrieving first step schema: " + e.getMessage(),
e
);
}
}
/**
* Response for the first step schema endpoint
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class FirstStepSchemaResponse {
private String workflowId;
private String stepId;
private List<AIFunctionSchema> schemas;
}
/**
* Wrapper class for ChatResponse with MessageTasks
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class ChatResponseWithTasks {
private ChatResponse originalResponse;
private List<ChatMessageTask> request;
private List<ChatMessageTask> response;
}
}
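/*
 * Illustrative client-side sketch for the endpoints above: create a chat, post a message, then
 * poll an asynchronous response by its ID. The host, the use of Spring's RestClient and the way
 * the response ID is obtained are assumptions for illustration only.
 *
 *   String base = "http://localhost:8080/public/api1.0/ai/assistant/v2";
 *   RestClient client = RestClient.create();
 *
 *   ChatInfo chat = client.post()
 *           .uri(base + "/chat/create?userId={u}&name={n}", "alice", "Support")
 *           .retrieve().body(ChatInfo.class);
 *
 *   ChatResponseWithTasks first = client.post()
 *           .uri(base + "/chat")
 *           .body(chatRequest)           // a ChatRequest with chatId = chat.getChatId()
 *           .retrieve().body(ChatResponseWithTasks.class);
 *
 *   ChatResponseWithTasks polled = client.get()
 *           .uri(base + "/chat/response/{id}?userId={u}", responseId, "alice")
 *           .retrieve().body(ChatResponseWithTasks.class);
 */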
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/dto/PageableResponse.java
|
package ai.driftkit.chat.framework.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.servlet.http.HttpServletRequest;
import lombok.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.springframework.data.domain.Page;
import org.springframework.web.util.UriComponentsBuilder;
import java.util.Arrays;
import java.util.List;
/**
* Response with information about current and available pages
*
* @param <DataType> Page data type
*/
@Data
@NoArgsConstructor
@RequiredArgsConstructor
@Schema(
title = "Pageable Response",
description = "Response with information about current and available pages"
)
public class PageableResponse<DataType> {
/**
* Page data
*/
@NotNull
@Schema(
description = "Page data",
type = "array",
requiredMode = Schema.RequiredMode.REQUIRED,
contentSchema = Object.class
)
private List<DataType> data;
/**
* Information about current and available pages
*/
@NotNull
@Schema(
description = "Information about current and available pages",
requiredMode = Schema.RequiredMode.REQUIRED
)
private PageInfo page;
/**
* Information about navigation between pages
*/
@NotNull
@Schema(
description = "Information about navigation between pages",
requiredMode = Schema.RequiredMode.REQUIRED
)
private PageLinks links;
/**
* Create response for given {@link Page}
*
* @param page page to return
* @param request HTTP request
*/
public PageableResponse(
final @NotNull HttpServletRequest request,
final @NotNull Page<DataType> page
) {
this.data = page.getContent();
this.page = new PageInfo(page.getSize(), page.getTotalElements(), page.getTotalPages(), page.getNumber());
this.links = new PageLinks(
self(request, page),
next(request, page),
prev(request, page),
first(request, page),
last(request, page)
);
}
@NotNull
private String self(final @NotNull HttpServletRequest request, final @NotNull Page<?> page) {
final var requestURI = request.getRequestURI();
final var requestParams = request.getParameterMap();
UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(requestURI);
for (final var key : requestParams.keySet()) {
builder.queryParam(key, Arrays.asList(requestParams.get(key)));
}
if (!requestParams.containsKey("page")) {
builder.queryParam("page", List.of(page.getNumber()));
}
if (!requestParams.containsKey("limit")) {
builder.queryParam("limit", List.of(page.getSize()));
}
return builder.build().toUriString();
}
@Nullable
private String next(final @NotNull HttpServletRequest request, final @NotNull Page<?> page) {
// next page doesn't exist
if (page.getNumber() + 1 >= page.getTotalPages()) {
return null;
}
final var requestURI = request.getRequestURI();
final var requestParams = request.getParameterMap();
UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(requestURI);
for (final var key : requestParams.keySet()) {
builder.queryParam(key, Arrays.asList(requestParams.get(key)));
}
builder.replaceQueryParam("page", List.of(page.getNumber() + 1));
builder.replaceQueryParam("limit", List.of(page.getSize()));
return builder.build().toUriString();
}
@Nullable
private String prev(final @NotNull HttpServletRequest request, final @NotNull Page<?> page) {
// prev page doesn't exist
if (page.getNumber() == 0) {
return null;
}
final var requestURI = request.getRequestURI();
final var requestParams = request.getParameterMap();
UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(requestURI);
for (final var key : requestParams.keySet()) {
builder.queryParam(key, Arrays.asList(requestParams.get(key)));
}
builder.replaceQueryParam("page", List.of(page.getNumber() - 1));
builder.replaceQueryParam("limit", List.of(page.getSize()));
return builder.build().toUriString();
}
@NotNull
private String first(final @NotNull HttpServletRequest request, final @NotNull Page<?> page) {
final var requestURI = request.getRequestURI();
final var requestParams = request.getParameterMap();
UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(requestURI);
for (final var key : requestParams.keySet()) {
builder.queryParam(key, Arrays.asList(requestParams.get(key)));
}
builder.replaceQueryParam("page", List.of(0));
builder.replaceQueryParam("limit", List.of(page.getSize()));
return builder.build().toUriString();
}
@NotNull
private String last(final @NotNull HttpServletRequest request, final @NotNull Page<?> page) {
final var requestURI = request.getRequestURI();
final var requestParams = request.getParameterMap();
UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(requestURI);
for (final var key : requestParams.keySet()) {
builder.queryParam(key, Arrays.asList(requestParams.get(key)));
}
builder.replaceQueryParam("page", List.of(page.getTotalPages()));
builder.replaceQueryParam("limit", List.of(page.getSize()));
return builder.build().toUriString();
}
/**
* Page information
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
@Schema(description = "Page information")
public static class PageInfo {
@Schema(description = "Page size")
private int size;
@Schema(description = "Total number of elements")
private long totalElements;
@Schema(description = "Total number of pages")
private int totalPages;
@Schema(description = "Current page number")
private int number;
}
/**
* Page navigation links
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
@Schema(description = "Page navigation links")
public static class PageLinks {
@Schema(description = "Current page link")
private String self;
@Schema(description = "Next page link")
private String next;
@Schema(description = "Previous page link")
private String prev;
@Schema(description = "First page link")
private String first;
@Schema(description = "Last page link")
private String last;
}
}
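/*
 * Illustrative usage sketch: wrapping a Spring Data Page in a PageableResponse inside a
 * controller so the client receives the page content together with self/next/prev/first/last
 * links derived from the incoming request. The Item type, repository and endpoint are hypothetical.
 *
 *   @GetMapping("/items")
 *   public PageableResponse<Item> items(HttpServletRequest request,
 *                                       @RequestParam(defaultValue = "0") int page,
 *                                       @RequestParam(defaultValue = "20") int limit) {
 *       Page<Item> result = itemRepository.findAll(PageRequest.of(page, limit));
 *       return new PageableResponse<>(request, result);
 *   }
 */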
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/dto/PageableResponseWithChat.java
|
package ai.driftkit.chat.framework.dto;
import ai.driftkit.chat.framework.model.ChatSession;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.servlet.http.HttpServletRequest;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.jetbrains.annotations.NotNull;
import org.springframework.data.domain.Page;
@Schema(
title = "Pageable Response with Chat Sessions"
)
@Data
@EqualsAndHashCode(callSuper = true)
public class PageableResponseWithChat extends PageableResponse<ChatSession> {
public PageableResponseWithChat(
final @NotNull HttpServletRequest request,
final @NotNull Page<ChatSession> page
) {
super(request, page);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/dto/PageableResponseWithChatMessage.java
|
package ai.driftkit.chat.framework.dto;
import ai.driftkit.chat.framework.model.ChatDomain.ChatMessage;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.servlet.http.HttpServletRequest;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.jetbrains.annotations.NotNull;
import org.springframework.data.domain.Page;
@Schema(
title = "Pageable Response with Chat Messages"
)
@Data
@EqualsAndHashCode(callSuper = true)
public class PageableResponseWithChatMessage extends PageableResponse<ChatMessage> {
public PageableResponseWithChatMessage(
final @NotNull HttpServletRequest request,
final @NotNull Page<ChatMessage> page
) {
super(request, page);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/events/AsyncTaskEvent.java
|
package ai.driftkit.chat.framework.events;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.util.SchemaUtils;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class AsyncTaskEvent extends StepEvent {
private String taskName;
private Map<String, Object> taskArgs;
@Builder(builderMethodName = "asyncBuilder")
public AsyncTaskEvent(String nextStepId, List<String> possibleNextStepIds, Map<String, String> properties, AIFunctionSchema currentSchema, AIFunctionSchema nextInputSchema, boolean completed, int percentComplete, String taskName, Map<String, Object> taskArgs) {
super(nextStepId, possibleNextStepIds, properties, currentSchema, nextInputSchema, completed, percentComplete, true);
this.taskName = taskName;
this.taskArgs = taskArgs;
}
public static AsyncTaskEvent create(String taskName,
Map<String, Object> taskArgs,
Map<String, String> responseProperties,
AIFunctionSchema currentSchema,
AIFunctionSchema nextInputSchema) {
return AsyncTaskEvent.asyncBuilder()
.taskName(taskName)
.taskArgs(taskArgs)
.properties(responseProperties)
.currentSchema(currentSchema)
.nextInputSchema(nextInputSchema)
.completed(false)
.percentComplete(50)
.build();
}
public static AsyncTaskEvent withMessageId(String taskName,
Map<String, Object> taskArgs,
String messageId,
AIFunctionSchema nextInputSchema) {
Map<String, String> props = new HashMap<>();
props.put("messageId", messageId);
return AsyncTaskEvent.asyncBuilder()
.taskName(taskName)
.taskArgs(taskArgs)
.properties(props)
.nextInputSchema(nextInputSchema)
.completed(false)
.percentComplete(50)
.build();
}
public static AsyncTaskEvent createWithSchemaClasses(String taskName,
Map<String, Object> taskArgs,
Map<String, String> responseProperties,
Class<?> currentSchemaClass,
Class<?> nextInputSchemaClass) {
AIFunctionSchema currentSchema = currentSchemaClass != null ?
SchemaUtils.getSchemaFromClass(currentSchemaClass) : null;
AIFunctionSchema nextInputSchema = nextInputSchemaClass != null ?
SchemaUtils.getSchemaFromClass(nextInputSchemaClass) : null;
return AsyncTaskEvent.asyncBuilder()
.taskName(taskName)
.taskArgs(taskArgs)
.properties(responseProperties)
.currentSchema(currentSchema)
.nextInputSchema(nextInputSchema)
.completed(false)
.percentComplete(50)
.build();
}
public static AsyncTaskEvent createWithObjects(String taskName,
Map<String, Object> taskArgs,
Object outputObject,
Class<?> nextInputSchemaClass) {
Map<String, String> properties = SchemaUtils.extractProperties(outputObject);
AIFunctionSchema currentSchema = outputObject != null ?
SchemaUtils.getSchemaFromClass(outputObject.getClass()) : null;
AIFunctionSchema nextInputSchema = nextInputSchemaClass != null ?
SchemaUtils.getSchemaFromClass(nextInputSchemaClass) : null;
return AsyncTaskEvent.asyncBuilder()
.taskName(taskName)
.taskArgs(taskArgs)
.properties(properties)
.currentSchema(currentSchema)
.nextInputSchema(nextInputSchema)
.completed(false)
.percentComplete(50)
.build();
}
public static AsyncTaskEvent withMessageId(String taskName,
Map<String, Object> taskArgs,
String messageId,
Class<?> nextInputSchemaClass) {
Map<String, String> props = new HashMap<>();
props.put("messageId", messageId);
AIFunctionSchema nextInputSchema = nextInputSchemaClass != null ?
SchemaUtils.getSchemaFromClass(nextInputSchemaClass) : null;
return AsyncTaskEvent.asyncBuilder()
.taskName(taskName)
.taskArgs(taskArgs)
.properties(props)
.nextInputSchema(nextInputSchema)
.completed(false)
.percentComplete(50)
.build();
}
}
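/*
 * Illustrative usage sketch: the schema-class factory variant lets the framework render the next
 * input form while the asynchronous task is still running. Task name, arguments and the schema
 * classes are hypothetical.
 *
 *   AsyncTaskEvent event = AsyncTaskEvent.createWithSchemaClasses(
 *           "summarizeDocument",
 *           Map.of("documentId", docId),
 *           Map.of("message", "Summarization started"),
 *           SummarizeStatus.class,      // schema describing this step's output
 *           FollowUpQuestion.class);    // schema for the input expected next
 */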
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/events/StepEvent.java
|
package ai.driftkit.chat.framework.events;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.util.SchemaUtils;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class StepEvent {
@Accessors(chain = true)
private String nextStepId;
@Accessors(chain = true)
private List<String> possibleNextStepIds = new ArrayList<>();
@Accessors(chain = true)
private Map<String, String> properties;
@Accessors(chain = true)
private AIFunctionSchema currentSchema;
@Accessors(chain = true)
private AIFunctionSchema nextInputSchema;
@Accessors(chain = true)
private boolean completed;
@Accessors(chain = true)
private int percentComplete;
@Accessors(chain = true)
@Builder.Default
private boolean required = true;
public static StepEvent nextStep(String nextStepId) {
return StepEvent.builder()
.nextStepId(nextStepId)
.completed(true)
.percentComplete(100)
.build();
}
public static StepEvent of(Object schemaObject, AIFunctionSchema nextInputSchema) {
return StepEvent.builder()
.properties(SchemaUtils.extractProperties(schemaObject))
.currentSchema(SchemaUtils.getSchemaFromClass(schemaObject.getClass()))
.nextInputSchema(nextInputSchema)
.completed(true)
.percentComplete(100)
.build();
}
public static StepEvent completed(Map<String, String> properties,
AIFunctionSchema currentSchema,
AIFunctionSchema nextInputSchema) {
return StepEvent.builder()
.properties(properties)
.currentSchema(currentSchema)
.nextInputSchema(nextInputSchema)
.completed(true)
.percentComplete(100)
.build();
}
public static StepEvent completed(Map<String, String> properties,
Class<?> currentSchemaClass,
Class<?> nextInputSchemaClass) {
return StepEvent.builder()
.properties(properties)
.currentSchema(SchemaUtils.getSchemaFromClass(currentSchemaClass))
.nextInputSchema(SchemaUtils.getSchemaFromClass(nextInputSchemaClass))
.completed(true)
.percentComplete(100)
.build();
}
public static StepEvent fromObject(Object schemaObject, Class<?> nextInputSchemaClass) {
return StepEvent.builder()
.properties(SchemaUtils.extractProperties(schemaObject))
.currentSchema(SchemaUtils.getSchemaFromClass(schemaObject.getClass()))
.nextInputSchema(SchemaUtils.getSchemaFromClass(nextInputSchemaClass))
.completed(true)
.percentComplete(100)
.build();
}
public static StepEvent fromObjectWithMultipleNextInputs(Object schemaObject, Class<?>[] nextInputSchemaClasses) {
StepEvent event = StepEvent.builder()
.properties(SchemaUtils.extractProperties(schemaObject))
.currentSchema(SchemaUtils.getSchemaFromClass(schemaObject.getClass()))
.completed(true)
.percentComplete(100)
.build();
if (nextInputSchemaClasses != null && nextInputSchemaClasses.length > 0) {
event.setNextInputSchema(SchemaUtils.getSchemaFromClass(nextInputSchemaClasses[0]));
}
return event;
}
public static StepEvent of(Object outputObject, Class<?> nextInputClass) {
return fromObject(outputObject, nextInputClass);
}
public static StepEvent of(Object outputObject, Class<?>... nextInputClasses) {
return fromObjectWithMultipleNextInputs(outputObject, nextInputClasses);
}
public static StepEvent of(Object outputObject) {
return fromObject(outputObject, null);
}
public StepEvent withCurrentSchemaClass(Class<?> schemaClass) {
this.currentSchema = SchemaUtils.getSchemaFromClass(schemaClass);
return this;
}
public StepEvent withNextInputSchemaClass(Class<?> schemaClass) {
this.nextInputSchema = SchemaUtils.getSchemaFromClass(schemaClass);
return this;
}
public StepEvent withSchemaObject(Object schemaObject) {
this.properties = SchemaUtils.extractProperties(schemaObject);
this.currentSchema = SchemaUtils.getSchemaFromClass(schemaObject.getClass());
return this;
}
public static StepEvent withProperties(Map<String, String> properties) {
return StepEvent.builder()
.properties(properties)
.completed(true)
.percentComplete(100)
.build();
}
public static StepEvent withProperty(String key, String value) {
Map<String, String> props = new HashMap<>();
props.put(key, value);
return withProperties(props);
}
public static StepEvent withMessageId(String value) {
Map<String, String> props = new HashMap<>();
props.put("messageId", value);
return withProperties(props);
}
public static StepEvent withMessageId(String value, boolean required) {
Map<String, String> props = new HashMap<>();
props.put("messageId", value);
return withProperties(props).setRequired(required);
}
public static StepEvent withMessage(String message) {
return withProperty("message", message);
}
public static StepEvent withError(String errorMessage) {
return withProperty("error", errorMessage);
}
public StepEvent setNextStepId(String nextStepId) {
this.nextStepId = nextStepId;
addPossibleNextStep(nextStepId);
return this;
}
public StepEvent addPossibleNextStep(String stepId) {
if (possibleNextStepIds == null) {
possibleNextStepIds = new CopyOnWriteArrayList<>();
}
if (stepId != null && !this.possibleNextStepIds.contains(stepId)) {
this.possibleNextStepIds.add(stepId);
}
return this;
}
public StepEvent addPossibleNextSteps(String... stepIds) {
if (stepIds != null) {
for (String stepId : stepIds) {
addPossibleNextStep(stepId);
}
}
return this;
}
public static StepEvent withPossibleNextSteps(Map<String, String> properties, String... nextStepIds) {
StepEvent event = StepEvent.builder()
.properties(properties)
.completed(true)
.percentComplete(100)
.build();
if (nextStepIds != null) {
for (String stepId : nextStepIds) {
event.addPossibleNextStep(stepId);
}
if (nextStepIds.length > 0) {
event.setNextStepId(nextStepIds[0]);
}
}
return event;
}
public static StepEvent withPossibleNextSteps(Object outputObject, String... nextStepIds) {
StepEvent event = of(outputObject);
if (nextStepIds != null) {
for (String stepId : nextStepIds) {
event.addPossibleNextStep(stepId);
}
if (nextStepIds.length > 0) {
event.setNextStepId(nextStepIds[0]);
}
}
return event;
}
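/*
 * Illustrative usage sketch (not part of the original source). A step handler usually
 * returns a StepEvent describing its output and the schema expected from the user next;
 * OrderSummary and ConfirmOrderInput below are hypothetical placeholder classes.
 *
 *   StepEvent event = StepEvent
 *           .of(new OrderSummary(), ConfirmOrderInput.class)
 *           .addPossibleNextSteps("confirmOrder", "editOrder")
 *           .setNextStepId("confirmOrder");
 */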
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/events/WorkflowTransitionEvent.java
|
package ai.driftkit.chat.framework.events;
import ai.driftkit.common.domain.Language;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import java.util.HashMap;
import java.util.Map;
/**
* Special event that signals a transition to another workflow.
* When this event is returned from a workflow step, the framework will:
* 1. Save the current session state
* 2. Switch to the target workflow
* 3. Initialize the new workflow with the provided context
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
public class WorkflowTransitionEvent extends StepEvent {
private String sourceWorkflowId;
private String targetWorkflowId;
private String targetStepId; // Optional: specific step ID in target workflow
private String initialMessage;
private Language language;
private String transitionReason;
@Builder(builderMethodName = "transitionBuilder")
public WorkflowTransitionEvent(String sourceWorkflowId, String targetWorkflowId,
String targetStepId, String initialMessage,
Map<String, String> contextData,
Language language, String transitionReason) {
super();
this.sourceWorkflowId = sourceWorkflowId;
this.targetWorkflowId = targetWorkflowId;
this.targetStepId = targetStepId;
this.initialMessage = initialMessage;
this.language = language;
this.transitionReason = transitionReason;
// Only add the essential marker and context data
Map<String, String> props = new HashMap<>();
props.put("workflowTransition", "true");
if (contextData != null) {
props.putAll(contextData);
}
this.setProperties(props);
this.setCompleted(true);
this.setPercentComplete(100);
}
/**
* Create a workflow transition event
*/
public static WorkflowTransitionEvent to(String targetWorkflowId) {
return transitionBuilder()
.targetWorkflowId(targetWorkflowId)
.build();
}
/**
* Create a workflow transition event with initial message
*/
public static WorkflowTransitionEvent to(String targetWorkflowId, String initialMessage) {
return transitionBuilder()
.targetWorkflowId(targetWorkflowId)
.initialMessage(initialMessage)
.build();
}
/**
* Create a workflow transition event with context data
*/
public static WorkflowTransitionEvent to(String sourceWorkflowId, String targetWorkflowId,
Map<String, String> contextData) {
return transitionBuilder()
.sourceWorkflowId(sourceWorkflowId)
.targetWorkflowId(targetWorkflowId)
.contextData(contextData)
.build();
}
/**
* Create a workflow transition event with full context
*/
public static WorkflowTransitionEvent to(String sourceWorkflowId, String targetWorkflowId,
String initialMessage, Map<String, String> contextData) {
return transitionBuilder()
.sourceWorkflowId(sourceWorkflowId)
.targetWorkflowId(targetWorkflowId)
.initialMessage(initialMessage)
.contextData(contextData)
.build();
}
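/*
 * Illustrative usage sketch (not part of the original source). A step that hands the
 * conversation over to another workflow can simply return a transition event; the
 * workflow ids and context keys below are hypothetical.
 *
 *   return WorkflowTransitionEvent.to(
 *           "onboarding-workflow", "billing-workflow",
 *           Map.of("customerId", "42"));
 */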
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/model/ChatDomain.java
|
package ai.driftkit.chat.framework.model;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema.AIFunctionProperty;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema.PropertyType;
import ai.driftkit.common.domain.Language;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class ChatDomain {
public enum MessageType {
USER,
AI,
CONTEXT,
SYSTEM
}
public enum SessionState {
WAITING_FOR_USER_INPUT,
EXECUTING_STEP
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(Include.NON_DEFAULT)
public static class DataProperty implements Serializable {
private String name;
private String nameId;
private String dataNameId;
private String value;
private String data;
private Boolean multiSelect;
private PropertyType type;
private boolean valueAsNameId;
public DataProperty(String name, String value, String nameId, PropertyType type) {
this(name, value, type);
this.nameId = nameId;
}
public DataProperty(String name, String value, PropertyType type) {
this.name = name;
this.value = value;
this.type = type;
}
public boolean isValueAsNameId() {
return valueAsNameId;
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "type",
visible = true
)
@JsonSubTypes({
@JsonSubTypes.Type(value = ChatRequest.class, name = "USER"),
@JsonSubTypes.Type(value = ChatResponse.class, name = "AI"),
})
public static class ChatMessage implements Serializable {
protected String id;
protected String chatId;
protected MessageType type;
protected Language language;
protected Long timestamp;
protected List<DataProperty> properties = new ArrayList<>();
protected String userId;
public ChatMessage(String id, String chatId, MessageType type) {
this.id = id;
this.chatId = chatId;
this.type = type;
this.timestamp = System.currentTimeMillis();
}
public ChatMessage(String id, String chatId, MessageType type, String userId) {
this(id, chatId, type);
this.userId = userId;
}
@JsonIgnore
public Map<String, String> getPropertiesMap() {
Map<String, String> propsMap = new HashMap<>();
for (DataProperty prop : properties) {
if (prop.getValue() == null) {
continue;
}
propsMap.put(prop.getName(), prop.getValue());
}
return propsMap;
}
@JsonIgnore
public void setPropertiesMap(Map<String, String> map) {
if (map == null) {
return;
}
for (Map.Entry<String, String> entry : map.entrySet()) {
updateOrAddProperty(entry.getKey(), entry.getValue());
}
}
public void updateOrAddProperty(String name, String value) {
if (name == null) {
return;
}
for (DataProperty prop : properties) {
if (name.equals(prop.getName())) {
prop.setValue(value);
return;
}
}
DataProperty newProp = new DataProperty();
newProp.setName(name);
newProp.setValue(value);
newProp.setType(PropertyType.STRING);
properties.add(newProp);
}
public void fillCurrentSchema(AIFunctionSchema schema) {
if (schema == null) {
return;
}
fillCurrentSchema(List.of(schema));
}
public void fillCurrentSchema(List<AIFunctionSchema> schemas) {
if (CollectionUtils.isEmpty(schemas)) {
return;
}
Map<String, DataProperty> propertiesMap = this.properties.stream()
.collect(Collectors.toMap(DataProperty::getName, p -> p, (p1, p2) -> p1));
for (AIFunctionProperty property : schemas.stream().flatMap(e -> e.getProperties().stream()).toList()) {
DataProperty data = propertiesMap.get(property.getName());
if (data == null) {
log.warn("Schema [{}] is not filled with property [{}]", schemas.get(0).getSchemaName(), property);
continue;
}
data.setNameId(property.getNameId());
data.setDataNameId(property.getDataNameId());
if (property.isMultiSelect()) {
data.setMultiSelect(true);
}
data.setType(property.getType());
data.setValueAsNameId(property.isValueAsNameId());
}
}
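/*
 * Illustrative note (not part of the original source): once a message's properties are
 * populated, fillCurrentSchema(schema) copies nameId, dataNameId, multiSelect, type and
 * valueAsNameId from the matching AIFunctionProperty onto each existing DataProperty,
 * so clients can render typed fields without re-deriving the schema.
 */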
}
@Data
@NoArgsConstructor
public static class ChatRequest extends ChatMessage {
private String requestSchemaName;
private String workflowId;
private Boolean composable;
public ChatRequest(String chatId, Map<String, String> properties, Language language, String workflowId) {
this(chatId, properties, language, workflowId, null);
}
public ChatRequest(String chatId, Map<String, String> properties, Language language, String workflowId, String requestSchemaName) {
super(UUID.randomUUID().toString(), chatId, MessageType.USER);
this.language = language;
this.workflowId = workflowId;
this.requestSchemaName = requestSchemaName;
if (properties != null) {
setPropertiesMap(properties);
}
}
public static ChatRequest fromSession(
WorkflowContext session,
String workflowId,
Map<String, String> props) {
ChatRequest request = new ChatRequest();
request.setId(UUID.randomUUID().toString());
request.setChatId(session.getContextId());
request.setType(MessageType.USER);
request.setLanguage(session.getLanguage());
request.setWorkflowId(workflowId);
request.setTimestamp(System.currentTimeMillis());
request.setUserId(session.getUserId());
if (props != null) {
request.setPropertiesMap(props);
}
return request;
}
public static ChatRequest fromSessionWithMessage(
WorkflowContext session,
String workflowId,
String message) {
Map<String, String> props = new HashMap<>();
if (message != null) {
props.put("message", message);
}
return fromSession(session, workflowId, props);
}
@JsonIgnore
public String getMessage() {
Map<String, String> propsMap = getPropertiesMap();
return propsMap.get("message");
}
@JsonSetter("properties")
public void setPropertiesFromJson(JsonNode node) {
if (node == null) {
return;
}
if (node.isArray()) {
// Handle array format (default)
this.properties = new ArrayList<>();
for (JsonNode propNode : node) {
DataProperty prop = new DataProperty();
if (propNode.has("name")) prop.setName(propNode.get("name").asText());
if (propNode.has("value")) prop.setValue(propNode.get("value").asText());
if (propNode.has("nameId")) prop.setNameId(propNode.get("nameId").asText());
if (propNode.has("dataNameId")) prop.setDataNameId(propNode.get("dataNameId").asText());
if (propNode.has("data")) prop.setData(propNode.get("data").asText());
if (propNode.has("multiSelect")) prop.setMultiSelect(propNode.get("multiSelect").asBoolean());
if (propNode.has("type")) prop.setType(PropertyType.valueOf(propNode.get("type").asText()));
if (propNode.has("valueAsNameId")) prop.setValueAsNameId(propNode.get("valueAsNameId").asBoolean());
this.properties.add(prop);
}
} else if (node.isObject()) {
// Handle object format (backward compatibility)
this.properties = new ArrayList<>();
Iterator<Map.Entry<String, JsonNode>> fields = node.fields();
while (fields.hasNext()) {
Map.Entry<String, JsonNode> field = fields.next();
DataProperty prop = new DataProperty();
prop.setName(field.getKey());
prop.setValue(field.getValue().asText());
prop.setType(PropertyType.STRING);
this.properties.add(prop);
}
}
}
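/*
 * Illustrative payloads accepted by the setter above (not part of the original source).
 * Both shapes deserialize into the same DataProperty list:
 *
 *   "properties": [ { "name": "message", "value": "hello", "type": "STRING" } ]
 *   "properties": { "message": "hello" }   // legacy object format
 */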
public void resolveDataNameIdReferences(List<ChatMessage> previousMessages) {
if (properties == null || properties.isEmpty() || previousMessages == null || previousMessages.isEmpty()) {
return;
}
for (DataProperty property : properties) {
if (property.getDataNameId() == null) {
continue;
}
String dataNameId = property.getDataNameId();
for (ChatMessage message : previousMessages) {
if (id.equals(message.getId())) {
continue;
}
for (DataProperty historicalProp : message.getProperties()) {
if (historicalProp.getNameId() == null || historicalProp.getValue() == null) {
continue;
}
if (!dataNameId.equals(historicalProp.getNameId())) {
continue;
}
property.setData(historicalProp.getValue());
}
}
}
}
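/*
 * Illustrative example (not part of the original source): if an earlier message in the
 * chat exposed a property with nameId "selectedPlanId" and value "pro", an incoming
 * property whose dataNameId is "selectedPlanId" gets its data field set to "pro" by the
 * loop above. The property names are hypothetical.
 */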
}
@Data
@NoArgsConstructor
public static class ChatResponse extends ChatMessage {
private String workflowId;
private NextSchema nextSchema;
private boolean completed = true;
private Integer percentComplete;
private boolean required = true;
public ChatResponse(String responseId, String chatId, String workflowId, Language language,
boolean completed, Integer percentComplete, String userId, Map<String, String> props) {
this(responseId, chatId, workflowId, language, completed, percentComplete, true, userId, props);
}
public ChatResponse(String responseId, String chatId, String workflowId, Language language,
boolean completed, Integer percentComplete, boolean required, String userId, Map<String, String> props) {
super(responseId, chatId, MessageType.AI, userId);
this.workflowId = workflowId;
this.language = language;
this.completed = completed;
this.percentComplete = percentComplete != null ? percentComplete : 100;
this.required = required;
if (props != null) {
setPropertiesMap(props);
}
}
public ChatResponse(String responseId, String chatId, String workflowId, Language language, String userId, Map<String, String> props) {
this(responseId, chatId, workflowId, language, true, 100, userId, props);
}
public ChatResponse(String chatId, String workflowId, Language language,
AIFunctionSchema nextRequestSchema,
String responseId, boolean completed, Integer percentComplete, String userId) {
this(responseId, chatId, workflowId, language, completed, percentComplete, userId, null);
// Apply the supplied schema so the caller-provided nextRequestSchema is not silently dropped
setNextSchemaAsSchema(nextRequestSchema);
}
public ChatResponse(String responseId, String chatId, String workflowId, Language language, AIFunctionSchema nextRequestSchema,
boolean completed, Integer percentComplete, String userId) {
this(responseId, chatId, workflowId, language, completed, percentComplete, userId, null);
// Apply the supplied schema so the caller-provided nextRequestSchema is not silently dropped
setNextSchemaAsSchema(nextRequestSchema);
}
public void setNextSchemaAsSchema(AIFunctionSchema schema) {
if (schema == null) {
return;
}
NextSchema nextSchema = new NextSchema();
nextSchema.setSchemaName(schema.getSchemaName());
if (schema.getProperties() != null) {
List<NextProperties> nextProps = new ArrayList<>();
for (AIFunctionSchema.AIFunctionProperty prop : schema.getProperties()) {
NextProperties nextProp = new NextProperties();
nextProp.setName(prop.getName());
nextProp.setNameId(prop.getNameId());
nextProp.setType(prop.getType());
if (prop.getValues() != null) {
nextProp.setValues(prop.getValues());
}
nextProp.setMultiSelect(prop.isMultiSelect());
nextProps.add(nextProp);
}
nextSchema.setProperties(nextProps);
}
this.nextSchema = nextSchema;
}
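/*
 * Illustrative usage (not part of the original source): a workflow step that expects
 * structured user input next would typically call
 *
 *   response.setNextSchemaAsSchema(SchemaUtils.getSchemaFromClass(ConfirmOrderInput.class));
 *
 * where ConfirmOrderInput is a hypothetical schema class.
 */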
public static ChatResponse fromSession(
WorkflowContext session,
String workflowId,
Map<String, String> props) {
return new ChatResponse(
session.getCurrentResponseId(),
session.getContextId(),
workflowId,
session.getLanguage(),
session.getUserId(),
props
);
}
public static ChatResponse fromSessionWithMessage(
WorkflowContext session,
String workflowId,
String message) {
Map<String, String> props = new HashMap<>();
if (message != null) {
props.put("message", message);
}
return fromSession(session, workflowId, props);
}
public static ChatResponse fromSessionWithError(
WorkflowContext session,
String workflowId,
String errorMessage) {
Map<String, String> props = new HashMap<>();
if (errorMessage != null) {
props.put("error", errorMessage);
}
return fromSession(session, workflowId, props);
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class NextSchema implements Serializable {
String schemaName;
List<NextProperties> properties;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(Include.NON_DEFAULT)
public static class NextProperties implements Serializable {
private String name;
private String nameId;
private PropertyType type;
private List<String> values;
private boolean isMultiSelect;
}
}
@Data
@AllArgsConstructor
@NoArgsConstructor
public static class Message {
private String content;
private MessageType type;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/model/ChatMessageTask.java
|
package ai.driftkit.chat.framework.model;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse.NextSchema;
import ai.driftkit.chat.framework.model.ChatDomain.DataProperty;
import ai.driftkit.chat.framework.model.ChatDomain.MessageType;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* Represents a task in the chat conversation UI.
* Used for displaying message tasks in the frontend with progress tracking.
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(Include.NON_NULL)
public class ChatMessageTask {
private String id;
private String nameId;
private MessageType type;
private List<DataProperty> properties;
private NextSchema nextSchema;
private long timestamp;
private Boolean completed;
private Integer percentComplete;
private Boolean required;
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/model/ChatMessageTaskConverter.java
|
package ai.driftkit.chat.framework.model;
import ai.driftkit.chat.framework.model.ChatDomain.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
/**
* Converts ChatRequest and ChatResponse objects to ChatMessageTask format.
*/
@Slf4j
public class ChatMessageTaskConverter {
/**
* Converts a ChatMessage to one or more ChatMessageTask objects.
* For composable ChatRequests, creates a separate ChatMessageTask for each nameId-value pair.
* For other messages, creates a single ChatMessageTask.
*
* @param message The message to convert
* @return List of converted ChatMessageTask objects
*/
public static List<ChatMessageTask> convert(ChatMessage message) {
if (message instanceof ChatRequest) {
return convertRequest((ChatRequest) message);
} else if (message instanceof ChatResponse) {
List<ChatMessageTask> tasks = new ArrayList<>();
tasks.add(convertResponse((ChatResponse) message));
return tasks;
} else {
throw new IllegalArgumentException("Unsupported message type: " + message.getClass().getName());
}
}
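/*
 * Illustrative usage sketch (not part of the original source); chatHistoryService and
 * pageable are assumed to come from the surrounding application:
 *
 *   Page<ChatMessage> history = chatHistoryService.getMessages(chatId, pageable);
 *   List<ChatMessageTask> tasks = ChatMessageTaskConverter.convertAll(history.getContent());
 */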
/**
* Converts a ChatRequest to one or more ChatMessageTask objects.
*
* 1. If ChatRequest is composable=true, creates a separate ChatMessageTask for each nameId-value pair
* 2. Otherwise creates a single ChatMessageTask with all properties
*
* @param request The ChatRequest to convert
* @return List of converted ChatMessageTask objects
*/
private static List<ChatMessageTask> convertRequest(ChatRequest request) {
List<ChatMessageTask> tasks = new ArrayList<>();
// If it's a composable request (composable=true), create a task for each property
if (request.getComposable() != null && request.getComposable()) {
long timestamp = request.getTimestamp();
for (DataProperty prop : request.getProperties()) {
// Skip properties with no nameId or value
if (prop.getNameId() == null || prop.getValue() == null) {
continue;
}
ChatMessageTask task = new ChatMessageTask();
task.setId(request.getId() + "_" + prop.getNameId() + "_AI");
task.setType(MessageType.AI);
task.setNameId(prop.getNameId());
task.setTimestamp(timestamp++);
task.setRequired(true);
tasks.add(task);
task = new ChatMessageTask();
task.setId(request.getId() + "_" + prop.getNameId() + "_USER");
task.setType(request.getType());
task.setTimestamp(timestamp++);
List<DataProperty> properties = new ArrayList<>();
DataProperty valueProp = getDataProperty(prop);
properties.add(valueProp);
// Check if property has valueAsNameId=true and set nameId to property value
if (prop.isValueAsNameId()) {
task.setNameId(prop.getValue());
log.debug("Using field value as nameId: {} -> {}", prop.getNameId(), prop.getValue());
}
task.setProperties(properties);
tasks.add(task);
}
} else {
// For non-composable requests, create a single task with all properties
ChatMessageTask task = new ChatMessageTask();
task.setId(request.getId());
task.setType(request.getType());
task.setTimestamp(request.getTimestamp());
// Use requestSchemaName as messageNameId
task.setNameId(request.getRequestSchemaName());
if (request.getProperties().size() == 1) {
DataProperty valueProp = getDataProperty(request.getProperties().getFirst());
task.setProperties(List.of(valueProp));
// Check if property has valueAsNameId=true and set nameId to property value
DataProperty prop = request.getProperties().getFirst();
if (prop.isValueAsNameId()) {
task.setNameId(prop.getValue());
log.debug("Using field value as nameId: {} -> {}", prop.getNameId(), prop.getValue());
}
} else {
task.setProperties(new ArrayList<>(request.getProperties()));
// Check if any property has valueAsNameId=true, and use its value as nameId
for (DataProperty prop : request.getProperties()) {
if (prop.isValueAsNameId()) {
task.setNameId(prop.getValue());
log.debug("Using field value as nameId: {} -> {}", prop.getNameId(), prop.getValue());
break; // Use the first match only
}
}
}
task.setType(request.getType());
tasks.add(task);
}
return tasks;
}
@NotNull
private static DataProperty getDataProperty(DataProperty prop) {
DataProperty valueProp = new DataProperty();
valueProp.setName(prop.getName());
valueProp.setValue(prop.getValue());
valueProp.setNameId(prop.getNameId());
valueProp.setData(prop.getData());
valueProp.setType(prop.getType());
valueProp.setMultiSelect(prop.getMultiSelect());
return valueProp;
}
/**
* Converts a ChatResponse to a ChatMessageTask.
*
* 1. Transfers properties to ChatMessageTask properties
* 2. Uses nextSchema.properties.nameId for the first element with a defined nameId as messageNameId
* 3. Sets the nextSchema in the ChatMessageTask
*
* @param response The ChatResponse to convert
* @return The converted ChatMessageTask
*/
private static ChatMessageTask convertResponse(ChatResponse response) {
ChatMessageTask task = new ChatMessageTask();
task.setId(response.getId());
task.setType(response.getType());
task.setCompleted(response.isCompleted());
task.setPercentComplete(response.getPercentComplete());
task.setRequired(response.isRequired());
// Set properties
List<DataProperty> properties = response.getProperties();
if (response.getNextSchema() != null && response.getNextSchema().getProperties() != null) {
Optional<String> messageNameId = response.getNextSchema().getProperties().stream()
.filter(prop -> StringUtils.isNotBlank(prop.getNameId()))
.map(ChatResponse.NextProperties::getNameId)
.findFirst();
task.setNameId(messageNameId.orElse(null));
}
if (properties.size() == 1) {
DataProperty prop = properties.get(0);
if (prop.isValueAsNameId()) {
task.setNameId(prop.getValue());
}
}
task.setProperties(properties);
task.setTimestamp(response.getTimestamp());
task.setNextSchema(response.getNextSchema());
task.setType(response.getType());
return task;
}
/**
* Converts a list of ChatMessage objects to ChatMessageTask objects.
* For composable ChatRequests, creates multiple ChatMessageTask objects.
*
* @param messages The messages to convert
* @return The converted ChatMessageTask objects
*/
public static List<ChatMessageTask> convertAll(List<ChatMessage> messages) {
List<ChatMessageTask> tasks = new ArrayList<>();
for (ChatMessage message : messages) {
try {
// Convert message to one or more tasks
List<ChatMessageTask> messageTasks = convert(message);
tasks.addAll(messageTasks);
} catch (Exception e) {
log.error("Failed to convert message to task: {}", e.getMessage(), e);
}
}
return tasks.stream()
.sorted(Comparator.comparing(ChatMessageTask::getTimestamp))
.collect(Collectors.toList());
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/model/ChatSession.java
|
package ai.driftkit.chat.framework.model;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import java.util.HashMap;
import java.util.Map;
/**
* Represents a chat session.
* Stores metadata about chat sessions for display in chat list.
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Document("chat_sessions")
public class ChatSession {
/**
* Unique identifier for the chat
*/
@Id
private String chatId;
/**
* Name of the chat (for display purposes)
*/
private String name;
/**
* Description or snippet of the last message
*/
private String description;
/**
* User ID of the chat owner
*/
private String userId;
/**
* Timestamp of the last message
*/
private Long lastMessageTime;
/**
* Workflow ID of the current workflow
*/
private String workflowId;
/**
* Additional properties for the chat
*/
@Builder.Default
private Map<String, String> properties = new HashMap<>();
/**
* Creation timestamp
*/
private Long createdTime;
/**
* Last updated timestamp
*/
private Long updatedTime;
/**
* Whether the chat is archived
*/
@Builder.Default
private boolean archived = false;
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/model/StepDefinition.java
|
package ai.driftkit.chat.framework.model;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
import java.util.List;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class StepDefinition {
private int index;
private String id;
private String action;
private boolean userInputRequired;
@Builder.Default
private List<AIFunctionSchema> inputSchemas = new ArrayList<>();
@Builder.Default
private List<AIFunctionSchema> outputSchemas = new ArrayList<>();
private boolean asyncExecution;
@Builder.Default
private List<String> nextStepIds = new ArrayList<>();
public void addNextStepId(String nextStepId) {
if (nextStepId != null && !this.nextStepIds.contains(nextStepId)) {
this.nextStepIds.add(nextStepId);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/model/WorkflowContext.java
|
package ai.driftkit.chat.framework.model;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.ai.utils.AIUtils;
import ai.driftkit.common.domain.Language;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.type.CollectionType;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Slf4j
public class WorkflowContext {
private String contextId;
private String currentResponseId;
private String userId;
private Language language;
private String workflowId;
private String currentStepId;
private WorkflowSessionState state;
private AIFunctionSchema currentSchema;
private AIFunctionSchema nextInputSchema;
@Builder.Default
private Map<String, String> properties = new HashMap<>();
@Builder.Default
private List<StepExecutionRecord> executionHistory = new ArrayList<>();
@Builder.Default
private Map<String, Object> context = new HashMap<>();
private Long createdTime;
private Long updatedTime;
public synchronized void saveStepExecution(String stepId, Map<String, String> stepProperties) {
if (executionHistory == null) {
executionHistory = new ArrayList<>();
}
StepExecutionRecord record = new StepExecutionRecord();
record.setStepId(stepId);
record.setTimestamp(System.currentTimeMillis());
record.setProperties(new HashMap<>(stepProperties));
executionHistory.add(record);
this.updatedTime = System.currentTimeMillis();
}
public synchronized void putAll(Map<String, String> map) {
if (properties == null) {
properties = new HashMap<>();
}
properties.putAll(map);
this.updatedTime = System.currentTimeMillis();
}
public synchronized void putProperty(String key, String value) {
if (properties == null) {
properties = new HashMap<>();
}
properties.put(key, value);
this.updatedTime = System.currentTimeMillis();
}
public synchronized void setContextValue(String key, Object value) {
if (context == null) {
context = new HashMap<>();
}
try {
if (value instanceof String) {
context.put(key, value);
} else if (value != null) {
String jsonValue = AIUtils.OBJECT_MAPPER.writeValueAsString(value);
context.put(key + "_type", value.getClass().getName());
context.put(key, jsonValue);
} else {
context.put(key, null);
}
this.updatedTime = System.currentTimeMillis();
} catch (JsonProcessingException e) {
log.error("Error serializing object for key [{}]: {}", key, e.getMessage());
}
}
@SuppressWarnings("unchecked")
public synchronized <T> T getContextValue(String key, Class<T> type) {
if (context == null || !context.containsKey(key)) {
return null;
}
Object value = context.get(key);
if (type.isInstance(value)) {
return type.cast(value);
}
if (type == String.class && value instanceof String) {
return type.cast(value);
}
try {
if (value instanceof String) {
String jsonValue = (String)value;
return AIUtils.OBJECT_MAPPER.readValue(jsonValue, type);
}
} catch (JsonProcessingException e) {
log.error("Error deserializing JSON for key [{}] to type [{}]: {}", key, type.getName(), e.getMessage());
}
return null;
}
public synchronized <T> List<T> getContextValueAsList(String key, Class<T> elementType) {
if (context == null || !context.containsKey(key)) {
return null;
}
Object value = context.get(key);
if (value instanceof List) {
List<?> list = (List<?>) value;
if (list.isEmpty() || elementType.isInstance(list.get(0))) {
return (List<T>) list;
}
}
try {
if (value instanceof String) {
String jsonValue = (String)value;
CollectionType listType = AIUtils.OBJECT_MAPPER.getTypeFactory()
.constructCollectionType(List.class, elementType);
return AIUtils.OBJECT_MAPPER.readValue(jsonValue, listType);
}
} catch (JsonProcessingException e) {
log.error("Error deserializing JSON for key [{}] to List<{}>: {}",
key, elementType.getName(), e.getMessage());
}
return null;
}
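/*
 * Illustrative round-trip (not part of the original source). Non-String values are
 * stored as JSON alongside a "<key>_type" entry and deserialized again on read;
 * OrderDraft is a hypothetical class.
 *
 *   workflowContext.setContextValue("draft", new OrderDraft("SKU-1", 2));
 *   OrderDraft draft = workflowContext.getContextValue("draft", OrderDraft.class);
 *   List<OrderDraft> drafts = workflowContext.getContextValueAsList("drafts", OrderDraft.class);
 */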
public synchronized String getProperty(String param) {
return properties.get(param);
}
public enum WorkflowSessionState {
NEW,
WAITING_FOR_USER_INPUT,
PROCESSING,
EXECUTING_STEP,
COMPLETED,
ERROR
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class StepExecutionRecord {
private String stepId;
private Long timestamp;
private Map<String, String> properties;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/repository/ChatMessageRepository.java
|
package ai.driftkit.chat.framework.repository;
import ai.driftkit.chat.framework.model.ChatDomain.ChatMessage;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
/**
* Repository for managing chat messages in MongoDB.
* This repository is only active when MongoDB is enabled.
*/
@Repository
public interface ChatMessageRepository extends MongoRepository<ChatMessage, String> {
/**
* Find all messages for a chat, ordered by timestamp descending
* @param chatId The chat ID
* @param pageable Pagination information
* @return Page of chat messages
*/
Page<ChatMessage> findByChatIdOrderByTimestampDesc(String chatId, Pageable pageable);
/**
* Delete all messages for a chat
* @param chatId The chat ID
* @return Number of deleted messages
*/
long deleteByChatId(String chatId);
/**
* Count messages for a chat
* @param chatId The chat ID
* @return Number of messages
*/
long countByChatId(String chatId);
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/repository/ChatSessionRepository.java
|
package ai.driftkit.chat.framework.repository;
import ai.driftkit.chat.framework.model.ChatSession;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
* Repository for managing chat sessions in MongoDB.
* This repository is only active when MongoDB is enabled.
*/
@Repository
public interface ChatSessionRepository extends MongoRepository<ChatSession, String> {
/**
* Find a chat session by chat ID
* @param chatId The chat ID
* @return Optional containing the chat session if found
*/
Optional<ChatSession> findByChatId(String chatId);
/**
* Find all chats for a user, excluding archived ones, ordered by last message time
* @param userId The user ID
* @param pageable Pagination information
* @return Page of chat sessions
*/
Page<ChatSession> findByUserIdAndArchivedFalseOrderByLastMessageTimeDesc(String userId, Pageable pageable);
/**
* Find all non-archived chats, ordered by last message time
* @param pageable Pagination information
* @return Page of chat sessions
*/
Page<ChatSession> findByArchivedFalseOrderByLastMessageTimeDesc(Pageable pageable);
/**
* Count chats for a user
* @param userId The user ID
* @return Number of chats
*/
long countByUserId(String userId);
/**
* Count non-archived chats for a user
* @param userId The user ID
* @return Number of active chats
*/
long countByUserIdAndArchivedFalse(String userId);
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/repository/WorkflowContextRepository.java
|
package ai.driftkit.chat.framework.repository;
import ai.driftkit.chat.framework.model.WorkflowContext;
import java.util.Optional;
/**
* Repository interface for storing and retrieving workflow contexts.
* Implementation should be provided by the consuming application.
*/
public interface WorkflowContextRepository {
/**
* Find a workflow context by its ID
* @param contextId The context ID
* @return Optional containing the context if found
*/
Optional<WorkflowContext> findById(String contextId);
/**
* Save or update a workflow context
* @param context The context to save
* @return The saved context
*/
WorkflowContext saveOrUpdate(WorkflowContext context);
/**
* Delete a workflow context by its ID
* @param contextId The context ID to delete
*/
void deleteById(String contextId);
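/*
 * Minimal in-memory implementation sketch (not part of the original source), e.g. for
 * tests; a production deployment would typically back this interface with a database:
 *
 *   class InMemoryWorkflowContextRepository implements WorkflowContextRepository {
 *       private final Map<String, WorkflowContext> store = new ConcurrentHashMap<>();
 *       public Optional<WorkflowContext> findById(String id) { return Optional.ofNullable(store.get(id)); }
 *       public WorkflowContext saveOrUpdate(WorkflowContext ctx) { store.put(ctx.getContextId(), ctx); return ctx; }
 *       public void deleteById(String id) { store.remove(id); }
 *   }
 */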
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service/AsyncResponseTracker.java
|
package ai.driftkit.chat.framework.service;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;
/**
* Service interface for tracking asynchronous responses.
* Implementation should be provided by the consuming application.
*/
public interface AsyncResponseTracker {
/**
* Generate a unique response ID
* @return A unique response ID
*/
String generateResponseId();
/**
* Track a response for asynchronous processing
* @param responseId The response ID
* @param response The response to track
*/
void trackResponse(String responseId, ChatResponse response);
/**
* Update the status of a tracked response
* @param responseId The response ID
* @param response The updated response
*/
void updateResponseStatus(String responseId, ChatResponse response);
/**
* Get a tracked response by ID
* @param responseId The response ID
* @return The response if found, null otherwise
*/
Optional<ChatResponse> getResponse(String responseId);
/**
* Remove a tracked response
* @param responseId The response ID to remove
*/
void removeResponse(String responseId);
/**
* Execute a task asynchronously and track its progress
* @param <T> The type of response (must extend ChatResponse)
* @param responseId The response ID for tracking
* @param initialResponse The initial response to return immediately
* @param task The async task to execute
* @return A CompletableFuture that completes when the task is done
*/
<T extends ChatResponse> CompletableFuture<T> executeAsync(
String responseId,
T initialResponse,
Supplier<T> task);
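/*
 * Illustrative call site (not part of the original source). The initial response is
 * handed back to the client immediately while the supplier runs in the background:
 *
 *   tracker.executeAsync(responseId, initialResponse,
 *           () -> workflow.processChat(request));
 */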
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service/ChatHistoryService.java
|
package ai.driftkit.chat.framework.service;
import ai.driftkit.chat.framework.model.ChatDomain.ChatMessage;
import ai.driftkit.chat.framework.model.ChatDomain.ChatRequest;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
/**
* Service interface for managing chat history.
* Implementation should be provided by the consuming application.
*/
public interface ChatHistoryService {
/**
* Add a chat request to the history
* @param request The request to add
*/
void addRequest(ChatRequest request);
/**
* Add a chat response to the history
* @param response The response to add
*/
void addResponse(ChatResponse response);
/**
* Update an existing response in the history
* @param response The response to update
*/
void updateResponse(ChatResponse response);
/**
* Get a request by ID
* @param requestId The request ID
* @return The request if found, null otherwise
*/
ChatRequest getRequest(String requestId);
/**
* Get a response by ID
* @param responseId The response ID
* @return The response if found, null otherwise
*/
ChatResponse getResponse(String responseId);
/**
* Get a message by ID
* @param messageId The message ID
* @return The message if found, null otherwise
*/
ChatMessage getMessage(String messageId);
/**
* Get all messages for a chat, ordered by timestamp (newest first)
* @param chatId The chat ID
* @param pageable Pagination information
* @return Page of chat messages
*/
Page<ChatMessage> getMessages(String chatId, Pageable pageable);
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service/ChatMessageService.java
|
package ai.driftkit.chat.framework.service;
import ai.driftkit.chat.framework.model.ChatDomain.ChatMessage;
import ai.driftkit.chat.framework.model.ChatDomain.ChatRequest;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import ai.driftkit.chat.framework.model.ChatDomain.MessageType;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
/**
* Service interface for managing chat messages.
* Implementation should be provided by the consuming application.
*/
public interface ChatMessageService {
/**
* Add a message to the chat
* @param chatId The chat ID
* @param message The message content
* @param type The message type
*/
void addMessage(String chatId, String message, MessageType type);
/**
* Add a user message to the chat
* @param chatId The chat ID
* @param message The message content
*/
default void addUserMessage(String chatId, String message) {
addMessage(chatId, message, MessageType.USER);
}
/**
* Add an AI message to the chat
* @param chatId The chat ID
* @param message The message content
*/
default void addAIMessage(String chatId, String message) {
addMessage(chatId, message, MessageType.AI);
}
/**
* Add a context message to the chat
* @param chatId The chat ID
* @param message The message content
*/
default void addContextMessage(String chatId, String message) {
addMessage(chatId, message, MessageType.CONTEXT);
}
/**
* Add a chat request to the history
* @param chatId The chat ID
* @param request The request to add
* @return The saved request
*/
ChatRequest addRequest(String chatId, ChatRequest request);
/**
* Add a chat response to the history
* @param chatId The chat ID
* @param response The response to add
* @return The saved response
*/
ChatResponse addResponse(String chatId, ChatResponse response);
/**
* Update an existing chat response
* @param response The response to update
* @return The updated response
*/
ChatResponse updateResponse(ChatResponse response);
/**
* Get chat history for a session
* @param chatId The chat ID
* @param pageable Pagination information
* @return Page of chat messages
*/
Page<ChatMessage> getHistory(String chatId, Pageable pageable);
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service/ChatSessionService.java
|
package ai.driftkit.chat.framework.service;
import ai.driftkit.chat.framework.model.ChatDomain.ChatMessage;
import ai.driftkit.chat.framework.model.ChatSession;
import ai.driftkit.chat.framework.repository.ChatMessageRepository;
import ai.driftkit.chat.framework.repository.ChatSessionRepository;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
/**
* Service for managing chat sessions.
* Provides functionality for creating, retrieving, updating, and archiving chat sessions.
* Uses database for persistence with a cache for frequently accessed sessions.
*/
@Slf4j
@Service
public class ChatSessionService {
@Autowired
private ChatSessionRepository chatRepository;
@Autowired
private ChatMessageRepository messageRepository;
// Cache for frequently accessed sessions
private final Map<String, ChatSession> sessionCache = new ConcurrentHashMap<>();
/**
* Create a new chat session
*/
public ChatSession createChat(String userId, String name) {
String chatId = UUID.randomUUID().toString();
return createChatWithId(chatId, userId, name);
}
/**
* Create a new chat session with specific ID
*/
public ChatSession createChatWithId(String chatId, String userId, String name) {
long now = System.currentTimeMillis();
name = StringUtils.isNotBlank(name) ? getDescription(name) : "New Chat - " + new Date(now);
ChatSession chat = ChatSession.builder()
.chatId(chatId)
.userId(userId)
.name(name)
.lastMessageTime(now)
.createdTime(now)
.updatedTime(now)
.build();
try {
ChatSession savedChat = chatRepository.save(chat);
// Update cache
sessionCache.put(chatId, savedChat);
return savedChat;
} catch (Exception e) {
log.error("Error creating chat: {}", e.getMessage(), e);
throw new RuntimeException("Failed to create chat", e);
}
}
/**
* Get a chat by ID, creating it if it doesn't exist
*/
public ChatSession getOrCreateChat(String chatId, String userId, String name) {
Optional<ChatSession> existing = getChat(chatId);
return existing.orElseGet(() -> createChatWithId(chatId, userId, name));
}
/**
* Get a chat by ID
*/
public Optional<ChatSession> getChat(String chatId) {
if (chatId == null) {
return Optional.empty();
}
try {
// Check cache first
ChatSession cached = sessionCache.get(chatId);
if (cached != null) {
return Optional.of(cached);
}
// If not in cache, query database
Optional<ChatSession> session = chatRepository.findByChatId(chatId);
session.ifPresent(s -> sessionCache.put(chatId, s));
return session;
} catch (Exception e) {
log.error("Error getting chat: {}", e.getMessage(), e);
return Optional.empty();
}
}
/**
* List chats for a user
*/
public Page<ChatSession> listChatsForUser(String userId, Pageable pageable) {
if (userId == null) {
return Page.empty();
}
try {
return chatRepository.findByUserIdAndArchivedFalseOrderByLastMessageTimeDesc(userId, pageable);
} catch (Exception e) {
log.error("Error listing chats for user {}: {}", userId, e.getMessage(), e);
return Page.empty();
}
}
/**
* List all chats
*/
public Page<ChatSession> listAllChats(Pageable pageable) {
try {
return chatRepository.findByArchivedFalseOrderByLastMessageTimeDesc(pageable);
} catch (Exception e) {
log.error("Error listing all chats: {}", e.getMessage(), e);
return Page.empty();
}
}
/**
* Update the last message in a chat
*/
public void updateLastMessage(String chatId, ChatMessage message) {
if (chatId == null || message == null) {
return;
}
try {
Optional<ChatSession> chatOpt = getChat(chatId);
if (chatOpt.isPresent()) {
ChatSession chat = chatOpt.get();
String content = message.getPropertiesMap().getOrDefault(
"message",
message.getPropertiesMap().getOrDefault("messageId", "")
);
String description = getDescription(content);
chat.setDescription(description);
chat.setLastMessageTime(message.getTimestamp());
chat.setUpdatedTime(System.currentTimeMillis());
chatRepository.save(chat);
// Update cache
sessionCache.put(chatId, chat);
log.debug("Updated last message for chat: {}", chatId);
} else {
log.warn("Cannot update last message - chat not found: {}", chatId);
}
} catch (Exception e) {
log.error("Error updating last message for chat: {}", chatId, e);
}
}
/**
* Archive a chat
*/
public void archiveChat(String chatId) {
if (chatId == null) {
return;
}
try {
Optional<ChatSession> chatOpt = getChat(chatId);
if (chatOpt.isPresent()) {
ChatSession chat = chatOpt.get();
chat.setArchived(true);
chat.setUpdatedTime(System.currentTimeMillis());
chatRepository.save(chat);
// Update cache
sessionCache.put(chatId, chat);
log.info("Archived chat: {}", chatId);
} else {
log.warn("Cannot archive chat - not found: {}", chatId);
}
} catch (Exception e) {
log.error("Error archiving chat: {}", chatId, e);
}
}
/**
* Delete a chat and all its messages
*/
public void deleteChat(String chatId) {
if (chatId == null) {
return;
}
try {
// Delete the chat
chatRepository.deleteById(chatId);
// Delete all messages for this chat
Page<ChatMessage> messages = messageRepository.findByChatIdOrderByTimestampDesc(chatId, Pageable.unpaged());
messageRepository.deleteAll(messages);
log.info("Deleted chat: {} with {} messages", chatId, messages.getContent().size());
// Remove from cache
sessionCache.remove(chatId);
} catch (Exception e) {
log.error("Error deleting chat: {}", chatId, e);
}
}
/**
* Update chat properties
*/
public void updateChatProperties(String chatId, Map<String, String> properties) {
if (chatId == null || properties == null || properties.isEmpty()) {
return;
}
try {
Optional<ChatSession> chatOpt = getChat(chatId);
if (chatOpt.isPresent()) {
ChatSession chat = chatOpt.get();
chat.getProperties().putAll(properties);
chat.setUpdatedTime(System.currentTimeMillis());
chatRepository.save(chat);
// Update cache
sessionCache.put(chatId, chat);
log.debug("Updated properties for chat: {}", chatId);
} else {
log.warn("Cannot update properties - chat not found: {}", chatId);
}
} catch (Exception e) {
log.error("Error updating chat properties: {}", chatId, e);
}
}
/**
* Count chats for a user
*/
public long countUserChats(String userId, boolean includeArchived) {
if (userId == null) {
return 0;
}
try {
return includeArchived ?
chatRepository.countByUserId(userId) :
chatRepository.countByUserIdAndArchivedFalse(userId);
} catch (Exception e) {
log.error("Error counting chats for user {}: {}", userId, e.getMessage(), e);
return 0;
}
}
/**
* Helper method to get description from content
*/
@NotNull
private static String getDescription(String content) {
if (StringUtils.isBlank(content)) {
return "";
}
String description;
if (content.length() > 100) {
description = content.substring(0, 97) + "...";
} else {
description = content;
}
return description;
}
/**
* Clean up cache periodically
* Run every 30 minutes
*/
@Scheduled(fixedRate = 30 * 60 * 1000)
public void cleanupCache() {
try {
int initialSize = sessionCache.size();
// Clear the entire cache - it will be repopulated on demand
sessionCache.clear();
log.info("Session cache cleanup: cleared {} entries", initialSize);
} catch (Exception e) {
log.error("Error during session cache cleanup", e);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service/ChatWorkflowService.java
|
package ai.driftkit.chat.framework.service;
import ai.driftkit.chat.framework.model.ChatDomain.ChatRequest;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import ai.driftkit.chat.framework.model.ChatDomain.MessageType;
import ai.driftkit.chat.framework.model.WorkflowContext;
import ai.driftkit.chat.framework.repository.WorkflowContextRepository;
import ai.driftkit.chat.framework.workflow.AnnotatedWorkflow;
import ai.driftkit.chat.framework.workflow.ChatWorkflow;
import ai.driftkit.chat.framework.workflow.WorkflowRegistry;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.Optional;
import java.util.UUID;
/**
* Service for processing chat requests through workflows.
* Uses persistent storage for sessions and responses.
* Works exclusively with AnnotatedWorkflow implementations.
*/
@Slf4j
@Service
@RequiredArgsConstructor
public class ChatWorkflowService {
private final WorkflowContextRepository sessionRepository;
private final AsyncResponseTracker asyncResponseTracker;
private final ChatHistoryService chatHistoryService;
private final ChatMessageService chatMessageService;
/**
* Process a chat request either synchronously or asynchronously
* depending on the step in the workflow.
*/
public ChatResponse processChat(ChatRequest request) {
try {
String sessionId = request.getChatId();
String workflowId = request.getWorkflowId();
String userId = request.getUserId();
chatMessageService.addRequest(sessionId, request);
if (StringUtils.isBlank(userId)) {
userId = request.getPropertiesMap().getOrDefault("userId", "anonymous");
request.setUserId(userId);
}
WorkflowContext session = findOrCreateSession(sessionId, userId);
Optional<? extends ChatWorkflow> workflowOpt = findWorkflow(workflowId, request, session);
if (workflowOpt.isEmpty()) {
throw new IllegalStateException("No suitable workflow found for this request");
}
ChatWorkflow workflow = workflowOpt.get();
// Generate response ID for tracking
String responseId = UUID.randomUUID().toString();
session.setCurrentResponseId(responseId);
if (StringUtils.isBlank(session.getUserId())) {
session.setUserId(userId);
}
session.setWorkflowId(workflow.getWorkflowId());
if (request.getLanguage() != null) {
session.setLanguage(request.getLanguage());
}
WorkflowRegistry.saveSession(session);
ChatResponse response = workflow.processChat(request);
if (StringUtils.isBlank(response.getUserId())) {
response.setUserId(userId);
}
try {
sessionRepository.saveOrUpdate(session);
} catch (Exception e) {
throw new RuntimeException(e);
}
// If the request has properties with dataNameId references, resolve them
if (hasDataNameIdReferences(request)) {
resolveDataNameIdReferences(request);
chatMessageService.addRequest(request.getChatId(), request);
}
// Track the response for status updates
asyncResponseTracker.trackResponse(session.getCurrentResponseId(), response);
return response;
} catch (Exception e) {
log.error("Error processing chat workflow", e);
throw new RuntimeException("Error processing chat workflow: " + e.getMessage(), e);
}
}
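/*
 * Illustrative caller-side sketch (not part of the original source): a controller would
 * typically return the first response immediately and let clients poll for updates on
 * long-running steps.
 *
 *   ChatResponse response = chatWorkflowService.processChat(request);
 *   Optional<ChatResponse> latest = chatWorkflowService.getResponse(response.getId());
 */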
/**
* Check if the request has any properties with dataNameId references
*/
private boolean hasDataNameIdReferences(ChatRequest request) {
if (request.getProperties() == null || request.getProperties().isEmpty()) {
return false;
}
// Check if any property has a non-null dataNameId
return request.getProperties().stream()
.anyMatch(prop -> prop.getDataNameId() != null && !prop.getDataNameId().isEmpty());
}
/**
* Resolve any dataNameId references in the request properties
* by looking up values from previous messages in the chat.
*/
private void resolveDataNameIdReferences(ChatRequest request) {
try {
if (request.getChatId() == null) {
return;
}
// Get recent messages for this chat
var previousMessages = chatHistoryService.getMessages(request.getChatId(), Pageable.ofSize(100))
.stream()
.filter(e -> e.getType() != MessageType.CONTEXT)
.toList();
// Process the request to resolve dataNameId references
request.resolveDataNameIdReferences(previousMessages);
} catch (Exception e) {
log.warn("Error resolving dataNameId references: {}", e.getMessage(), e);
// This is non-critical, so we just log the error and continue
}
}
public Optional<ChatResponse> getResponse(String responseId) {
return asyncResponseTracker.getResponse(responseId);
}
private WorkflowContext findOrCreateSession(String sessionId, String userId) {
Optional<WorkflowContext> existingSession = sessionRepository.findById(sessionId);
if (existingSession.isPresent()) {
WorkflowContext session = existingSession.get();
if (StringUtils.isBlank(session.getUserId()) && StringUtils.isNotBlank(userId)) {
session.setUserId(userId);
}
WorkflowRegistry.saveSession(session);
return session;
} else {
long now = System.currentTimeMillis();
WorkflowContext newSession = new WorkflowContext();
newSession.setContextId(sessionId);
newSession.setUserId(userId);
newSession.setState(WorkflowContext.WorkflowSessionState.NEW);
newSession.setProperties(new HashMap<>());
newSession.setCreatedTime(now);
newSession.setUpdatedTime(now);
WorkflowRegistry.saveSession(newSession);
return newSession;
}
}
/**
* Find the appropriate workflow to handle the request
*/
private Optional<? extends ChatWorkflow> findWorkflow(String workflowId, ChatRequest request, WorkflowContext session) {
// First check if the session has a workflow already
if (StringUtils.isNotBlank(session.getWorkflowId())) {
Optional<AnnotatedWorkflow> existingWorkflow = WorkflowRegistry.getWorkflow(session.getWorkflowId());
if (existingWorkflow.isPresent()) {
return existingWorkflow;
}
}
// If workflowId is specified in the request, try to use it directly
if (StringUtils.isNotBlank(workflowId)) {
Optional<AnnotatedWorkflow> requestedWorkflow = WorkflowRegistry.getWorkflow(workflowId);
if (requestedWorkflow.isPresent()) {
return requestedWorkflow;
}
}
// Look for a suitable workflow in the registry based on message content and properties
return WorkflowRegistry.findWorkflowForMessage(
request.getPropertiesMap().getOrDefault("message", ""),
request.getPropertiesMap()
);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service/impl/DefaultAsyncResponseTracker.java
|
package ai.driftkit.chat.framework.service.impl;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import ai.driftkit.chat.framework.service.AsyncResponseTracker;
import ai.driftkit.chat.framework.service.ChatHistoryService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.*;
import java.util.function.Supplier;
/**
* Default implementation of AsyncResponseTracker.
* Service for tracking and managing asynchronous responses.
* Allows tasks to execute in the background while providing progress updates.
* Uses a cache for quick access with database fallback for persistence.
*/
@Slf4j
@Service
@ConditionalOnMissingBean(AsyncResponseTracker.class)
public class DefaultAsyncResponseTracker implements AsyncResponseTracker {
// Cache for active responses - cleared periodically, DB is source of truth
private final Map<String, ChatResponse> responseCache = new ConcurrentHashMap<>();
// Cache for executing tasks
private final Map<String, CompletableFuture<?>> runningTasks = new ConcurrentHashMap<>();
// Maximum age for cached items in milliseconds (30 minutes)
private static final long MAX_CACHE_AGE_MS = TimeUnit.MINUTES.toMillis(30);
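// Fixed-size thread pool used to run tasks submitted via executeAsync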
private final ExecutorService executorService = Executors.newFixedThreadPool(10);
private final ChatHistoryService historyService;
public DefaultAsyncResponseTracker(ChatHistoryService historyService) {
this.historyService = historyService;
}
/**
* Generate a unique response ID for tracking
*/
@Override
public String generateResponseId() {
return UUID.randomUUID().toString();
}
/**
* Track a response for later retrieval and status updates
*/
@Override
public void trackResponse(String responseId, ChatResponse response) {
try {
// Ensure consistent ID
if (!responseId.equals(response.getId())) {
response.setId(responseId);
}
// Save to persistent storage (DB)
historyService.updateResponse(response);
// Update cache after successful DB save
responseCache.put(responseId, response);
log.debug("Tracking response: {} for session: {}", responseId, response.getChatId());
} catch (Exception e) {
log.error("Error tracking response: {}", responseId, e);
}
}
/**
* Get a tracked response by ID
*/
@Override
public Optional<ChatResponse> getResponse(String responseId) {
try {
// Check cache first
ChatResponse response = responseCache.get(responseId);
// If not in cache, query from database
if (response == null) {
response = historyService.getResponse(responseId);
// If found in DB, update cache
if (response != null) {
responseCache.put(responseId, response);
}
}
if (response != null) {
log.debug("Found response: {} with completion status: {}",
responseId, response.isCompleted());
} else {
log.debug("Response not found: {}", responseId);
}
return Optional.ofNullable(response);
} catch (Exception e) {
log.error("Error getting response: {}", responseId, e);
return Optional.empty();
}
}
@Override
public void removeResponse(String responseId) {
responseCache.remove(responseId);
runningTasks.remove(responseId);
}
@Override
public void updateResponseStatus(String responseId, ChatResponse response) {
updateResponseStatus(responseId, response.getPercentComplete(), response.isCompleted());
}
/**
* Update the status of a tracked response
*/
public void updateResponseStatus(String responseId, int percentComplete, boolean completed) {
try {
Optional<ChatResponse> responseOpt = getResponse(responseId);
if (responseOpt.isPresent()) {
ChatResponse response = responseOpt.get();
// Only update if the status has changed
if (response.getPercentComplete() != percentComplete ||
response.isCompleted() != completed) {
// Update status
response.setPercentComplete(percentComplete);
response.setCompleted(completed);
// Update in database first
historyService.updateResponse(response);
// Update cache after successful DB update
responseCache.put(responseId, response);
log.debug("Updated response status: {} to {}% complete, completed: {}",
responseId, percentComplete, completed);
}
} else {
log.warn("Cannot update status - response not found: {}", responseId);
}
} catch (Exception e) {
log.error("Error updating response status: {}", responseId, e);
}
}
/**
* Execute a task asynchronously and track its progress
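* <p>
* Minimal usage sketch, given an {@code AsyncResponseTracker} instance {@code tracker} and an
* initial {@code ChatResponse}; the {@code buildFinalResponse} supplier is illustrative only:
* <pre>{@code
* String responseId = tracker.generateResponseId();
* tracker.executeAsync(responseId, initialResponse, () -> buildFinalResponse(responseId));
* }</pre>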
*/
@Override
public <T extends ChatResponse> CompletableFuture<T> executeAsync(
String responseId,
T initialResponse,
Supplier<T> task) {
try {
// Make sure the initial response is tracked
trackResponse(responseId, initialResponse);
// Execute the task asynchronously
CompletableFuture<T> future = CompletableFuture.supplyAsync(() -> {
try {
log.info("Starting async task for response: {}", responseId);
// Execute the task
T result = task.get();
// Update tracking with final result
if (result != null) {
result.setId(responseId);
trackResponse(responseId, result);
log.info("Async task completed successfully for response: {}", responseId);
} else {
// Handle null result
log.error("Async task returned null result for response: {}", responseId);
// Update the initial response to show an error
ChatResponse errorResponse = getResponse(responseId).orElse(initialResponse);
errorResponse.updateOrAddProperty("error", "Task returned null result");
errorResponse.setCompleted(true);
errorResponse.setPercentComplete(100);
// Update tracking
trackResponse(responseId, errorResponse);
// Return the error response cast to expected type
@SuppressWarnings("unchecked")
T typedErrorResponse = (T) errorResponse;
return typedErrorResponse;
}
return result;
} catch (Exception e) {
// Handle exceptions
log.error("Error executing async task for response: {}", responseId, e);
// Update the initial response to show an error
ChatResponse errorResponse = getResponse(responseId).orElse(initialResponse);
errorResponse.updateOrAddProperty("error", e.getMessage());
errorResponse.setCompleted(true);
errorResponse.setPercentComplete(100);
// Update tracking
trackResponse(responseId, errorResponse);
// Return the error response cast to expected type
@SuppressWarnings("unchecked")
T typedErrorResponse = (T) errorResponse;
return typedErrorResponse;
} finally {
// Remove from running tasks when complete
runningTasks.remove(responseId);
}
}, executorService);
// Track the running task for cleanup
runningTasks.put(responseId, future);
return future;
} catch (Exception e) {
log.error("Error setting up async task for response: {}", responseId, e);
// Create error response
ChatResponse errorResponse = initialResponse;
errorResponse.updateOrAddProperty("error", "Failed to start async task: " + e.getMessage());
errorResponse.setCompleted(true);
errorResponse.setPercentComplete(100);
// Update tracking
trackResponse(responseId, errorResponse);
// Return a completed future with the error
@SuppressWarnings("unchecked")
T typedErrorResponse = (T) errorResponse;
return CompletableFuture.completedFuture(typedErrorResponse);
}
}
/**
* Clean up old entries to prevent memory leaks
* Run every 30 minutes
*/
@Scheduled(fixedRate = 30 * 60 * 1000)
public void cleanup() {
try {
long now = System.currentTimeMillis();
long cutoffTime = now - MAX_CACHE_AGE_MS;
int initialSize = responseCache.size();
// Clean up completed responses
responseCache.entrySet().removeIf(entry -> {
ChatResponse response = entry.getValue();
// Remove if it's completed and old
return response.isCompleted() && response.getTimestamp() < cutoffTime;
});
// Log cleanup results
log.info("Response cache cleanup: removed {} entries, {} remaining",
initialSize - responseCache.size(),
responseCache.size());
} catch (Exception e) {
log.error("Error during response cache cleanup", e);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/service/impl/DefaultChatHistoryService.java
|
package ai.driftkit.chat.framework.service.impl;
import ai.driftkit.chat.framework.model.ChatDomain.ChatMessage;
import ai.driftkit.chat.framework.model.ChatDomain.ChatRequest;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import ai.driftkit.chat.framework.repository.ChatMessageRepository;
import ai.driftkit.chat.framework.service.ChatHistoryService;
import ai.driftkit.chat.framework.service.ChatMessageService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
/**
* Default implementation of ChatHistoryService.
* Service for managing chat history.
* Uses database for persistence with a cache for recently accessed items.
*/
@Slf4j
@Service
@ConditionalOnMissingBean(ChatHistoryService.class)
@RequiredArgsConstructor
public class DefaultChatHistoryService implements ChatHistoryService {
// Cache for all messages (requests, responses) - cleared periodically, DB is source of truth
private final Map<String, ChatMessage> messageCache = new ConcurrentHashMap<>();
// Maximum age for cached items in milliseconds (10 minutes)
private static final long MAX_CACHE_AGE_MS = TimeUnit.MINUTES.toMillis(10);
private final ChatMessageService messageService;
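// Optional repository used for direct lookups on cache misses; may be absent if no bean is configured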
@Autowired(required = false)
private ChatMessageRepository messageRepository;
/**
* Add a request to the history
*/
@Override
public void addRequest(ChatRequest request) {
if (request == null || request.getId() == null) {
log.warn("Cannot add null request or request without ID");
return;
}
try {
// Persist to database
messageService.addRequest(request.getChatId(), request);
// Update cache after successful DB save
messageCache.put(request.getId(), request);
log.debug("Added request {} to chat {}", request.getId(), request.getChatId());
} catch (Exception e) {
log.error("Error adding request to history", e);
}
}
/**
* Add a response to the history
*/
@Override
public void addResponse(ChatResponse response) {
if (response == null || response.getId() == null) {
log.warn("Cannot add null response or response without ID");
return;
}
try {
// Persist to database
messageService.addResponse(response.getChatId(), response);
// Update cache after successful DB save
messageCache.put(response.getId(), response);
log.debug("Added response {} to chat {}", response.getId(), response.getChatId());
} catch (Exception e) {
log.error("Error adding response to history", e);
}
}
/**
* Update an existing response
*/
@Override
public void updateResponse(ChatResponse response) {
if (response == null || response.getId() == null) {
log.warn("Cannot update null response or response without ID");
return;
}
try {
// Update in database
messageService.updateResponse(response);
// Update cache after successful DB update
messageCache.put(response.getId(), response);
log.debug("Updated response {} in chat {}", response.getId(), response.getChatId());
} catch (Exception e) {
log.error("Error updating response in history", e);
}
}
/**
* Get a recent request by ID
*/
@Override
public ChatRequest getRequest(String requestId) {
if (requestId == null) {
return null;
}
// Check cache first
ChatMessage message = messageCache.get(requestId);
if (message instanceof ChatRequest) {
return (ChatRequest) message;
}
// If not in cache, query from database
try {
message = messageRepository.findById(requestId).orElse(null);
if (message instanceof ChatRequest) {
// Update cache
messageCache.put(requestId, message);
return (ChatRequest) message;
}
} catch (Exception e) {
log.error("Error getting request from database", e);
}
return null;
}
/**
* Get a recent response by ID
*/
@Override
public ChatResponse getResponse(String responseId) {
if (responseId == null) {
return null;
}
// Check cache first
ChatMessage message = messageCache.get(responseId);
if (message instanceof ChatResponse) {
return (ChatResponse) message;
}
// If not in cache, query from database
try {
message = messageRepository.findById(responseId).orElse(null);
if (message instanceof ChatResponse) {
// Update cache
messageCache.put(responseId, message);
return (ChatResponse) message;
}
} catch (Exception e) {
log.error("Error getting response from database", e);
}
return null;
}
/**
* Get a message by ID
*/
@Override
public ChatMessage getMessage(String messageId) {
if (messageId == null) {
return null;
}
// Check cache first
ChatMessage message = messageCache.get(messageId);
if (message != null) {
return message;
}
// If not in cache, query from database
try {
message = messageRepository.findById(messageId).orElse(null);
if (message != null) {
// Update cache
messageCache.put(messageId, message);
}
return message;
} catch (Exception e) {
log.error("Error getting message from database", e);
return null;
}
}
/**
* Get all messages for a chat, ordered by timestamp (newest first)
*/
@Override
public Page<ChatMessage> getMessages(String chatId, Pageable pageable) {
if (chatId == null) {
return Page.empty();
}
try {
// Query from database
return messageRepository.findByChatIdOrderByTimestampDesc(chatId, pageable);
} catch (Exception e) {
log.error("Error getting messages for chat {}: {}", chatId, e.getMessage(), e);
return Page.empty();
}
}
/**
* Clean up old entries from cache every hour to prevent memory leaks
*/
@Scheduled(fixedRate = 60 * 60 * 1000) // Run every hour
public void cleanup() {
try {
long now = System.currentTimeMillis();
long cutoffTime = now - MAX_CACHE_AGE_MS;
// Clean up message cache
int messagesBefore = messageCache.size();
messageCache.entrySet().removeIf(entry -> {
ChatMessage message = entry.getValue();
return message.getTimestamp() < cutoffTime;
});
log.info("Cache cleanup complete. Messages: {} -> {}",
messagesBefore, messageCache.size());
} catch (Exception e) {
log.error("Error during cache cleanup", e);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/util/ApplicationContextProvider.java
|
package ai.driftkit.chat.framework.util;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
/**
* Utility class to provide access to Spring Application Context
* from non-Spring managed classes.
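* <p>
* Typical usage from code that is not managed by Spring (the {@code MyService} type is illustrative):
* <pre>{@code
* MyService service = ApplicationContextProvider.getBean(MyService.class);
* }</pre>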
*/
@Component
public class ApplicationContextProvider implements ApplicationContextAware {
private static ApplicationContext context;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
context = applicationContext;
}
/**
* Get the Spring Application Context
* @return The application context
*/
public static ApplicationContext getApplicationContext() {
return context;
}
/**
* Get a bean from the application context
* @param clazz The class of the bean
* @return The bean instance
*/
public static <T> T getBean(Class<T> clazz) {
return context.getBean(clazz);
}
/**
* Get a bean from the application context by name
* @param name The name of the bean
* @return The bean instance
*/
public static Object getBean(String name) {
return context.getBean(name);
}
/**
* Get a bean from the application context by name and class
* @param name The name of the bean
* @param clazz The class of the bean
* @return The bean instance
*/
public static <T> T getBean(String name, Class<T> clazz) {
return context.getBean(name, clazz);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/util/SchemaUtils.java
|
package ai.driftkit.chat.framework.util;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema.AIFunctionProperty;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema.SchemaName;
import ai.driftkit.chat.framework.annotations.SchemaClass;
import ai.driftkit.common.utils.JsonUtils;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import java.lang.reflect.Field;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
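/**
 * Utility methods for deriving {@link AIFunctionSchema} instances from annotated classes,
 * caching them, and converting between string property maps and schema-class instances.
 */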
@Slf4j
public class SchemaUtils {
private static final Map<Class<?>, AIFunctionSchema> schemaCache = new ConcurrentHashMap<>();
private static final Map<Class<?>, List<AIFunctionSchema>> composableSchemaCache = new ConcurrentHashMap<>();
private static final List<AIFunctionSchema> schemasList = new CopyOnWriteArrayList<>();
public static List<AIFunctionSchema> getSchemas() {
List<AIFunctionSchema> allSchemas = new ArrayList<>();
allSchemas.addAll(schemasList);
allSchemas.addAll(schemaCache.values());
composableSchemaCache.values().forEach(allSchemas::addAll);
return allSchemas;
}
public static void addSchema(AIFunctionSchema schema) {
schemasList.add(schema);
}
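/**
 * Return all schemas for a class. Classes marked composable yield one single-property schema
 * per field; all other classes yield a single schema.
 */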
public static List<AIFunctionSchema> getAllSchemasFromClass(Class<?> schemaClass) {
if (schemaClass == null || schemaClass == void.class) {
return List.of();
}
SchemaClass schemaAnnotation = schemaClass.getAnnotation(SchemaClass.class);
if (schemaAnnotation != null && schemaAnnotation.composable()) {
if (composableSchemaCache.containsKey(schemaClass)) {
return composableSchemaCache.get(schemaClass);
}
List<AIFunctionSchema> composableSchemas = createComposableSchemas(schemaClass, schemaAnnotation);
composableSchemaCache.put(schemaClass, composableSchemas);
return composableSchemas;
} else {
return List.of(getSchemaFromClass(schemaClass));
}
}
private static List<AIFunctionSchema> createComposableSchemas(Class<?> schemaClass, SchemaClass annotation) {
List<AIFunctionSchema> schemas = new ArrayList<>();
AIFunctionSchema baseSchema = AIFunctionSchema.fromClass(schemaClass);
String baseSchemaId = !annotation.id().isEmpty() ? annotation.id() : schemaClass.getSimpleName();
for (AIFunctionProperty property : baseSchema.getProperties()) {
AIFunctionSchema fieldSchema = new AIFunctionSchema(
baseSchemaId + "_" + property.getName(),
List.of(property)
);
fieldSchema.setDescription(property.getDescription());
fieldSchema.setComposable(true);
schemas.add(fieldSchema);
}
return schemas;
}
public static AIFunctionSchema getSchemaFromClass(Class<?> schemaClass) {
if (schemaClass == null || schemaClass == void.class) {
return null;
}
if (schemaCache.containsKey(schemaClass)) {
return schemaCache.get(schemaClass);
}
AIFunctionSchema schema = AIFunctionSchema.fromClass(schemaClass);
SchemaClass annotation = schemaClass.getAnnotation(SchemaClass.class);
if (annotation != null) {
if (!annotation.id().isEmpty()) {
schema.setSchemaName(annotation.id());
}
if (!annotation.description().isEmpty()) {
log.debug("Schema description for {}: {}", schemaClass.getName(), annotation.description());
}
if (annotation.composable()) {
schema.setComposable(true);
log.debug("Schema {} is marked as composable", schemaClass.getName());
}
}
schemaCache.put(schemaClass, schema);
return schema;
}
public static String getSchemaId(Class<?> schemaClass) {
if (schemaClass == null || schemaClass == void.class) {
return null;
}
SchemaName schemaNameAnnotation = schemaClass.getAnnotation(SchemaName.class);
if (schemaNameAnnotation != null) {
return schemaNameAnnotation.value();
}
SchemaClass annotation = schemaClass.getAnnotation(SchemaClass.class);
if (annotation != null && !annotation.id().isEmpty()) {
return annotation.id();
}
return schemaClass.getSimpleName();
}
public static void clearCache() {
schemaCache.clear();
}
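/**
 * Instantiate the given schema class and populate its fields from a string property map,
 * honoring {@code @JsonAlias} aliases. Returns {@code null} if instantiation fails.
 */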
public static <T> T createInstance(Class<T> schemaClass, Map<String, String> properties) {
if (schemaClass == null || schemaClass == void.class) {
return null;
}
try {
T instance = schemaClass.getDeclaredConstructor().newInstance();
if (properties != null && !properties.isEmpty()) {
for (Field field : schemaClass.getDeclaredFields()) {
field.setAccessible(true);
String propertyName = field.getName();
JsonAlias jsonAlias = field.getAnnotation(JsonAlias.class);
if (jsonAlias != null) {
String matchedProperty = null;
String matchedValue = null;
if (properties.containsKey(propertyName)) {
matchedProperty = propertyName;
matchedValue = properties.get(propertyName);
} else {
for (String alias : jsonAlias.value()) {
if (properties.containsKey(alias)) {
matchedProperty = alias;
matchedValue = properties.get(alias);
log.debug("Found property via JsonAlias: {} -> {} for field {}",
alias, matchedValue, propertyName);
break;
}
}
}
if (matchedProperty != null) {
setFieldValue(field, instance, matchedValue);
}
} else if (properties.containsKey(propertyName)) {
String propertyValue = properties.get(propertyName);
setFieldValue(field, instance, propertyValue);
}
}
}
return instance;
} catch (Exception e) {
log.error("Error creating instance of {}: {}", schemaClass.getName(), e.getMessage(), e);
return null;
}
}
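/**
 * Flatten an object's fields (including inherited ones) into a string property map.
 * Scalars use {@code toString()}; collections, maps, arrays and other complex values are
 * serialized to JSON where possible.
 */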
public static Map<String, String> extractProperties(Object object) {
if (object == null) {
return Map.of();
}
Map<String, String> properties = new HashMap<>();
try {
// Get all fields including inherited ones
List<Field> allFields = getAllFields(object.getClass());
for (Field field : allFields) {
field.setAccessible(true);
String propertyName = field.getName();
Object value = field.get(object);
if (value != null) {
if (value instanceof String || value instanceof Number || value instanceof Boolean || value instanceof Enum) {
properties.put(propertyName, value.toString());
} else if (value instanceof Collection || value instanceof Map || value.getClass().isArray()) {
try {
ObjectMapper objectMapper = new ObjectMapper();
properties.put(propertyName, objectMapper.writeValueAsString(value));
} catch (Exception e) {
log.warn("Error serializing collection field {}: {}", propertyName, e.getMessage());
properties.put(propertyName, value.toString());
}
} else {
try {
ObjectMapper objectMapper = new ObjectMapper();
properties.put(propertyName, objectMapper.writeValueAsString(value));
} catch (Exception e) {
log.warn("Error serializing complex field {}: {}", propertyName, e.getMessage());
properties.put(propertyName, value.toString());
}
}
}
}
} catch (Exception e) {
log.error("Error extracting properties from {}: {}",
object.getClass().getName(), e.getMessage(), e);
}
return properties;
}
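/**
 * Merge previously collected composable-schema data with newly submitted properties.
 * Non-composable classes simply return the new properties. Returns the combined map once all
 * required fields of the schema are present, or {@code null} while required fields are missing.
 */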
public static Map<String, String> combineComposableSchemaData(
Class<?> schemaClass,
Map<String, String> existingProperties,
Map<String, String> newProperties,
String schemaId) {
if (schemaClass == null) {
return newProperties;
}
SchemaClass annotation = schemaClass.getAnnotation(SchemaClass.class);
if (annotation == null || !annotation.composable()) {
return newProperties;
}
Map<String, String> combinedProperties = existingProperties != null
? new LinkedHashMap<>(existingProperties)
: new LinkedHashMap<>();
if (newProperties != null) {
combinedProperties.putAll(newProperties);
}
AIFunctionSchema schema = getSchemaFromClass(schemaClass);
List<String> requiredFields = schema.getProperties().stream()
.filter(AIFunctionProperty::isRequired)
.map(AIFunctionProperty::getName)
.collect(Collectors.toList());
boolean isComplete = requiredFields.stream().allMatch(combinedProperties::containsKey);
log.debug("Composable schema data for {}: combined={}, required={}, isComplete={}",
schemaId, combinedProperties.keySet(), requiredFields, isComplete);
return isComplete ? combinedProperties : null;
}
/**
* Helper method to get all fields from a class including inherited fields
*/
private static List<Field> getAllFields(Class<?> clazz) {
List<Field> fields = new ArrayList<>();
while (clazz != null && clazz != Object.class) {
fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
clazz = clazz.getSuperclass();
}
return fields;
}
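/**
 * Set a single field from its string representation, converting to the field's declared type.
 * Simple bracketed list/map syntax is parsed manually; JSON strings are deserialized for complex types.
 */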
private static void setFieldValue(Field field, Object instance, String value) {
try {
Class<?> fieldType = field.getType();
if (fieldType == String.class) {
field.set(instance, value);
} else if (fieldType == int.class || fieldType == Integer.class) {
field.set(instance, Integer.parseInt(value));
} else if (fieldType == long.class || fieldType == Long.class) {
field.set(instance, Long.parseLong(value));
} else if (fieldType == double.class || fieldType == Double.class) {
field.set(instance, Double.parseDouble(value));
} else if (fieldType == float.class || fieldType == Float.class) {
field.set(instance, Float.parseFloat(value));
} else if (fieldType == boolean.class || fieldType == Boolean.class) {
field.set(instance, Boolean.parseBoolean(value));
} else if (fieldType.isEnum()) {
@SuppressWarnings("unchecked")
Enum<?> enumValue = Enum.valueOf(fieldType.asSubclass(Enum.class), value);
field.set(instance, enumValue);
} else if (List.class.isAssignableFrom(fieldType)) {
if (value.startsWith("[") && value.endsWith("]")) {
String trimmedValue = value.substring(1, value.length() - 1);
String[] items = trimmedValue.split(",");
List<String> list = new ArrayList<>();
for (String item : items) {
list.add(item.trim());
}
field.set(instance, list);
}
} else if (Map.class.isAssignableFrom(fieldType)) {
if (value.startsWith("{") && value.endsWith("}")) {
Map<String, String> map = new HashMap<>();
String trimmedValue = value.substring(1, value.length() - 1);
String[] pairs = trimmedValue.split(",");
for (String pair : pairs) {
String[] keyValue = pair.split(":");
if (keyValue.length == 2) {
map.put(keyValue[0].trim(), keyValue[1].trim());
}
}
field.set(instance, map);
}
} else if (JsonUtils.isJSON(value)) {
// If value is JSON and field type is an Object (custom class), try to parse it
try {
Object parsedValue = JsonUtils.fromJson(value, fieldType);
field.set(instance, parsedValue);
} catch (Exception e) {
log.warn("Failed to parse JSON value for field {} of type {}: {}", field.getName(), fieldType.getName(), e.getMessage());
}
} else {
log.warn("Unsupported field type: {}", fieldType.getName());
}
} catch (Exception e) {
log.error("Error setting field {}: {}", field.getName(), e.getMessage(), e);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/workflow/AnnotatedWorkflow.java
|
package ai.driftkit.chat.framework.workflow;
import ai.driftkit.chat.framework.ai.client.AiClient;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema.SchemaName;
import ai.driftkit.chat.framework.ai.utils.AIUtils;
import ai.driftkit.chat.framework.model.ChatDomain.ChatRequest;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse.NextSchema;
import ai.driftkit.chat.framework.model.ChatDomain.MessageType;
import ai.driftkit.chat.framework.util.SchemaUtils;
import ai.driftkit.chat.framework.annotations.SchemaClass;
import ai.driftkit.chat.framework.annotations.AsyncStep;
import ai.driftkit.chat.framework.annotations.WorkflowStep;
import ai.driftkit.chat.framework.events.AsyncTaskEvent;
import ai.driftkit.chat.framework.events.StepEvent;
import ai.driftkit.chat.framework.events.WorkflowTransitionEvent;
import ai.driftkit.chat.framework.model.StepDefinition;
import ai.driftkit.chat.framework.model.WorkflowContext;
import ai.driftkit.chat.framework.service.AsyncResponseTracker;
import ai.driftkit.chat.framework.service.ChatHistoryService;
import ai.driftkit.chat.framework.service.ChatMessageService;
import ai.driftkit.chat.framework.repository.WorkflowContextRepository;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.expression.Expression;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;
import java.util.stream.Collectors;
/**
* Base class for workflows that use annotations to define steps.
* This class handles the automatic discovery and registration of workflow steps
* based on annotations. It implements the ChatWorkflow interface for integration
* with the chat assistant framework.
*/
@Slf4j
public abstract class AnnotatedWorkflow implements ChatWorkflow {
@Autowired
protected AiClient aiClient;
@Autowired
protected AsyncResponseTracker asyncResponseTracker;
@Autowired
protected ChatHistoryService historyService;
@Autowired
protected ChatMessageService messageService;
@Autowired
protected WorkflowContextRepository workflowContextRepository;
// Expression parser for conditions
private final ExpressionParser expressionParser = new SpelExpressionParser();
// Cache for step methods
private final Map<String, StepMetadata> stepMetadata = new ConcurrentHashMap<>();
private final Map<String, AsyncStepMetadata> asyncStepMetadata = new ConcurrentHashMap<>();
private final Map<String, Method> asyncStepMethods = new ConcurrentHashMap<>(); // Kept for backward compatibility
private List<StepDefinition> stepDefinitions = new ArrayList<>();
// Cache for schemas automatically generated from classes
private final Map<Class<?>, AIFunctionSchema> schemaCache = new ConcurrentHashMap<>();
private final Map<String, AIFunctionSchema> schemaIdCache = new ConcurrentHashMap<>();
// Static class to store metadata about steps
private static class StepMetadata {
Method method;
WorkflowStep annotation;
List<Class<?>> inputClasses = new ArrayList<>();
List<Class<?>> outputClasses = new ArrayList<>();
List<Class<?>> nextClasses = new ArrayList<>();
List<Parameter> parameters;
}
// Static class to store metadata about async steps
private static class AsyncStepMetadata {
Method method;
AsyncStep annotation;
List<Class<?>> inputClasses = new ArrayList<>();
List<Class<?>> outputClasses = new ArrayList<>();
List<Parameter> parameters;
}
public AnnotatedWorkflow() {
// Discover and register steps
discoverSteps();
// Register this workflow in the registry
WorkflowRegistry.registerWorkflow(this);
}
/**
* The ID of this workflow.
*/
public abstract String getWorkflowId();
/**
* Whether this workflow can handle a message based on its content and properties.
*/
public abstract boolean canHandle(String message, Map<String, String> properties);
/**
* Process a chat request and return a response.
*/
public ChatResponse processChat(ChatRequest request) {
try {
// Get or create a session for this chat from the central registry
WorkflowContext session = WorkflowRegistry.getOrCreateSession(request.getChatId(), this);
// Initialize a new response
String responseId = asyncResponseTracker.generateResponseId();
// Store the current request in the session
Map<String, String> properties = new HashMap<>(request.getPropertiesMap());
// Process the chat request based on current session state
switch (session.getState()) {
case NEW:
// Start the workflow from the first step
return startWorkflow(session, request, responseId, properties);
case PROCESSING:
case WAITING_FOR_USER_INPUT:
// Continue the workflow with user input
return continueWorkflow(session, request, responseId, properties);
case COMPLETED:
// Restart the workflow
return startWorkflow(session, request, responseId, properties);
default:
throw new IllegalStateException("Unknown session state: " + session.getState());
}
} catch (Exception e) {
log.error("Error processing chat request", e);
throw new RuntimeException("Failed to process chat request", e);
}
}
/**
* Get all steps defined in this workflow.
*/
public List<StepDefinition> getStepDefinitions() {
return Collections.unmodifiableList(stepDefinitions);
}
protected void saveContext(WorkflowContext session) {
workflowContextRepository.saveOrUpdate(session);
}
protected Optional<WorkflowContext> getContext(String sessionId) {
return workflowContextRepository.findById(sessionId);
}
/**
* Create an error response.
*/
protected ChatResponse createErrorResponse(WorkflowContext session, String errorMessage) {
// Ensure session has a response ID
if (session.getCurrentResponseId() == null) {
session.setCurrentResponseId(asyncResponseTracker.generateResponseId());
}
return ChatResponse.fromSessionWithError(session, getWorkflowId(), errorMessage);
}
// Private methods
/**
* Start the workflow from the first step.
*/
private ChatResponse startWorkflow(WorkflowContext session, ChatRequest request,
String responseId, Map<String, String> properties) {
// Reset session state
session.setCurrentStepId(getFirstStepId());
session.setCurrentResponseId(responseId);
// Add properties to session context
if (properties != null && !properties.isEmpty()) {
session.getProperties().putAll(properties);
}
return executeStep(session, request, properties);
}
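/**
 * Resolve the step to run from the request and session, handle composable-schema field
 * submissions, execute the step, and persist the updated session.
 */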
@Nullable
private ChatResponse executeStep(WorkflowContext session, ChatRequest request, Map<String, String> properties) {
if (request.getId() == null) {
request.setId(UUID.randomUUID().toString());
}
session.setState(WorkflowContext.WorkflowSessionState.PROCESSING);
if (session.getCurrentStepId() == null) {
session.setCurrentStepId(getFirstStepId());
}
if (request.getRequestSchemaName() == null) {
request.setRequestSchemaName(session.getCurrentStepId());
} else {
session.setCurrentStepId(request.getRequestSchemaName());
}
// Handle composable schemas - when frontend sends data field by field
String requestSchemaName = request.getRequestSchemaName();
// Check if this is a field from a composable schema
if (requestSchemaName != null && requestSchemaName.contains("_")) {
// Extract base schema name (e.g., "checkInfo" from "checkInfo_message")
String baseSchemaName = requestSchemaName.substring(0, requestSchemaName.indexOf("_"));
// Check if we have a step with this base schema name
StepMetadata stepMetadata = this.stepMetadata.get(baseSchemaName);
if (stepMetadata != null && !stepMetadata.inputClasses.isEmpty()) {
// This is a composable schema field request
Class<?> schemaClass = stepMetadata.inputClasses.get(0);
// Check if this is a composable schema
SchemaClass schemaAnnotation = schemaClass.getAnnotation(SchemaClass.class);
if (schemaAnnotation != null && schemaAnnotation.composable()) {
log.debug("Detected composable schema field: {} for base schema: {}",
requestSchemaName, baseSchemaName);
// Get existing partial data from the session
@SuppressWarnings("unchecked")
Map<String, String> existingData = session.getContextValue(
"composable_" + baseSchemaName, Map.class);
// Combine with the new data
Map<String, String> combinedData = SchemaUtils.combineComposableSchemaData(
schemaClass, existingData, properties, baseSchemaName);
// Store updated data back in the session
session.setContextValue("composable_" + baseSchemaName,
combinedData != null ? combinedData : (existingData != null ? existingData : new HashMap<>(properties)));
if (combinedData == null) {
// Not all required fields are present yet, return a response asking for the next field
// Find the next field schema
List<AIFunctionSchema> fieldSchemas = SchemaUtils.getAllSchemasFromClass(schemaClass);
// Skip fields we already have data for
Map<String, String> currentData = session.getContextValue(
"composable_" + baseSchemaName, Map.class);
if (currentData != null) {
String nextFieldSchema = fieldSchemas.stream()
.filter(schema -> {
// Extract field name from schema name (e.g., "message" from "checkInfo_message")
String fieldName = schema.getSchemaName().substring(schema.getSchemaName().indexOf("_") + 1);
return !currentData.containsKey(fieldName);
})
.findFirst()
.map(AIFunctionSchema::getSchemaName)
.orElse(null);
if (nextFieldSchema != null) {
ChatResponse response = ChatResponse.fromSession(
session,
session.getWorkflowId(),
Map.of("messageId", "composable_schema_fields_in_progress")
);
// Save the user message to history
saveUserMessage(session.getContextId(), request);
// Set state to waiting for more input
session.setState(WorkflowContext.WorkflowSessionState.WAITING_FOR_USER_INPUT);
WorkflowRegistry.saveSession(session);
return response;
}
}
} else {
// All required fields are present, continue with the full data
// Replace the current properties with the combined data for processing
properties = combinedData;
// Set the correct schema name for the step
request.setRequestSchemaName(baseSchemaName);
session.setCurrentStepId(baseSchemaName);
// Clear the composable data from the session
session.setContextValue("composable_" + baseSchemaName, null);
}
}
}
}
Optional<StepDefinition> stepDefinition = stepDefinitions.stream()
.filter(e -> e.getId().equals(requestSchemaName))
.findAny();
if (stepDefinition.isPresent()) {
List<AIFunctionSchema> inputSchemas = stepDefinition.get().getInputSchemas();
if (CollectionUtils.isNotEmpty(inputSchemas)) {
request.setComposable(inputSchemas.getFirst().isComposable());
request.fillCurrentSchema(inputSchemas);
}
}
// Execute the step
ChatResponse response = executeStep(session, properties);
String currentStepId = Optional.ofNullable(response.getNextSchema()).map(NextSchema::getSchemaName).orElse(session.getCurrentStepId());
session.setCurrentStepId(currentStepId);
// Save the user message to history
saveUserMessage(session.getContextId(), request);
// Save the updated session
WorkflowRegistry.saveSession(session);
return response;
}
/**
* Continue the workflow with user input.
*/
private ChatResponse continueWorkflow(WorkflowContext session, ChatRequest request,
String responseId, Map<String, String> properties) {
session.setCurrentResponseId(responseId);
return executeStep(session, request, properties);
}
/**
* Execute a step and return the response.
*/
private ChatResponse executeStep(WorkflowContext session, Map<String, String> properties) {
try {
// Get the current step ID
String stepId = session.getCurrentStepId();
if (stepId == null) {
throw new IllegalStateException("No current step ID in session");
}
// Get the step metadata
StepMetadata metadata = stepMetadata.get(stepId);
if (metadata == null) {
throw new IllegalStateException("No metadata found for step: " + stepId);
}
// Prepare arguments for the method
Object[] args = prepareMethodArguments(metadata, properties, session);
// Invoke the step method
StepEvent result = (StepEvent) metadata.method.invoke(this, args);
// Store any output data in the session
if (result.getProperties() != null) {
session.putAll(result.getProperties());
}
// Process the result
if (result instanceof AsyncTaskEvent) {
// Handle async task
return handleAsyncTask(session, (AsyncTaskEvent) result);
} else {
// Determine the next step based on conditions and flow rules
String nextStepId = determineNextStep(metadata.annotation, result, session);
if (nextStepId != null) {
result.setNextStepId(nextStepId);
}
// Handle normal step result
return handleStepResult(session, result);
}
} catch (Exception e) {
log.error("Error executing step", e);
return createErrorResponse(session, "Error executing step: " + e.getMessage());
}
}
/**
* Prepare arguments for a step method based on its parameter types.
* Automatically converts properties to input objects as needed.
*/
private Object[] prepareMethodArguments(StepMetadata metadata, Map<String, String> properties, WorkflowContext session) {
List<Object> args = new ArrayList<>();
for (Parameter param : metadata.parameters) {
Class<?> paramType = param.getType();
if (Map.class.isAssignableFrom(paramType)) {
// For Map parameters, pass the properties directly
args.add(properties);
} else if (WorkflowContext.class.isAssignableFrom(paramType)) {
// For WorkflowContext parameters, pass the session
args.add(session);
} else {
// Check if the parameter type matches any of the input classes
boolean matched = false;
for (Class<?> inputClass : metadata.inputClasses) {
if (inputClass.equals(paramType)) {
// Convert properties to an instance of the input class
Object inputObject = createSchemaInstance(inputClass, properties);
args.add(inputObject);
matched = true;
break;
}
}
if (!matched) {
// For other parameter types, try to find a matching property
String paramName = param.getName();
if (properties.containsKey(paramName)) {
args.add(convertValue(properties.get(paramName), paramType));
} else {
// If no matching property, pass null
args.add(null);
}
}
}
}
return args.toArray();
}
/**
* Convert a string value to the specified type.
*/
private Object convertValue(String value, Class<?> targetType) {
if (value == null) {
return null;
}
if (String.class.equals(targetType)) {
return value;
} else if (Integer.class.equals(targetType) || int.class.equals(targetType)) {
return Integer.parseInt(value);
} else if (Long.class.equals(targetType) || long.class.equals(targetType)) {
return Long.parseLong(value);
} else if (Boolean.class.equals(targetType) || boolean.class.equals(targetType)) {
return Boolean.parseBoolean(value);
} else if (Double.class.equals(targetType) || double.class.equals(targetType)) {
return Double.parseDouble(value);
} else if (Float.class.equals(targetType) || float.class.equals(targetType)) {
return Float.parseFloat(value);
} else if (targetType.isEnum()) {
return Enum.valueOf((Class<Enum>) targetType, value);
} else {
try {
// For complex types, try to use Jackson
return AIUtils.OBJECT_MAPPER.readValue(value, targetType);
} catch (Exception e) {
log.warn("Failed to convert value '{}' to type {}: {}", value, targetType.getName(), e.getMessage());
return null;
}
}
}
/**
* Prepare arguments for an async step method based on its parameter types.
* Similar to prepareMethodArguments but for async steps.
*/
private Object[] prepareAsyncMethodArguments(AsyncStepMetadata metadata, Map<String, Object> properties, WorkflowContext session) {
List<Object> args = new ArrayList<>();
for (Parameter param : metadata.parameters) {
Class<?> paramType = param.getType();
if (Map.class.isAssignableFrom(paramType)) {
// For Map parameters, pass the properties directly
args.add(properties);
} else if (WorkflowContext.class.isAssignableFrom(paramType)) {
// For WorkflowContext parameters, pass the session
args.add(session);
} else {
// For other parameter types, try to find a matching property
String paramName = param.getName();
if (properties.containsKey(paramName)) {
args.add(properties.get(paramName));
} else {
// If no matching property, pass null
args.add(null);
}
}
}
return args.toArray();
}
/**
* Determine the next step based on conditions and flow rules.
*/
private String determineNextStep(WorkflowStep annotation, StepEvent result, WorkflowContext session) {
// If result has a next step ID, use that
if (result.getNextStepId() != null) {
return result.getNextStepId();
}
// Check if there's a condition in the annotation
if (StringUtils.hasText(annotation.condition())) {
boolean conditionResult = evaluateCondition(annotation.condition(), result, session);
if (conditionResult && StringUtils.hasText(annotation.onTrue())) {
return annotation.onTrue();
} else if (!conditionResult && StringUtils.hasText(annotation.onFalse())) {
return annotation.onFalse();
}
}
if (result.getNextInputSchema() != null) {
return result.getNextInputSchema().getSchemaName();
}
// Check if there are next steps defined in the annotation
if (annotation.nextSteps().length > 0) {
return annotation.nextSteps()[0];
}
// Otherwise, let the framework determine the next step
return null;
}
/**
* Evaluate a condition expression.
*/
private boolean evaluateCondition(String conditionExpression, StepEvent stepEvent, WorkflowContext session) {
try {
Expression expression = expressionParser.parseExpression(conditionExpression);
StandardEvaluationContext context = new StandardEvaluationContext();
// Add step event to context
context.setVariable("event", stepEvent);
// Add properties to context
if (stepEvent.getProperties() != null) {
for (Map.Entry<String, String> entry : stepEvent.getProperties().entrySet()) {
context.setVariable(entry.getKey(), entry.getValue());
}
}
// Add session properties to context
for (Map.Entry<String, String> entry : new HashMap<>(session.getProperties()).entrySet()) {
context.setVariable(entry.getKey(), entry.getValue());
}
// Add session to context
context.setVariable("session", session);
// Evaluate the expression
Object result = expression.getValue(context);
if (result instanceof Boolean) {
return (Boolean) result;
} else {
log.warn("Condition expression '{}' did not evaluate to a boolean: {}", conditionExpression, result);
return false;
}
} catch (Exception e) {
log.error("Error evaluating condition expression '{}': {}", conditionExpression, e.getMessage());
return false;
}
}
/**
* Handle an asynchronous task.
*/
private ChatResponse handleAsyncTask(WorkflowContext session, AsyncTaskEvent event) {
try {
// Register the response for tracking
String responseId = session.getCurrentResponseId();
// Create the response using the helper method
ChatResponse response = ChatResponse.fromSession(
session,
getWorkflowId(),
event.getProperties()
);
response.fillCurrentSchema(event.getCurrentSchema());
// Add next schema if available
if (event.getNextInputSchema() != null) {
response.setNextSchemaAsSchema(event.getNextInputSchema());
}
// Set completion status and required flag
response.setCompleted(event.isCompleted());
response.setPercentComplete(event.getPercentComplete());
response.setRequired(event.isRequired());
// Register for tracking
asyncResponseTracker.trackResponse(responseId, response);
// Find the async step metadata
AsyncStepMetadata asyncMetadata = getAsyncStepMetadata(event);
// Create a supplier for the async task
Supplier<ChatResponse> asyncTask = () -> {
try {
// Prepare arguments for the async method based on parameter types
Map<String, Object> stringProps = new HashMap<>();
if (event.getTaskArgs() != null) {
for (Map.Entry<String, Object> entry : event.getTaskArgs().entrySet()) {
if (entry.getValue() != null) {
stringProps.put(entry.getKey(), entry.getValue());
}
}
}
// Build the argument array via prepareAsyncMethodArguments
Object[] asyncArgs = prepareAsyncMethodArguments(asyncMetadata, stringProps, session);
// Invoke the async method
StepEvent asyncResult = (StepEvent) asyncMetadata.method.invoke(this, asyncArgs);
// Update the response with the result
if (asyncResult != null) {
response.setCompleted(asyncResult.isCompleted());
response.setPercentComplete(asyncResult.getPercentComplete());
response.setRequired(asyncResult.isRequired());
if (asyncResult.getProperties() != null) {
response.setPropertiesMap(asyncResult.getProperties());
// Store result properties in session
session.putAll(asyncResult.getProperties());
}
}
// Update the response with any changes from the asyncResult (guard against a null result)
if (asyncResult != null && asyncResult.getNextInputSchema() != null) {
// Make sure the nextInputSchema is properly set in the response
response.setNextSchemaAsSchema(asyncResult.getNextInputSchema());
log.debug("Setting next request schema from asyncResult: {}",
asyncResult.getNextInputSchema().getSchemaName());
}
// Update the tracked response
asyncResponseTracker.updateResponseStatus(responseId, response);
// Update session state if completed
if (asyncResult != null && asyncResult.isCompleted()) {
// Move to the next step if specified
if (asyncResult.getNextStepId() != null) {
session.setCurrentStepId(asyncResult.getNextStepId());
}
// Set session state
session.setState(WorkflowContext.WorkflowSessionState.WAITING_FOR_USER_INPUT);
// Save the updated session
WorkflowRegistry.saveSession(session);
// IMPORTANT: Also explicitly update the completed response in history
// This ensures history will show the completed status (100%)
historyService.updateResponse(response);
log.debug("Explicitly updated completed async response in history: {}", responseId);
}
} catch (Exception e) {
log.error("Error executing async task", e);
// Update response with error
Map<String, String> errorProps = new HashMap<>();
errorProps.put("error", "Error executing async task: " + e.getMessage());
response.setPropertiesMap(errorProps);
response.setCompleted(true);
// Preserve next request schema if it was set in AsyncTaskEvent
if (event.getNextInputSchema() != null) {
response.setNextSchemaAsSchema(event.getNextInputSchema());
log.debug("Preserving next request schema in error case: {}",
event.getNextInputSchema().getSchemaName());
}
// Update tracker
asyncResponseTracker.updateResponseStatus(responseId, response);
// IMPORTANT: Also explicitly update the error response in history
// This ensures history will show the completed status (100%)
historyService.updateResponse(response);
log.debug("Explicitly updated error async response in history: {}", responseId);
}
// Return the updated response
return response;
};
// Execute the async task with proper tracking
asyncResponseTracker.executeAsync(responseId, response, asyncTask);
// Set session state
session.setState(WorkflowContext.WorkflowSessionState.WAITING_FOR_USER_INPUT);
// Save the updated session
WorkflowRegistry.saveSession(session);
// Return the initial response
return response;
} catch (Exception e) {
log.error("Error handling async task", e);
return createErrorResponse(session, "Error handling async task: " + e.getMessage());
}
}
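/**
 * Resolve the metadata for an async task by name, falling back to the legacy method lookup
 * when no metadata has been registered.
 */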
@NotNull
private AsyncStepMetadata getAsyncStepMetadata(AsyncTaskEvent event) {
AsyncStepMetadata asyncMetadata = asyncStepMetadata.get(event.getTaskName());
if (asyncMetadata == null) {
// Fall back to legacy method lookup if no metadata found
Method asyncMethod = asyncStepMethods.get(event.getTaskName());
if (asyncMethod == null) {
throw new IllegalStateException("No async method found for task: " + event.getTaskName());
}
// Create minimal metadata for backward compatibility
asyncMetadata = new AsyncStepMetadata();
asyncMetadata.method = asyncMethod;
asyncMetadata.parameters = Arrays.asList(asyncMethod.getParameters());
}
return asyncMetadata;
}
/**
* Handle a normal step result.
*/
private ChatResponse handleStepResult(WorkflowContext session, StepEvent event) {
try {
// Check if this is a workflow transition event
if (event instanceof WorkflowTransitionEvent) {
WorkflowTransitionEvent transitionEvent = (WorkflowTransitionEvent) event;
// Log the transition
log.info("Workflow transition requested from {} to {}",
transitionEvent.getSourceWorkflowId(),
transitionEvent.getTargetWorkflowId());
// Get the target workflow to find the appropriate step
Optional<AnnotatedWorkflow> targetWorkflow = WorkflowRegistry.getWorkflow(transitionEvent.getTargetWorkflowId());
if (targetWorkflow.isEmpty()) {
throw new IllegalStateException("Target workflow not found: " + transitionEvent.getTargetWorkflowId());
}
// Find the target step
StepDefinition targetStep = null;
if (transitionEvent.getTargetStepId() != null) {
// Find step by ID
targetStep = targetWorkflow.get().getStepDefinitions().stream()
.filter(step -> transitionEvent.getTargetStepId().equals(step.getId()))
.findFirst()
.orElseThrow(() -> new IllegalStateException(
"Target step not found: " + transitionEvent.getTargetStepId() +
" in workflow " + transitionEvent.getTargetWorkflowId()));
} else {
// Default to first step with index 0
List<StepDefinition> targetSteps = targetWorkflow.get().getStepDefinitions();
targetStep = targetSteps.stream()
.filter(step -> step.getIndex() == 0)
.findFirst()
.orElse(targetSteps.isEmpty() ? null : targetSteps.get(0));
}
if (targetStep == null) {
throw new IllegalStateException("No suitable target step found in workflow " +
transitionEvent.getTargetWorkflowId());
}
// Create response with transition properties
ChatResponse response = ChatResponse.fromSession(
session,
getWorkflowId(),
event.getProperties()
);
// Set the next schema to the target workflow's step
if (!targetStep.getInputSchemas().isEmpty()) {
response.setNextSchemaAsSchema(targetStep.getInputSchemas().get(0));
}
return response;
}
// Update session state based on result
if (event.getNextStepId() != null) {
session.setCurrentStepId(event.getNextStepId());
} else {
// Move to the next step in the flow if no specific next step is specified
moveToNextStep(session);
}
// Set session state
session.setState(WorkflowContext.WorkflowSessionState.WAITING_FOR_USER_INPUT);
// Save the updated session
WorkflowRegistry.saveSession(session);
// Create response using the helper method
ChatResponse response = ChatResponse.fromSession(
session,
getWorkflowId(),
event.getProperties()
);
// Set required flag from StepEvent
response.setRequired(event.isRequired());
response.fillCurrentSchema(event.getCurrentSchema());
// Add next schema if available
if (event.getNextInputSchema() != null) {
response.setNextSchemaAsSchema(event.getNextInputSchema());
}
return response;
} catch (Exception e) {
log.error("Error handling step result", e);
return createErrorResponse(session, "Error handling step result: " + e.getMessage());
}
}
/**
* Move to the next step in the workflow.
*/
private void moveToNextStep(WorkflowContext session) {
String currentStepId = session.getCurrentStepId();
int currentIndex = -1;
// Find the current step index
for (int i = 0; i < stepDefinitions.size(); i++) {
if (stepDefinitions.get(i).getId().equals(currentStepId)) {
currentIndex = i;
break;
}
}
// Move to the next step if found
if (currentIndex >= 0 && currentIndex < stepDefinitions.size() - 1) {
String nextStepId = stepDefinitions.get(currentIndex + 1).getId();
session.setCurrentStepId(nextStepId);
} else {
// If no next step, mark as completed
session.setState(WorkflowContext.WorkflowSessionState.COMPLETED);
}
}
/**
* Get the first step ID in the workflow.
*/
private String getFirstStepId() {
if (stepDefinitions.isEmpty()) {
throw new IllegalStateException("No steps defined in workflow");
}
return stepDefinitions.get(0).getId();
}
/**
* Save a user message to history.
*/
private void saveUserMessage(String chatId, ChatRequest request) {
historyService.addRequest(request);
}
/**
* Discover and register steps from annotated methods.
*/
private void discoverSteps() {
log.info("Discovering steps for workflow: {}", getWorkflowId());
// Scan for @WorkflowStep annotations
Method[] methods = this.getClass().getMethods();
for (Method method : methods) {
// Process @WorkflowStep annotations
WorkflowStep stepAnnotation = method.getAnnotation(WorkflowStep.class);
if (stepAnnotation != null) {
registerStepMethod(method, stepAnnotation);
}
// Process @AsyncStep annotations
AsyncStep asyncAnnotation = method.getAnnotation(AsyncStep.class);
if (asyncAnnotation != null) {
registerAsyncStepMethod(method, asyncAnnotation);
}
}
// Sort step definitions by their declared index
List<StepDefinition> steps = stepDefinitions.stream()
.sorted(Comparator.comparing(StepDefinition::getIndex))
.collect(Collectors.toList());
this.stepDefinitions = steps;
log.info("Discovered {} steps and {} async steps in workflow: {}",
stepMetadata.size(), asyncStepMethods.size(), getWorkflowId());
}
/**
* Register a step method.
*/
private void registerStepMethod(Method method, WorkflowStep annotation) {
// Process input classes first to derive stepId if needed
StepMetadata metadata = new StepMetadata();
metadata.method = method;
metadata.annotation = annotation;
metadata.parameters = Arrays.asList(method.getParameters());
// Process input classes - first try explicitly defined classes
if (annotation.inputClasses().length > 0) {
// Use explicitly defined multiple input classes
metadata.inputClasses.addAll(Arrays.asList(annotation.inputClasses()));
} else if (annotation.inputClass() != void.class) {
// Use single input class
metadata.inputClasses.add(annotation.inputClass());
} else {
// Auto-discover from method parameters
for (Parameter param : metadata.parameters) {
Class<?> paramType = param.getType();
// Skip framework parameter types
if (Map.class.isAssignableFrom(paramType) || WorkflowContext.class.isAssignableFrom(paramType)) {
continue;
}
// Skip classes without schema annotations
if (!paramType.isAnnotationPresent(SchemaClass.class) && !paramType.isAnnotationPresent(SchemaName.class)) {
continue;
}
// Found a valid schema class parameter
metadata.inputClasses.add(paramType);
log.debug("Auto-discovered input class from parameter: {}", paramType.getName());
break; // Only use the first valid parameter
}
}
// Determine the step ID with the following priority:
// 1. Explicit ID from annotation
// 2. Name of the inputClass if available
// 3. Method name as fallback
String stepId;
if (!annotation.id().isEmpty()) {
// Explicit ID in annotation
stepId = annotation.id();
} else if (!metadata.inputClasses.isEmpty()) {
// Use the first input class name
Class<?> inputClass = metadata.inputClasses.get(0);
// Get name from schema ID first, or class simple name as fallback
stepId = SchemaUtils.getSchemaId(inputClass);
} else {
// Fallback to method name
stepId = method.getName();
}
log.debug("Registering step: {} ({})", stepId, method.getName());
// Process output classes
if (annotation.outputClasses().length > 0) {
// Use explicitly defined output classes (direct step outputs)
metadata.outputClasses.addAll(Arrays.asList(annotation.outputClasses()));
}
// Process next classes
if (annotation.nextClasses().length > 0) {
// Use explicitly defined next classes (possible next step inputs)
metadata.nextClasses.addAll(Arrays.asList(annotation.nextClasses()));
}
stepMetadata.put(stepId, metadata);
// Determine description with priority: WorkflowStep annotation, then inputClass
String description;
if (!annotation.description().isEmpty()) {
// Use explicit description from WorkflowStep annotation
description = annotation.description();
} else if (!metadata.inputClasses.isEmpty()) {
// Try to get description from SchemaClass annotation on inputClass
Class<?> inputClass = metadata.inputClasses.get(0);
SchemaClass schemaAnnotation = inputClass.getAnnotation(SchemaClass.class);
if (schemaAnnotation != null && !schemaAnnotation.description().isEmpty()) {
description = schemaAnnotation.description();
} else {
// Fallback to generic description based on class name
description = "Process " + SchemaUtils.getSchemaId(inputClass);
}
} else {
// Default fallback
description = "Execute " + stepId;
}
// Create a step definition for the workflow graph
StepDefinition definition = StepDefinition.builder()
.id(stepId)
.index(annotation.index())
.action(description)
.userInputRequired(annotation.requiresUserInput())
.asyncExecution(annotation.async())
.build();
// Process input schemas
List<AIFunctionSchema> inputSchemas = processSchemasFromClasses(metadata.inputClasses);
// If no class-based schemas, try ID-based schema
if (inputSchemas.isEmpty() && !annotation.inputSchemaId().isEmpty()) {
AIFunctionSchema schema = getSchemaById(annotation.inputSchemaId());
if (schema != null) {
inputSchemas.add(schema);
}
}
// Set the input schemas
if (!inputSchemas.isEmpty()) {
definition.setInputSchemas(inputSchemas);
}
// Process output schemas
List<AIFunctionSchema> outputSchemas = processSchemasFromClasses(metadata.outputClasses);
// If no class-based schemas, try ID-based schema
if (outputSchemas.isEmpty() && !annotation.outputSchemaId().isEmpty()) {
AIFunctionSchema schema = getSchemaById(annotation.outputSchemaId());
if (schema != null) {
outputSchemas.add(schema);
}
}
// Set the output schemas
if (!outputSchemas.isEmpty()) {
definition.setOutputSchemas(outputSchemas);
}
stepDefinitions.add(definition);
}
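    // A minimal sketch of a step method that this registration logic would pick up
    // (hypothetical method, parameter, and return types - not part of the original source):
    //
    //   @WorkflowStep(index = 0, requiresUserInput = true, description = "Collect the user's name")
    //   public StepEvent askName(WorkflowContext session, AskNameRequest input) { ... }
    //
    // With no explicit id(), the step ID falls back to the schema ID of AskNameRequest
    // (the first schema-annotated parameter), or to the method name "askName" otherwise.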
/**
* Register an async step method.
*/
private void registerAsyncStepMethod(Method method, AsyncStep annotation) {
String stepId = annotation.forStep();
log.debug("Registering async step for: {}", stepId);
// Store the method by its step ID (for backward compatibility)
asyncStepMethods.put(stepId, method);
// Create and store async step metadata
AsyncStepMetadata metadata = new AsyncStepMetadata();
metadata.method = method;
metadata.annotation = annotation;
metadata.parameters = Arrays.asList(method.getParameters());
// Process input classes - first try explicitly defined classes
if (annotation.inputClasses().length > 0) {
// Use explicitly defined multiple input classes
metadata.inputClasses.addAll(Arrays.asList(annotation.inputClasses()));
} else if (annotation.inputClass() != void.class) {
// Use single input class
metadata.inputClasses.add(annotation.inputClass());
} else {
// Auto-discover from method parameters
for (Parameter param : metadata.parameters) {
Class<?> paramType = param.getType();
// Skip framework parameter types
if (Map.class.isAssignableFrom(paramType) || WorkflowContext.class.isAssignableFrom(paramType)) {
continue;
}
// Skip classes without schema annotations
if (!paramType.isAnnotationPresent(SchemaClass.class) && !paramType.isAnnotationPresent(SchemaName.class)) {
continue;
}
// Found a valid schema class parameter
metadata.inputClasses.add(paramType);
log.debug("Auto-discovered input class from parameter: {}", paramType.getName());
break; // Only use the first valid parameter
}
}
        // Process output classes (for async steps, nextClasses define the step outputs)
        if (annotation.nextClasses().length > 0) {
            // Use explicitly defined next classes as the output classes
metadata.outputClasses.addAll(Arrays.asList(annotation.nextClasses()));
} else if (annotation.outputClass() != void.class) {
// Use single output class
metadata.outputClasses.add(annotation.outputClass());
}
// Generate and cache schemas for all input classes
for (Class<?> inputClass : metadata.inputClasses) {
AIFunctionSchema schema = getSchemaFromClass(inputClass);
if (schema != null) {
// Cache the schema by ID for future reference
String schemaId = SchemaUtils.getSchemaId(inputClass);
if (schemaId != null) {
schemaIdCache.put(schemaId, schema);
}
}
}
// Generate and cache schemas for all output classes
for (Class<?> outputClass : metadata.outputClasses) {
AIFunctionSchema schema = getSchemaFromClass(outputClass);
if (schema != null) {
// Cache the schema by ID for future reference
String schemaId = SchemaUtils.getSchemaId(outputClass);
if (schemaId != null) {
schemaIdCache.put(schemaId, schema);
}
}
}
// Store the metadata
asyncStepMetadata.put(stepId, metadata);
}
/**
* Get a schema by its ID.
* First checks the cache, then tries to derive the schema from known classes.
*/
protected AIFunctionSchema getSchemaById(String schemaId) {
if (schemaId == null || schemaId.isEmpty()) {
return null;
}
// Check cache first
if (schemaIdCache.containsKey(schemaId)) {
return schemaIdCache.get(schemaId);
}
// Try to find a class with this schema ID and cache it for next time
for (Class<?> schemaClass : schemaCache.keySet()) {
String derivedId = SchemaUtils.getSchemaId(schemaClass);
if (schemaId.equals(derivedId)) {
AIFunctionSchema schema = schemaCache.get(schemaClass);
schemaIdCache.put(schemaId, schema);
return schema;
}
}
// If not found in cache, subclasses can override this to provide schemas
return null;
}
/**
* Process a list of classes and extract all schemas (regular or composable)
* @param classes List of classes to process
* @return List of schemas (combined for regular and composable)
*/
private List<AIFunctionSchema> processSchemasFromClasses(List<Class<?>> classes) {
List<AIFunctionSchema> schemas = new ArrayList<>();
for (Class<?> cls : classes) {
            // Check if the class has a SchemaClass annotation with composable=true
SchemaClass schemaAnnotation = cls.getAnnotation(SchemaClass.class);
if (schemaAnnotation != null && schemaAnnotation.composable()) {
// Get all composable schemas
List<AIFunctionSchema> composableSchemas = SchemaUtils.getAllSchemasFromClass(cls);
schemas.addAll(composableSchemas);
// Cache each schema by ID
for (AIFunctionSchema schema : composableSchemas) {
if (schema.getSchemaName() != null) {
schemaIdCache.put(schema.getSchemaName(), schema);
}
}
} else {
// Standard schema processing
AIFunctionSchema schema = getSchemaFromClass(cls);
if (schema != null) {
schemas.add(schema);
// Cache the schema by ID for future reference
String schemaId = SchemaUtils.getSchemaId(cls);
if (schemaId != null) {
schemaIdCache.put(schemaId, schema);
}
}
}
}
return schemas;
}
/**
* Get a schema from a class.
* This uses the SchemaUtils class to convert the class to an AIFunctionSchema.
*/
protected AIFunctionSchema getSchemaFromClass(Class<?> schemaClass) {
if (schemaClass == null || schemaClass == void.class) {
return null;
}
// Check cache first
if (schemaCache.containsKey(schemaClass)) {
return schemaCache.get(schemaClass);
}
// Generate schema from class
AIFunctionSchema schema = SchemaUtils.getSchemaFromClass(schemaClass);
// Cache for future use
schemaCache.put(schemaClass, schema);
return schema;
}
/**
* Create an instance of a schema class from properties.
*/
protected <T> T createSchemaInstance(Class<T> schemaClass, Map<String, String> properties) {
return SchemaUtils.createInstance(schemaClass, properties);
}
/**
* Extract properties from a schema class instance.
*/
protected Map<String, String> extractSchemaProperties(Object schemaObject) {
return SchemaUtils.extractProperties(schemaObject);
}
/**
* Convert an object to a map of strings for response properties.
*/
protected Map<String, String> objectToProperties(Object object) {
Map<String, String> properties = new LinkedHashMap<>();
if (object == null) {
return properties;
}
try {
if (object instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) object;
for (Map.Entry<String, Object> entry : map.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (value != null) {
if (value instanceof String || value instanceof Number || value instanceof Boolean) {
properties.put(key, value.toString());
} else {
// Convert complex objects to JSON
properties.put(key, AIUtils.OBJECT_MAPPER.writeValueAsString(value));
}
}
}
} else {
// Use reflection to convert Java object to properties
ReflectionUtils.doWithFields(object.getClass(), field -> {
field.setAccessible(true);
Object value = field.get(object);
if (value != null) {
if (value instanceof Enum || value instanceof String || value instanceof Number || value instanceof Boolean) {
properties.put(field.getName(), value.toString());
} else {
// Convert complex objects to JSON
try {
properties.put(field.getName(), AIUtils.OBJECT_MAPPER.writeValueAsString(value));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
});
}
} catch (JsonProcessingException e) {
log.error("Error converting object to properties", e);
}
return properties;
}
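    // For example (hypothetical values): an object with fields name = "Alice" and
    // address = {city: "Berlin"} becomes {"name": "Alice", "address": "{\"city\":\"Berlin\"}"} -
    // scalar fields are copied as strings, complex values are serialized to JSON.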
protected void addDebugContext(WorkflowContext session, String message) {
log.debug("[{}] [{}] {}", getWorkflowId(), session.getContextId(), message);
messageService.addMessage(session.getContextId(), message, MessageType.CONTEXT);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/workflow/ChatWorkflow.java
|
package ai.driftkit.chat.framework.workflow;
import ai.driftkit.chat.framework.model.ChatDomain.ChatRequest;
import ai.driftkit.chat.framework.model.ChatDomain.ChatResponse;
import java.util.Map;
/**
* Interface for chat-based workflows.
 * This interface now acts as a marker: all implementations should extend
 * AnnotatedWorkflow and implement this interface.
*/
public interface ChatWorkflow {
/**
* Get the unique identifier for this workflow
*/
String getWorkflowId();
/**
* Check if this workflow is suitable for handling the given input
*
* @param message The message from the user
* @param properties Additional properties
* @return true if the workflow can handle this input
*/
boolean canHandle(String message, Map<String, String> properties);
/**
* Process a chat request and return a response.
*
* @param request The chat request to process
* @return A chat response
*/
ChatResponse processChat(ChatRequest request);
}
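// A minimal sketch of an implementation (hypothetical class and logic, assuming a Spring
// @Component context and that AnnotatedWorkflow supplies the remaining behavior):
//
//   @Component
//   public class GreetingWorkflow extends AnnotatedWorkflow implements ChatWorkflow {
//       @Override
//       public String getWorkflowId() { return "greeting-workflow"; }
//
//       @Override
//       public boolean canHandle(String message, Map<String, String> properties) {
//           return message != null && message.toLowerCase().contains("hello");
//       }
//   }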
|
0
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework
|
java-sources/ai/driftkit/driftkit-chat-assistant-framework/0.8.1/ai/driftkit/chat/framework/workflow/WorkflowRegistry.java
|
package ai.driftkit.chat.framework.workflow;
import ai.driftkit.chat.framework.ai.domain.AIFunctionSchema;
import ai.driftkit.chat.framework.model.StepDefinition;
import ai.driftkit.chat.framework.model.WorkflowContext;
import ai.driftkit.chat.framework.repository.WorkflowContextRepository;
import ai.driftkit.chat.framework.util.ApplicationContextProvider;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* Registry for workflow sessions and workflows.
* Provides centralized management of workflow state.
*/
@Slf4j
public class WorkflowRegistry {
private static final Map<String, WorkflowContext> sessions = new ConcurrentHashMap<>();
private static final Map<String, AnnotatedWorkflow> workflows = new ConcurrentHashMap<>();
/**
* Register a workflow
* @param workflow The workflow to register
*/
public static void registerWorkflow(AnnotatedWorkflow workflow) {
workflows.put(workflow.getWorkflowId(), workflow);
log.info("Registered workflow: {}", workflow.getWorkflowId());
}
/**
* Get a workflow by ID
* @param workflowId The workflow ID
* @return The workflow
*/
public static Optional<AnnotatedWorkflow> getWorkflow(String workflowId) {
return Optional.ofNullable(workflows.get(workflowId));
}
/**
* Get all registered workflows
* @return Collection of all registered workflows
*/
public static Collection<AnnotatedWorkflow> getAllWorkflows() {
return Collections.unmodifiableCollection(workflows.values());
}
/**
* Find a workflow that can handle a message
* @param message The message
* @param properties Additional properties
* @return The workflow that can handle the message
*/
public static Optional<AnnotatedWorkflow> findWorkflowForMessage(String message, Map<String, String> properties) {
// If workflowId is explicitly specified, try to use it
if (properties != null && properties.containsKey("workflowId")) {
String workflowId = properties.get("workflowId");
AnnotatedWorkflow workflow = workflows.get(workflowId);
if (workflow != null) {
return Optional.of(workflow);
}
}
// If requestSchemaName is provided, look for a workflow with that schema
if (properties != null && properties.containsKey("requestSchemaName")) {
String schemaName = properties.get("requestSchemaName");
for (AnnotatedWorkflow workflow : workflows.values()) {
if (hasMatchingSchema(workflow, schemaName)) {
log.info("Selected workflow {} based on schema: {}", workflow.getWorkflowId(), schemaName);
return Optional.of(workflow);
}
}
}
// Find the first workflow that can handle the message
for (AnnotatedWorkflow workflow : workflows.values()) {
if (workflow.canHandle(message, properties)) {
return Optional.of(workflow);
}
}
return Optional.empty();
}
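    // A minimal usage sketch (hypothetical message and schema name - not part of the original source).
    // Selection prefers an explicit "workflowId" property, then a matching "requestSchemaName",
    // and finally falls back to the first workflow whose canHandle() returns true:
    //
    //   Map<String, String> props = Map.of("requestSchemaName", "AskNameRequest");
    //   Optional<AnnotatedWorkflow> workflow =
    //           WorkflowRegistry.findWorkflowForMessage("update my profile", props);
    //   workflow.ifPresent(w -> log.info("Selected workflow: {}", w.getWorkflowId()));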
/**
* Check if workflow has a step with a matching schema name
* @param workflow The workflow to check
* @param schemaName The schema name to match
* @return True if the workflow has a step with the schema name
*/
private static boolean hasMatchingSchema(AnnotatedWorkflow workflow, String schemaName) {
if (workflow == null || StringUtils.isBlank(schemaName)) {
return false;
}
for (StepDefinition step : workflow.getStepDefinitions()) {
if (step.getInputSchemas() != null) {
                boolean foundSchema = step.getInputSchemas().stream()
                        .map(AIFunctionSchema::getSchemaName)
                        .anyMatch(schemaName::equals);
if (foundSchema) {
return true;
}
}
if (step.getOutputSchemas() != null) {
                boolean foundSchema = step.getOutputSchemas().stream()
                        .map(AIFunctionSchema::getSchemaName)
                        .anyMatch(schemaName::equals);
if (foundSchema) {
return true;
}
}
}
return false;
}
/**
* Get or create a session for a chat
* @param chatId The chat ID
* @param workflow The workflow
* @return The session
*/
public static WorkflowContext getOrCreateSession(String chatId, AnnotatedWorkflow workflow) {
// Check if we already have a session for this chat
WorkflowContext session = sessions.get(chatId);
if (session == null) {
// Try to get from repository
try {
WorkflowContextRepository repository = getRepository();
Optional<WorkflowContext> savedSession = repository.findById(chatId);
if (savedSession.isPresent()) {
session = savedSession.get();
sessions.put(chatId, session);
}
} catch (Exception e) {
log.warn("Could not retrieve session from repository: {}", e.getMessage());
}
}
if (session == null) {
// Create a new session
session = new WorkflowContext();
session.setContextId(chatId);
session.setWorkflowId(workflow.getWorkflowId());
session.setState(WorkflowContext.WorkflowSessionState.NEW);
session.setCreatedTime(System.currentTimeMillis());
session.setUpdatedTime(System.currentTimeMillis());
// Store the session
sessions.put(chatId, session);
saveSession(session);
log.info("Created new session: {} for workflow: {}", chatId, workflow.getWorkflowId());
} else if (!session.getWorkflowId().equals(workflow.getWorkflowId())) {
// If the session exists but is for a different workflow, update it
log.info("Switching session: {} from workflow: {} to: {}",
chatId, session.getWorkflowId(), workflow.getWorkflowId());
// Reset the session for the new workflow
session.setWorkflowId(workflow.getWorkflowId());
session.setState(WorkflowContext.WorkflowSessionState.NEW);
session.setCurrentStepId(null);
session.setCurrentResponseId(null);
session.setProperties(new HashMap<>());
session.setUpdatedTime(System.currentTimeMillis());
saveSession(session);
}
return session;
}
/**
* Get a session for a chat
* @param chatId The chat ID
* @return The session, if it exists
*/
public static Optional<WorkflowContext> getSession(String chatId) {
WorkflowContext session = sessions.get(chatId);
if (session == null) {
// Try to get from repository
try {
WorkflowContextRepository repository = getRepository();
Optional<WorkflowContext> savedSession = repository.findById(chatId);
if (savedSession.isPresent()) {
session = savedSession.get();
sessions.put(chatId, session);
}
} catch (Exception e) {
log.warn("Could not retrieve session from repository: {}", e.getMessage());
}
}
return Optional.ofNullable(session);
}
/**
* Save a session
* @param session The session to save
*/
public static void saveSession(WorkflowContext session) {
if (session == null || session.getContextId() == null) {
log.warn("Cannot save session: session or contextId is null");
return;
}
// Update in-memory cache
sessions.put(session.getContextId(), session);
// Save to repository
try {
WorkflowContextRepository repository = getRepository();
repository.saveOrUpdate(session);
} catch (Exception e) {
log.warn("Could not save session to repository: {}", e.getMessage());
}
}
/**
* Get the repository bean
* @return The repository
*/
private static WorkflowContextRepository getRepository() {
try {
return ApplicationContextProvider.getBean(WorkflowContextRepository.class);
} catch (Exception e) {
log.warn("Could not get repository bean: {}", e.getMessage());
throw e;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/client/ClaudeApiClient.java
|
package ai.driftkit.clients.claude.client;
import ai.driftkit.clients.claude.domain.ClaudeMessageRequest;
import ai.driftkit.clients.claude.domain.ClaudeMessageResponse;
import feign.Headers;
import feign.RequestLine;
public interface ClaudeApiClient {
@RequestLine("POST /v1/messages")
@Headers({
"Content-Type: application/json",
"Accept: application/json",
"anthropic-version: 2023-06-01"
})
ClaudeMessageResponse createMessage(ClaudeMessageRequest request);
// TODO: Add streaming support when needed
// @RequestLine("POST /v1/messages")
// @Headers({
// "Content-Type: application/json",
// "Accept: text/event-stream",
// "anthropic-version: 2023-06-01"
// })
// Stream<ClaudeStreamEvent> createMessageStream(ClaudeMessageRequest request);
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/client/ClaudeClientFactory.java
|
package ai.driftkit.clients.claude.client;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import feign.*;
import feign.jackson.JacksonDecoder;
import feign.jackson.JacksonEncoder;
import feign.slf4j.Slf4jLogger;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@UtilityClass
public class ClaudeClientFactory {
private static final String DEFAULT_BASE_URL = "https://api.anthropic.com";
public static ClaudeApiClient createClient(String apiKey) {
return createClient(apiKey, null);
}
public static ClaudeApiClient createClient(String apiKey, String baseUrl) {
if (baseUrl == null || baseUrl.isEmpty()) {
baseUrl = DEFAULT_BASE_URL;
}
ObjectMapper objectMapper = new ObjectMapper()
.registerModule(new JavaTimeModule())
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
return Feign.builder()
.encoder(new JacksonEncoder(objectMapper))
.decoder(new JacksonDecoder(objectMapper))
.logger(new Slf4jLogger(ClaudeApiClient.class))
.logLevel(Logger.Level.BASIC)
.requestInterceptor(new ClaudeRequestInterceptor(apiKey))
.retryer(new Retryer.Default(100, 1000, 3))
.options(new Request.Options(30000, 60000)) // 30s connect, 60s read timeout
.target(ClaudeApiClient.class, baseUrl);
}
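    // A minimal usage sketch (hypothetical API key and prompt - not part of the original source):
    //
    //   ClaudeApiClient client = ClaudeClientFactory.createClient(System.getenv("ANTHROPIC_API_KEY"));
    //   ClaudeMessageRequest request = ClaudeMessageRequest.builder()
    //           .model(ClaudeUtils.CLAUDE_SONNET_4)
    //           .messages(List.of(ClaudeMessage.textMessage("user", "Hello, Claude")))
    //           .maxTokens(1024)
    //           .build();
    //   ClaudeMessageResponse response = client.createMessage(request);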
private static class ClaudeRequestInterceptor implements RequestInterceptor {
private final String apiKey;
public ClaudeRequestInterceptor(String apiKey) {
this.apiKey = apiKey;
}
@Override
public void apply(RequestTemplate template) {
template.header("x-api-key", apiKey);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/client/ClaudeModelClient.java
|
package ai.driftkit.clients.claude.client;
import ai.driftkit.clients.claude.domain.*;
import ai.driftkit.clients.claude.domain.ClaudeMessageRequest.ToolChoice;
import ai.driftkit.clients.claude.utils.ClaudeUtils;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.common.domain.client.ModelClient.ModelClientInit;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage.ModelContentElement;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelMessage;
import ai.driftkit.common.domain.client.ModelTextRequest.ToolMode;
import ai.driftkit.common.domain.client.ModelTextResponse.ResponseMessage;
import ai.driftkit.common.domain.client.ModelTextResponse.Usage;
import ai.driftkit.common.domain.streaming.StreamingCallback;
import ai.driftkit.common.domain.streaming.StreamingResponse;
import ai.driftkit.common.tools.ToolCall;
import ai.driftkit.common.utils.JsonUtils;
import ai.driftkit.common.utils.ModelUtils;
import ai.driftkit.config.EtlConfig.VaultConfig;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Flow;
import java.util.concurrent.atomic.AtomicBoolean;
@Slf4j
public class ClaudeModelClient extends ModelClient implements ModelClientInit {
public static final String CLAUDE_DEFAULT = ClaudeUtils.CLAUDE_SONNET_4;
public static final String CLAUDE_SMART_DEFAULT = ClaudeUtils.CLAUDE_OPUS_4;
public static final String CLAUDE_MINI_DEFAULT = ClaudeUtils.CLAUDE_HAIKU_3_5;
public static final String CLAUDE_PREFIX = ClaudeUtils.CLAUDE_PREFIX;
public static final int MAX_TOKENS = 8192;
private ClaudeApiClient client;
private VaultConfig config;
private final HttpClient httpClient = HttpClient.newBuilder()
.version(HttpClient.Version.HTTP_2)
.connectTimeout(Duration.ofSeconds(30))
.build();
@Override
public ModelClient init(VaultConfig config) {
this.config = config;
this.client = ClaudeClientFactory.createClient(
config.getApiKey(),
config.getBaseUrl()
);
this.setTemperature(config.getTemperature());
this.setModel(config.getModel());
this.setStop(config.getStop());
this.jsonObjectSupport = config.isJsonObject();
return this;
}
public static ModelClient create(VaultConfig config) {
ClaudeModelClient modelClient = new ClaudeModelClient();
modelClient.init(config);
return modelClient;
}
@Override
public Set<Capability> getCapabilities() {
return Set.of(
Capability.TEXT_TO_TEXT,
Capability.IMAGE_TO_TEXT,
Capability.FUNCTION_CALLING,
Capability.JSON_OBJECT,
Capability.JSON_SCHEMA,
Capability.TOOLS
// Note: TEXT_TO_IMAGE is not supported by Claude
);
}
@Override
public ModelTextResponse textToText(ModelTextRequest prompt) {
super.textToText(prompt);
return processPrompt(prompt);
}
@Override
public ModelTextResponse imageToText(ModelTextRequest prompt) throws UnsupportedCapabilityException {
super.imageToText(prompt);
return processPrompt(prompt);
}
@Override
public ModelImageResponse textToImage(ModelImageRequest prompt) {
throw new UnsupportedCapabilityException("Claude does not support image generation");
}
private ModelTextResponse processPrompt(ModelTextRequest prompt) {
String model = Optional.ofNullable(prompt.getModel())
.orElse(Optional.ofNullable(getModel())
.orElse(CLAUDE_DEFAULT));
// Build messages
List<ClaudeMessage> messages = new ArrayList<>();
String systemPrompt = null;
for (ModelContentMessage message : prompt.getMessages()) {
String role = message.getRole().name().toLowerCase();
// Handle system messages separately
if ("system".equals(role)) {
StringBuilder systemBuilder = new StringBuilder();
for (ModelContentElement element : message.getContent()) {
if (element.getType() == ModelTextRequest.MessageType.text) {
systemBuilder.append(element.getText());
}
}
systemPrompt = systemBuilder.toString();
continue;
}
// Convert content elements to Claude content blocks
List<ClaudeContent> contents = new ArrayList<>();
for (ModelContentElement element : message.getContent()) {
switch (element.getType()) {
case text:
contents.add(ClaudeContent.text(element.getText()));
break;
case image:
if (element.getImage() != null) {
contents.add(ClaudeContent.image(
ClaudeUtils.bytesToBase64(element.getImage().getImage()),
element.getImage().getMimeType()
));
}
break;
}
}
messages.add(ClaudeMessage.contentMessage(role, contents));
}
// Add system messages from config if not already present
if (systemPrompt == null && CollectionUtils.isNotEmpty(getSystemMessages())) {
systemPrompt = String.join("\n", getSystemMessages());
}
// Build request
Integer maxTokens = Optional.ofNullable(getMaxCompletionTokens()).orElse(getMaxTokens());
if (maxTokens == null) {
maxTokens = Optional.ofNullable(config.getMaxTokens()).orElse(MAX_TOKENS);
}
ClaudeMessageRequest.ClaudeMessageRequestBuilder requestBuilder = ClaudeMessageRequest.builder()
.model(model)
.messages(messages)
.maxTokens(maxTokens)
.temperature(Optional.ofNullable(prompt.getTemperature()).orElse(getTemperature()))
.topP(getTopP())
.stopSequences(getStop())
.system(systemPrompt);
// Handle tools/functions
if (prompt.getToolMode() != ToolMode.none) {
List<Tool> modelTools = CollectionUtils.isNotEmpty(prompt.getTools()) ? prompt.getTools() : getTools();
if (CollectionUtils.isNotEmpty(modelTools)) {
requestBuilder.tools(ClaudeUtils.convertToClaudeTools(modelTools));
// Set tool choice based on mode
if (prompt.getToolMode() == ToolMode.auto) {
requestBuilder.toolChoice(ToolChoice.builder()
.type("auto")
.build());
}
}
}
ClaudeMessageRequest request = requestBuilder.build();
try {
ClaudeMessageResponse response = client.createMessage(request);
return mapToModelTextResponse(response);
} catch (Exception e) {
log.error("Error calling Claude API", e);
throw new RuntimeException("Failed to call Claude API", e);
}
}
private ModelTextResponse mapToModelTextResponse(ClaudeMessageResponse response) {
if (response == null) {
return null;
}
// Extract content and tool calls
StringBuilder contentBuilder = new StringBuilder();
List<ToolCall> toolCalls = new ArrayList<>();
if (response.getContent() != null) {
for (ClaudeContent content : response.getContent()) {
if ("text".equals(content.getType())) {
contentBuilder.append(content.getText());
} else if ("tool_use".equals(content.getType())) {
// Convert tool use to tool call
Map<String, JsonNode> arguments = new HashMap<>();
if (content.getInput() != null) {
content.getInput().forEach((key, value) -> {
try {
JsonNode node = ModelUtils.OBJECT_MAPPER.valueToTree(value);
arguments.put(key, node);
} catch (Exception e) {
log.error("Error converting argument to JsonNode", e);
}
});
}
toolCalls.add(ToolCall.builder()
.id(content.getId())
.type("function")
.function(ToolCall.FunctionCall.builder()
.name(content.getName())
.arguments(arguments)
.build())
.build());
}
}
}
String textContent = contentBuilder.toString();
if (JsonUtils.isJSON(textContent) && !JsonUtils.isValidJSON(textContent)) {
textContent = JsonUtils.fixIncompleteJSON(textContent);
}
ModelMessage message = ModelMessage.builder()
.role(Role.assistant)
.content(textContent)
.toolCalls(toolCalls.isEmpty() ? null : toolCalls)
.build();
ResponseMessage choice = ResponseMessage.builder()
.index(0)
.message(message)
.finishReason(response.getStopReason())
.build();
// Map usage
Usage usage = null;
if (response.getUsage() != null) {
int totalTokens = (response.getUsage().getInputTokens() != null ? response.getUsage().getInputTokens() : 0) +
(response.getUsage().getOutputTokens() != null ? response.getUsage().getOutputTokens() : 0);
usage = new Usage(
response.getUsage().getInputTokens(),
response.getUsage().getOutputTokens(),
totalTokens
);
}
return ModelTextResponse.builder()
.id(response.getId())
.method("claude.messages")
.createdTime(System.currentTimeMillis())
.model(response.getModel())
.choices(List.of(choice))
.usage(usage)
.build();
}
@Override
public StreamingResponse<String> streamTextToText(ModelTextRequest prompt) {
        // Streaming reuses the text-to-text capability, so verify it is supported
if (!getCapabilities().contains(Capability.TEXT_TO_TEXT)) {
throw new UnsupportedCapabilityException("Text to text is not supported");
}
return new StreamingResponse<String>() {
private final AtomicBoolean active = new AtomicBoolean(false);
private final AtomicBoolean cancelled = new AtomicBoolean(false);
private CompletableFuture<Void> streamFuture;
@Override
public void subscribe(StreamingCallback<String> callback) {
if (!active.compareAndSet(false, true)) {
callback.onError(new IllegalStateException("Stream already subscribed"));
return;
}
streamFuture = CompletableFuture.runAsync(() -> {
try {
processStreamingPrompt(prompt, callback, cancelled);
} catch (Exception e) {
if (!cancelled.get()) {
callback.onError(e);
}
} finally {
active.set(false);
}
});
}
@Override
public void cancel() {
cancelled.set(true);
if (streamFuture != null) {
streamFuture.cancel(true);
}
active.set(false);
}
@Override
public boolean isActive() {
return active.get();
}
};
}
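    // A minimal usage sketch (hypothetical callback wiring - assumes StreamingCallback exposes the
    // onNext/onComplete/onError methods used by SSESubscriber below):
    //
    //   StreamingResponse<String> stream = client.streamTextToText(prompt);
    //   stream.subscribe(new StreamingCallback<String>() {
    //       @Override public void onNext(String token) { System.out.print(token); }
    //       @Override public void onComplete() { System.out.println(); }
    //       @Override public void onError(Throwable error) { error.printStackTrace(); }
    //   });
    //   // stream.cancel() can be called at any point to stop consuming the SSE stream.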
private void processStreamingPrompt(ModelTextRequest prompt, StreamingCallback<String> callback, AtomicBoolean cancelled) throws Exception {
// Build Claude request with streaming enabled
ClaudeMessageRequest request = buildClaudeRequest(prompt);
request.setStream(true);
String apiKey = config.getApiKey();
String baseUrl = Optional.ofNullable(config.getBaseUrl()).orElse("https://api.anthropic.com");
String requestBody = JsonUtils.toJson(request);
// Build HTTP request for streaming
HttpRequest httpRequest = HttpRequest.newBuilder()
.uri(URI.create(baseUrl + "/v1/messages"))
.header("Content-Type", "application/json")
.header("Accept", "text/event-stream")
.header("x-api-key", apiKey)
.header("anthropic-version", "2023-06-01")
.POST(HttpRequest.BodyPublishers.ofString(requestBody))
.timeout(Duration.ofMinutes(5))
.build();
// Create SSE subscriber
SSESubscriber sseSubscriber = new SSESubscriber(callback, cancelled);
// Send request with streaming response
HttpResponse<Void> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.fromLineSubscriber(sseSubscriber));
// Check for errors
if (response.statusCode() >= 400) {
// Try to get error body
HttpRequest errorRequest = HttpRequest.newBuilder()
.uri(URI.create(baseUrl + "/v1/messages"))
.header("Content-Type", "application/json")
.header("x-api-key", apiKey)
.header("anthropic-version", "2023-06-01")
.POST(HttpRequest.BodyPublishers.ofString(requestBody))
.build();
HttpResponse<String> errorResponse = httpClient.send(errorRequest, HttpResponse.BodyHandlers.ofString());
log.error("Error response: {}", errorResponse.body());
throw new RuntimeException("Claude API error: HTTP " + response.statusCode() + " - " + errorResponse.body());
}
}
/**
* SSE Subscriber for handling streaming responses from Claude
*/
private static class SSESubscriber implements Flow.Subscriber<String> {
private final StreamingCallback<String> callback;
private final AtomicBoolean cancelled;
private Flow.Subscription subscription;
private boolean completed = false;
public SSESubscriber(StreamingCallback<String> callback, AtomicBoolean cancelled) {
this.callback = callback;
this.cancelled = cancelled;
}
@Override
public void onSubscribe(Flow.Subscription subscription) {
this.subscription = subscription;
subscription.request(Long.MAX_VALUE);
}
@Override
public void onNext(String line) {
if (cancelled.get() || completed) {
// Don't cancel if already completed - just ignore further messages
if (cancelled.get() && !completed) {
subscription.cancel();
}
return;
}
try {
// Claude uses SSE format
if (line.startsWith("data: ")) {
String data = line.substring(6).trim();
// Parse the JSON event
JsonNode eventNode = JsonUtils.fromJson(data, JsonNode.class);
if (eventNode != null) {
String eventType = eventNode.path("type").asText();
switch (eventType) {
case "content_block_delta":
// Extract text delta
JsonNode delta = eventNode.path("delta");
String text = delta.path("text").asText(null);
if (text != null) {
callback.onNext(text);
}
break;
case "message_stop":
// Stream completed
completed = true;
callback.onComplete();
break;
case "error":
// Error occurred
String errorMessage = eventNode.path("error").path("message").asText("Unknown error");
completed = true;
callback.onError(new RuntimeException("Claude API error: " + errorMessage));
break;
// Ignore other event types like message_start, content_block_start, etc.
default:
break;
}
}
} else if (line.startsWith("event: ")) {
                    // Claude also sends the event type on a separate line; we can ignore it
                    // since we parse the type from the JSON data
}
} catch (Exception e) {
log.error("Error processing SSE line: {}", line, e);
// Continue processing other lines
}
}
@Override
public void onError(Throwable throwable) {
if (!cancelled.get() && !completed) {
completed = true;
callback.onError(throwable);
}
}
@Override
public void onComplete() {
if (!cancelled.get() && !completed) {
completed = true;
callback.onComplete();
}
}
}
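    // For reference, the SSE lines handled above look roughly like this (abridged,
    // hypothetical payloads following Anthropic's documented event format):
    //
    //   event: content_block_delta
    //   data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hel"}}
    //
    //   event: message_stop
    //   data: {"type":"message_stop"}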
private ClaudeMessageRequest buildClaudeRequest(ModelTextRequest prompt) {
// Extract messages and separate system message
String systemMessage = null;
List<ClaudeMessage> messages = new ArrayList<>();
for (ModelContentMessage msg : prompt.getMessages()) {
if (msg.getRole() == Role.system) {
// Claude expects system message separately
if (msg.getContent() != null && !msg.getContent().isEmpty()) {
ModelContentElement element = msg.getContent().get(0);
if (element.getText() != null) {
systemMessage = element.getText();
}
}
} else {
// Convert to Claude message format
List<ClaudeContent> contents = new ArrayList<>();
if (msg.getContent() != null) {
for (ModelContentElement element : msg.getContent()) {
if (element.getText() != null) {
contents.add(ClaudeContent.builder()
.type("text")
.text(element.getText())
.build());
} else if (element.getImage() != null) {
contents.add(ClaudeContent.builder()
.type("image")
.source(ClaudeContent.ImageSource.builder()
.type("base64")
.mediaType(element.getImage().getMimeType())
.data(Base64.getEncoder().encodeToString(element.getImage().getImage()))
.build())
.build());
}
}
}
messages.add(ClaudeMessage.builder()
.role(msg.getRole().name())
.content(contents)
.build());
}
}
// Build request
ClaudeMessageRequest.ClaudeMessageRequestBuilder builder = ClaudeMessageRequest.builder()
.model(Optional.ofNullable(prompt.getModel()).orElse(getModel()))
.messages(messages)
.maxTokens(MAX_TOKENS)
.temperature(Optional.ofNullable(prompt.getTemperature()).orElse(getTemperature()));
if (systemMessage != null) {
builder.system(systemMessage);
}
// Add tools if present
if (prompt.getTools() != null && !prompt.getTools().isEmpty()) {
List<ClaudeTool> tools = ClaudeUtils.convertToClaudeTools(prompt.getTools());
builder.tools(tools);
if (prompt.getToolMode() == ToolMode.auto) {
builder.toolChoice(ToolChoice.builder().type("auto").build());
}
}
return builder.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/domain/ClaudeContent.java
|
package ai.driftkit.clients.claude.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ClaudeContent {
@JsonProperty("type")
private String type; // "text", "image", "tool_use", "tool_result"
@JsonProperty("text")
private String text;
@JsonProperty("source")
private ImageSource source;
@JsonProperty("id")
private String id; // For tool_use and tool_result
@JsonProperty("name")
private String name; // For tool_use
@JsonProperty("input")
private Map<String, Object> input; // For tool_use
@JsonProperty("tool_use_id")
private String toolUseId; // For tool_result
@JsonProperty("content")
private String content; // For tool_result
@JsonProperty("is_error")
private Boolean isError; // For tool_result
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ImageSource {
@JsonProperty("type")
private String type; // "base64"
@JsonProperty("media_type")
private String mediaType; // "image/jpeg", "image/png", "image/gif", "image/webp"
@JsonProperty("data")
private String data; // Base64 encoded image data
}
// Helper methods for creating content blocks
public static ClaudeContent text(String text) {
return ClaudeContent.builder()
.type("text")
.text(text)
.build();
}
public static ClaudeContent image(String base64Data, String mediaType) {
return ClaudeContent.builder()
.type("image")
.source(ImageSource.builder()
.type("base64")
.mediaType(mediaType)
.data(base64Data)
.build())
.build();
}
public static ClaudeContent toolUse(String id, String name, Map<String, Object> input) {
return ClaudeContent.builder()
.type("tool_use")
.id(id)
.name(name)
.input(input)
.build();
}
public static ClaudeContent toolResult(String toolUseId, String content) {
return ClaudeContent.builder()
.type("tool_result")
.toolUseId(toolUseId)
.content(content)
.build();
}
public static ClaudeContent toolError(String toolUseId, String error) {
return ClaudeContent.builder()
.type("tool_result")
.toolUseId(toolUseId)
.content(error)
.isError(true)
.build();
}
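    // A short usage sketch of the helpers above (hypothetical values - base64Png is assumed
    // to hold a Base64-encoded PNG):
    //
    //   List<ClaudeContent> blocks = List.of(
    //           ClaudeContent.text("What is in this picture?"),
    //           ClaudeContent.image(base64Png, "image/png"));
    //   ClaudeMessage userMessage = ClaudeMessage.contentMessage("user", blocks);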
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/domain/ClaudeMessage.java
|
package ai.driftkit.clients.claude.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ClaudeMessage {
@JsonProperty("role")
private String role; // "user" or "assistant"
@JsonProperty("content")
private List<ClaudeContent> content;
// Helper method to create a simple text message
public static ClaudeMessage textMessage(String role, String text) {
return ClaudeMessage.builder()
.role(role)
.content(List.of(ClaudeContent.text(text)))
.build();
}
// Helper method to create a message with content blocks
public static ClaudeMessage contentMessage(String role, List<ClaudeContent> contents) {
return ClaudeMessage.builder()
.role(role)
.content(contents)
.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/domain/ClaudeMessageRequest.java
|
package ai.driftkit.clients.claude.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ClaudeMessageRequest {
@JsonProperty("model")
private String model;
@JsonProperty("messages")
private List<ClaudeMessage> messages;
@JsonProperty("max_tokens")
private Integer maxTokens;
@JsonProperty("metadata")
private Map<String, Object> metadata;
@JsonProperty("stop_sequences")
private List<String> stopSequences;
@JsonProperty("stream")
private Boolean stream;
@JsonProperty("system")
private String system;
@JsonProperty("temperature")
private Double temperature;
@JsonProperty("tool_choice")
private ToolChoice toolChoice;
@JsonProperty("tools")
private List<ClaudeTool> tools;
@JsonProperty("top_k")
private Integer topK;
@JsonProperty("top_p")
private Double topP;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ToolChoice {
@JsonProperty("type")
private String type; // "auto", "any", "tool"
@JsonProperty("name")
private String name; // Only when type is "tool"
@JsonProperty("disable_parallel_tool_use")
private Boolean disableParallelToolUse;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/domain/ClaudeMessageResponse.java
|
package ai.driftkit.clients.claude.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ClaudeMessageResponse {
@JsonProperty("id")
private String id;
@JsonProperty("type")
private String type; // "message"
@JsonProperty("role")
private String role; // "assistant"
@JsonProperty("content")
private List<ClaudeContent> content;
@JsonProperty("model")
private String model;
@JsonProperty("stop_reason")
private String stopReason; // "end_turn", "max_tokens", "stop_sequence", "tool_use"
@JsonProperty("stop_sequence")
private String stopSequence;
@JsonProperty("usage")
private ClaudeUsage usage;
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/domain/ClaudeStreamEvent.java
|
package ai.driftkit.clients.claude.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ClaudeStreamEvent {
@JsonProperty("type")
private String type; // "message_start", "content_block_start", "content_block_delta", "content_block_stop", "message_delta", "message_stop", "error"
@JsonProperty("message")
private ClaudeMessageResponse message; // For message_start
@JsonProperty("index")
private Integer index; // For content_block events
@JsonProperty("content_block")
private ClaudeContent contentBlock; // For content_block_start
@JsonProperty("delta")
private Delta delta; // For content_block_delta and message_delta
@JsonProperty("usage")
private ClaudeUsage usage; // For message_delta
@JsonProperty("error")
private Error error; // For error events
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class Delta {
@JsonProperty("type")
private String type; // "text_delta", "input_json_delta"
@JsonProperty("text")
private String text;
@JsonProperty("partial_json")
private String partialJson;
@JsonProperty("stop_reason")
private String stopReason;
@JsonProperty("stop_sequence")
private String stopSequence;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class Error {
@JsonProperty("type")
private String type;
@JsonProperty("message")
private String message;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/domain/ClaudeTool.java
|
package ai.driftkit.clients.claude.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ClaudeTool {
@JsonProperty("name")
private String name;
@JsonProperty("description")
private String description;
@JsonProperty("input_schema")
private InputSchema inputSchema;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class InputSchema {
@JsonProperty("type")
private String type; // Usually "object"
@JsonProperty("properties")
private Map<String, SchemaProperty> properties;
@JsonProperty("required")
private String[] required;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class SchemaProperty {
@JsonProperty("type")
private String type;
@JsonProperty("description")
private String description;
@JsonProperty("enum")
private String[] enumValues;
@JsonProperty("items")
private SchemaProperty items; // For array types
@JsonProperty("properties")
private Map<String, SchemaProperty> properties; // For object types
@JsonProperty("required")
private String[] required; // For object types
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/domain/ClaudeUsage.java
|
package ai.driftkit.clients.claude.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ClaudeUsage {
@JsonProperty("input_tokens")
private Integer inputTokens;
@JsonProperty("output_tokens")
private Integer outputTokens;
@JsonProperty("cache_creation_input_tokens")
private Integer cacheCreationInputTokens;
@JsonProperty("cache_read_input_tokens")
private Integer cacheReadInputTokens;
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude
|
java-sources/ai/driftkit/driftkit-clients-claude/0.8.1/ai/driftkit/clients/claude/utils/ClaudeUtils.java
|
package ai.driftkit.clients.claude.utils;
import ai.driftkit.clients.claude.domain.ClaudeContent;
import ai.driftkit.clients.claude.domain.ClaudeTool;
import ai.driftkit.common.domain.client.ModelClient;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Slf4j
@UtilityClass
public class ClaudeUtils {
// Model constants - Claude 4 series (latest)
public static final String CLAUDE_OPUS_4 = "claude-opus-4-20250514";
public static final String CLAUDE_SONNET_4 = "claude-sonnet-4-20250514";
public static final String CLAUDE_HAIKU_3_5 = "claude-3-5-haiku-20241022";
// Older models (for compatibility)
public static final String CLAUDE_SONNET_3_5 = "claude-3-5-sonnet-20241022";
public static final String CLAUDE_PREFIX = "claude";
public static String bytesToBase64(byte[] bytes) {
return Base64.getEncoder().encodeToString(bytes);
}
public static byte[] base64ToBytes(String base64) {
return Base64.getDecoder().decode(base64);
}
public static ClaudeContent.ImageSource createImageSource(byte[] imageData, String mimeType) {
return ClaudeContent.ImageSource.builder()
.type("base64")
.mediaType(mimeType)
.data(bytesToBase64(imageData))
.build();
}
public static List<ClaudeTool> convertToClaudeTools(List<ModelClient.Tool> tools) {
if (tools == null || tools.isEmpty()) {
return null;
}
return tools.stream()
.filter(tool -> tool.getType() == ModelClient.ResponseFormatType.function)
.map(tool -> {
ModelClient.ToolFunction function = tool.getFunction();
return ClaudeTool.builder()
.name(function.getName())
.description(function.getDescription())
.inputSchema(convertToInputSchema(function.getParameters()))
.build();
})
.collect(Collectors.toList());
}
private static ClaudeTool.InputSchema convertToInputSchema(ModelClient.ToolFunction.FunctionParameters params) {
if (params == null) {
return null;
}
Map<String, ClaudeTool.SchemaProperty> properties = new HashMap<>();
if (params.getProperties() != null) {
params.getProperties().forEach((key, value) -> {
properties.put(key, convertToSchemaProperty(value));
});
}
return ClaudeTool.InputSchema.builder()
.type("object")
.properties(properties)
.required(params.getRequired() != null ? params.getRequired().toArray(new String[0]) : null)
.build();
}
private static ClaudeTool.SchemaProperty convertToSchemaProperty(ModelClient.Property property) {
if (property == null) {
return null;
}
ClaudeTool.SchemaProperty.SchemaPropertyBuilder builder = ClaudeTool.SchemaProperty.builder()
.type(mapPropertyType(property.getType()))
.description(property.getDescription());
if (property.getEnumValues() != null) {
builder.enumValues(property.getEnumValues().toArray(new String[0]));
}
// Handle nested properties for objects
if (property.getProperties() != null) {
Map<String, ClaudeTool.SchemaProperty> nestedProps = new HashMap<>();
property.getProperties().forEach((key, value) -> {
nestedProps.put(key, convertToSchemaProperty(value));
});
builder.properties(nestedProps);
if (property.getRequired() != null) {
builder.required(property.getRequired().toArray(new String[0]));
}
}
// Handle array items
if (property.getItems() != null) {
builder.items(convertToSchemaProperty(property.getItems()));
}
return builder.build();
}
private static String mapPropertyType(ModelClient.ResponseFormatType type) {
if (type == null) {
return "string";
}
switch (type) {
case String:
case Enum:
return "string";
case Number:
return "number";
case Integer:
return "integer";
case Boolean:
return "boolean";
case Array:
return "array";
case Object:
return "object";
default:
return "string";
}
}
public static String determineBestModel(String requestedModel) {
if (requestedModel != null && !requestedModel.isEmpty()) {
return requestedModel;
}
return CLAUDE_SONNET_4; // Default to Sonnet 4
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-core/0.8.1/ai/driftkit/clients
|
java-sources/ai/driftkit/driftkit-clients-core/0.8.1/ai/driftkit/clients/core/ModelClientFactory.java
|
package ai.driftkit.clients.core;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.common.service.TextTokenizer;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.concurrent.ConcurrentHashMap;
/**
* This factory creates instances of model clients from config.
* Uses ServiceLoader to dynamically discover available model client implementations.
*/
public class ModelClientFactory {
private static final Map<String, ModelClient<?>> clients = new ConcurrentHashMap<>();
@SuppressWarnings("unchecked")
public static <T> ModelClient<T> fromConfig(VaultConfig config) {
if (config == null || config.getName() == null) {
throw new IllegalArgumentException("Configuration and client name must not be null");
}
return (ModelClient<T>) clients.computeIfAbsent(config.getName(), name -> {
String clientName = config.getName();
ServiceLoader<ModelClient> loader = ServiceLoader.load(ModelClient.class);
for (ModelClient<?> client : loader) {
if (!(client instanceof ModelClient.ModelClientInit)) {
continue;
}
if (!supportsClientName(client, clientName)) {
continue;
}
try {
ModelClient.ModelClientInit initClient = (ModelClient.ModelClientInit) client;
ModelClient<?> configuredClient = initClient.init(config);
if (config.isTracing()) {
return new TraceableModelClient<>(configuredClient);
}
return configuredClient;
} catch (Exception e) {
throw new RuntimeException("Failed to initialize client: " + clientName, e);
}
}
throw new IllegalArgumentException("Unknown or unavailable model client: " + clientName);
});
}
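    // A minimal usage sketch (hypothetical config values; assumes Lombok-style setters on
    // VaultConfig and a matching ModelClient implementation registered via ServiceLoader):
    //
    //   VaultConfig config = new VaultConfig();
    //   config.setName("claude");
    //   config.setApiKey(System.getenv("ANTHROPIC_API_KEY"));
    //   ModelClient<?> client = ModelClientFactory.fromConfig(config);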
@SuppressWarnings("unchecked")
public static <T> ModelClient<T> fromConfig(VaultConfig config, TextTokenizer tokenizer) {
ModelClient<T> baseClient = fromConfig(config);
if (config.isTracing() && !(baseClient instanceof TraceableModelClient)) {
return new TraceableModelClient<>(baseClient, tokenizer);
}
return baseClient;
}
public static <T> TraceableModelClient<T> createTraceable(ModelClient<T> delegate, TextTokenizer tokenizer) {
return new TraceableModelClient<>(delegate, tokenizer);
}
/**
* Checks if the client supports the given client name.
*/
private static boolean supportsClientName(ModelClient<?> client, String clientName) {
return client.getClass().getSimpleName().toLowerCase().contains(clientName.toLowerCase());
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-core/0.8.1/ai/driftkit/clients
|
java-sources/ai/driftkit/driftkit-clients-core/0.8.1/ai/driftkit/clients/core/TraceableModelClient.java
|
package ai.driftkit.clients.core;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.common.domain.*;
import ai.driftkit.common.service.TextTokenizer;
import ai.driftkit.common.service.impl.SimpleTextTokenizer;
import ai.driftkit.common.domain.client.ModelTextResponse.Usage;
import lombok.Data;
import lombok.Getter;
import org.apache.commons.lang3.StringUtils;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Supplier;
@Data
public class TraceableModelClient<T> extends ModelClient<T> {
@Getter
private final ModelClient<T> delegate;
private final TextTokenizer tokenizer;
public TraceableModelClient(ModelClient<T> delegate) {
this(delegate, new SimpleTextTokenizer());
}
public TraceableModelClient(ModelClient<T> delegate, TextTokenizer tokenizer) {
this.delegate = delegate;
this.tokenizer = tokenizer;
}
public ModelClient<T> init(VaultConfig config) {
((ModelClientInit)delegate).init(config);
return this;
}
@Override
public Set<Capability> getCapabilities() {
return delegate.getCapabilities();
}
@Override
public ModelTextResponse textToText(ModelTextRequest prompt) throws UnsupportedCapabilityException {
ModelTrace trace = ModelTrace.builder().build();
fillTraceFromTextRequest(trace, prompt);
return executeWithTracing(
() -> delegate.textToText(prompt),
trace,
response -> {
if (response != null) {
enhanceTraceFromTextResponse(trace, response);
}
return response;
},
() -> createErrorResponse(prompt)
);
}
@Override
public ModelImageResponse textToImage(ModelImageRequest prompt) throws UnsupportedCapabilityException {
ModelTrace trace = ModelTrace.builder().build();
fillTraceFromImageRequest(trace, prompt);
return executeWithTracing(
() -> delegate.textToImage(prompt),
trace,
response -> {
if (response != null) {
trace.setModel(response.getModel());
}
return response;
},
() -> createErrorImageResponse(prompt)
);
}
@Override
public ModelTextResponse imageToText(ModelTextRequest prompt) throws UnsupportedCapabilityException {
ModelTrace trace = ModelTrace.builder().build();
fillTraceFromTextRequest(trace, prompt);
return executeWithTracing(
() -> delegate.imageToText(prompt),
trace,
response -> {
if (response != null) {
enhanceTraceFromTextResponse(trace, response);
}
return response;
},
() -> createErrorResponse(prompt)
);
}
private <R> R executeWithTracing(
Supplier<R> operation,
ModelTrace trace,
Function<R, R> responseHandler,
Supplier<R> errorResponseCreator) {
Instant start = Instant.now();
R response = null;
try {
response = operation.get();
if (response != null) {
response = responseHandler.apply(response);
}
return response;
} catch (Exception e) {
if (response == null) {
response = errorResponseCreator.get();
}
trace.setHasError(true);
trace.setErrorMessage(e.getMessage());
throw e;
} finally {
trace.setExecutionTimeMs(Duration.between(start, Instant.now()).toMillis());
if (response != null) {
attachTraceToResponse(response, trace);
}
}
}
private void attachTraceToResponse(Object response, ModelTrace trace) {
if (response instanceof ModelTextResponse) {
((ModelTextResponse) response).setTrace(trace);
} else if (response instanceof ModelImageResponse) {
((ModelImageResponse) response).setTrace(trace);
}
}
private void fillTraceFromTextRequest(ModelTrace trace, ModelTextRequest prompt) {
if (prompt == null) return;
trace.setModel(prompt.getModel() != null ? prompt.getModel() : delegate.getModel());
if (prompt.getResponseFormat() != null) {
trace.setResponseFormat(prompt.getResponseFormat().getType().toString());
}
trace.setTemperature(prompt.getTemperature() != null ?
prompt.getTemperature() : delegate.getTemperature());
trace.setPromptTokens(estimatePromptTokens(prompt));
}
private void fillTraceFromImageRequest(ModelTrace trace, ModelImageRequest prompt) {
if (prompt == null) return;
trace.setModel(delegate.getModel());
trace.setResponseFormat("image");
trace.setTemperature(0d);
if (StringUtils.isNotBlank(prompt.getPrompt())) {
trace.setPromptTokens(estimateTextTokens(prompt.getPrompt()));
}
trace.setCompletionTokens(0);
}
private void enhanceTraceFromTextResponse(ModelTrace trace, ModelTextResponse response) {
if (response == null) return;
if (response.getModel() != null) {
trace.setModel(response.getModel());
}
if (response.getUsage() != null) {
Usage usage = response.getUsage();
if (usage.getPromptTokens() != null) {
trace.setPromptTokens(usage.getPromptTokens());
}
if (usage.getCompletionTokens() != null) {
trace.setCompletionTokens(usage.getCompletionTokens());
}
} else {
String responseContent = response.getResponse();
if (StringUtils.isNotBlank(responseContent)) {
trace.setCompletionTokens(estimateTextTokens(responseContent));
}
}
}
private ModelTextResponse createErrorResponse(ModelTextRequest prompt) {
return ModelTextResponse.builder()
.model(prompt != null && prompt.getModel() != null ?
prompt.getModel() : delegate.getModel())
.build();
}
private ModelImageResponse createErrorImageResponse(ModelImageRequest prompt) {
return ModelImageResponse.builder()
.model(delegate.getModel())
.build();
}
private int estimatePromptTokens(ModelTextRequest prompt) {
if (prompt == null || prompt.getMessages() == null) {
return 0;
}
return prompt.getMessages().stream()
.map(this::estimateContentMessageTokens)
.mapToInt(Integer::intValue)
.sum();
}
private int estimateContentMessageTokens(ModelImageResponse.ModelContentMessage message) {
if (message == null || message.getContent() == null) {
return 0;
}
return message.getContent().stream()
.filter(element -> element.getType() == ModelTextRequest.MessageType.text)
.map(element -> estimateTextTokens(element.getText()))
.mapToInt(Integer::intValue)
.sum();
}
private int estimateTextTokens(String text) {
if (StringUtils.isBlank(text)) {
return 0;
}
return tokenizer.estimateTokens(text);
}
@Override
public T getWorkflow() {
return delegate.getWorkflow();
}
@Override
public void setWorkflow(T workflow) {
delegate.setWorkflow(workflow);
}
@Override
public String getModel() {
return delegate.getModel();
}
@Override
public void setModel(String model) {
delegate.setModel(model);
}
@Override
public List<String> getSystemMessages() {
return delegate.getSystemMessages();
}
@Override
public void setSystemMessages(List<String> systemMessages) {
delegate.setSystemMessages(systemMessages);
}
@Override
public Double getTemperature() {
return delegate.getTemperature();
}
@Override
public void setTemperature(Double temperature) {
delegate.setTemperature(temperature);
}
@Override
public Double getTopP() {
return delegate.getTopP();
}
@Override
public void setTopP(Double topP) {
delegate.setTopP(topP);
}
@Override
public List<String> getStop() {
return delegate.getStop();
}
@Override
public void setStop(List<String> stop) {
delegate.setStop(stop);
}
@Override
public boolean isJsonObjectSupport() {
return delegate.isJsonObjectSupport();
}
@Override
public void setJsonObjectSupport(boolean jsonObjectSupport) {
delegate.setJsonObjectSupport(jsonObjectSupport);
}
@Override
public Boolean getLogprobs() {
return delegate.getLogprobs();
}
@Override
public void setLogprobs(Boolean logprobs) {
delegate.setLogprobs(logprobs);
}
@Override
public Integer getTopLogprobs() {
return delegate.getTopLogprobs();
}
@Override
public void setTopLogprobs(Integer topLogprobs) {
delegate.setTopLogprobs(topLogprobs);
}
@Override
public Integer getMaxTokens() {
return delegate.getMaxTokens();
}
@Override
public void setMaxTokens(Integer maxTokens) {
delegate.setMaxTokens(maxTokens);
}
@Override
public Integer getMaxCompletionTokens() {
return delegate.getMaxCompletionTokens();
}
@Override
public void setMaxCompletionTokens(Integer maxCompletionTokens) {
delegate.setMaxCompletionTokens(maxCompletionTokens);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-core/0.8.1/ai/driftkit/clients
|
java-sources/ai/driftkit/driftkit-clients-core/0.8.1/ai/driftkit/clients/util/ChatStoreUtils.java
|
package ai.driftkit.clients.util;
import ai.driftkit.common.domain.chat.ChatMessage;
import ai.driftkit.common.domain.chat.ChatMessage.MessageType;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage;
import ai.driftkit.common.domain.client.Role;
import ai.driftkit.common.service.ChatStore;
import ai.driftkit.common.utils.JsonUtils;
import lombok.SneakyThrows;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import java.util.stream.Collectors;
/**
* Utility methods for ChatStore operations and conversions to model API format.
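*
* <p>Minimal usage sketch (illustrative; assumes a {@code ChatStore} instance and chat id are available):
* <pre>{@code
* List<ModelContentMessage> history = ChatStoreUtils.getModelMessages(chatStore, chatId, 4000);
* // history can then be passed as the message list of a ModelTextRequest
* }</pre>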
*/
public class ChatStoreUtils {
/**
* Convert ChatMessages to ModelContentMessages for LLM API calls.
*/
public static List<ModelContentMessage> toModelMessages(List<ChatMessage> messages) {
return messages.stream()
.map(ChatStoreUtils::toModelMessage)
.collect(Collectors.toList());
}
/**
* Convert a single ChatMessage to ModelContentMessage.
*/
@SneakyThrows
public static ModelContentMessage toModelMessage(ChatMessage message) {
Role role = switch (message.getType()) {
case USER -> Role.user;
case AI -> Role.assistant;
case SYSTEM -> Role.system;
case CONTEXT -> Role.system; // Context messages are system messages
};
// Get content from properties
String content = message.getPropertiesMap().get(ChatMessage.PROPERTY_MESSAGE);
if (StringUtils.isBlank(content)) {
// If no "message" property, use JSON representation of all properties
content = JsonUtils.toJson(message.getPropertiesMap());
}
return ModelContentMessage.create(role, content);
}
/**
* Get messages from ChatStore and convert to model format.
*/
public static List<ModelContentMessage> getModelMessages(ChatStore chatStore, String chatId) {
List<ChatMessage> messages = chatStore.getRecent(chatId);
return toModelMessages(messages);
}
/**
* Get messages within token limit and convert to model format.
*/
public static List<ModelContentMessage> getModelMessages(ChatStore chatStore, String chatId, int maxTokens) {
List<ChatMessage> messages = chatStore.getRecentWithinTokens(chatId, maxTokens);
return toModelMessages(messages);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/client/GeminiApiClient.java
|
package ai.driftkit.clients.gemini.client;
import ai.driftkit.clients.gemini.domain.GeminiChatRequest;
import ai.driftkit.clients.gemini.domain.GeminiChatResponse;
import com.fasterxml.jackson.annotation.JsonProperty;
import feign.Headers;
import feign.Param;
import feign.RequestLine;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
public interface GeminiApiClient {
@RequestLine("POST /v1beta/models/{model}:generateContent")
@Headers("Content-Type: application/json")
GeminiChatResponse generateContent(@Param("model") String model, GeminiChatRequest request);
@RequestLine("POST /v1beta/models/{model}:streamGenerateContent")
@Headers("Content-Type: application/json")
GeminiChatResponse streamGenerateContent(@Param("model") String model, GeminiChatRequest request);
@RequestLine("POST /v1beta/models/{model}:countTokens")
@Headers("Content-Type: application/json")
TokenCountResponse countTokens(@Param("model") String model, GeminiChatRequest request);
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class TokenCountResponse {
@JsonProperty("totalTokens")
private Integer totalTokens;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/client/GeminiClientFactory.java
|
package ai.driftkit.clients.gemini.client;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;
import feign.Feign;
import feign.RequestInterceptor;
import feign.jackson.JacksonDecoder;
import feign.jackson.JacksonEncoder;
import feign.slf4j.Slf4jLogger;
public class GeminiClientFactory {
private static final String DEFAULT_BASE_URL = "https://generativelanguage.googleapis.com";
public static GeminiApiClient createClient(String apiKey, String baseUrl) {
ObjectMapper objectMapper = new ObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.setPropertyNamingStrategy(PropertyNamingStrategies.LOWER_CAMEL_CASE);
RequestInterceptor apiKeyInterceptor = requestTemplate -> {
// Gemini uses API key in header
requestTemplate.header("x-goog-api-key", apiKey);
};
return Feign.builder()
.encoder(new JacksonEncoder(objectMapper))
.decoder(new JacksonDecoder(objectMapper))
.logger(new Slf4jLogger(GeminiApiClient.class))
.logLevel(feign.Logger.Level.BASIC)
.requestInterceptor(apiKeyInterceptor)
.target(GeminiApiClient.class, baseUrl != null ? baseUrl : DEFAULT_BASE_URL);
}
public static GeminiApiClient createClient(String apiKey) {
return createClient(apiKey, DEFAULT_BASE_URL);
}
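/*
* Minimal usage sketch (illustrative; the environment variable name is an assumption):
*
* GeminiApiClient api = GeminiClientFactory.createClient(System.getenv("GEMINI_API_KEY"));
* GeminiChatRequest request = GeminiChatRequest.builder()
* .contents(List.of(GeminiContent.builder()
* .role("user")
* .parts(List.of(GeminiContent.Part.builder().text("Hello").build()))
* .build()))
* .build();
* GeminiChatResponse response = api.generateContent("gemini-2.5-flash", request);
*/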
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/client/GeminiModelClient.java
|
package ai.driftkit.clients.gemini.client;
import ai.driftkit.clients.gemini.domain.*;
import ai.driftkit.clients.gemini.domain.GeminiContent.Part;
import ai.driftkit.clients.gemini.domain.GeminiGenerationConfig.ThinkingConfig;
import ai.driftkit.clients.gemini.domain.GeminiImageRequest.ImageGenerationConfig;
import ai.driftkit.clients.gemini.utils.GeminiUtils;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.common.domain.client.ModelClient.ModelClientInit;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage.ModelContentElement;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelMessage;
import ai.driftkit.common.domain.client.ModelTextRequest.ToolMode;
import ai.driftkit.common.domain.client.ModelTextResponse.ResponseMessage;
import ai.driftkit.common.domain.client.ModelTextResponse.Usage;
import ai.driftkit.common.domain.streaming.StreamingCallback;
import ai.driftkit.common.domain.streaming.StreamingResponse;
import ai.driftkit.common.tools.ToolCall;
import ai.driftkit.common.utils.JsonUtils;
import ai.driftkit.common.utils.ModelUtils;
import ai.driftkit.config.EtlConfig.VaultConfig;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.jetbrains.annotations.Nullable;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Flow;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
@Slf4j
public class GeminiModelClient extends ModelClient implements ModelClientInit {
public static final String GEMINI_DEFAULT = GeminiUtils.GEMINI_FLASH_2_5;
public static final String GEMINI_SMART_DEFAULT = GeminiUtils.GEMINI_PRO_2_5;
public static final String GEMINI_MINI_DEFAULT = GeminiUtils.GEMINI_FLASH_LITE_2_5;
public static final String GEMINI_IMAGE_DEFAULT = GeminiUtils.GEMINI_IMAGE_MODEL;
public static final String GEMINI_PREFIX = "gemini";
private GeminiApiClient client;
private VaultConfig config;
private final HttpClient httpClient = HttpClient.newBuilder()
.version(HttpClient.Version.HTTP_2)
.connectTimeout(Duration.ofSeconds(30))
.build();
@Override
public ModelClient init(VaultConfig config) {
this.config = config;
this.client = GeminiClientFactory.createClient(
config.getApiKey(),
config.getBaseUrl()
);
this.setTemperature(config.getTemperature());
this.setModel(config.getModel());
this.setStop(config.getStop());
this.jsonObjectSupport = config.isJsonObject();
return this;
}
public static ModelClient create(VaultConfig config) {
GeminiModelClient modelClient = new GeminiModelClient();
modelClient.init(config);
return modelClient;
}
@Override
public Set<Capability> getCapabilities() {
return Set.of(
Capability.TEXT_TO_TEXT,
Capability.TEXT_TO_IMAGE,
Capability.IMAGE_TO_TEXT,
Capability.FUNCTION_CALLING,
Capability.JSON_OBJECT,
Capability.JSON_SCHEMA,
Capability.TOOLS
// Note: TTS (Text-to-Speech) and native audio capabilities are available
// through experimental models but not yet exposed through standard capabilities
);
}
@Override
public ModelTextResponse textToText(ModelTextRequest prompt) {
super.textToText(prompt);
return processPrompt(prompt);
}
@Override
public ModelTextResponse imageToText(ModelTextRequest prompt) throws UnsupportedCapabilityException {
super.imageToText(prompt);
return processPrompt(prompt);
}
@Override
public ModelImageResponse textToImage(ModelImageRequest prompt) {
super.textToImage(prompt);
String message = prompt.getPrompt();
String imageModel = Optional.ofNullable(prompt.getModel())
.orElse(Optional.ofNullable(config.getImageModel())
.orElse(GEMINI_IMAGE_DEFAULT));
// For Gemini image generation, we need to use the special model with responseModalities
GeminiContent userContent = GeminiContent.builder()
.role("user")
.parts(List.of(Part.builder()
.text(message)
.build()))
.build();
ImageGenerationConfig generationConfig = ImageGenerationConfig.builder()
.temperature(getTemperature())
.candidateCount(prompt.getN())
.responseModalities(List.of("TEXT", "IMAGE"))
.build();
GeminiImageRequest imageRequest = GeminiImageRequest.builder()
.contents(List.of(userContent))
.generationConfig(generationConfig)
.build();
// Convert to standard chat request for the API
GeminiChatRequest chatRequest = GeminiChatRequest.builder()
.contents(imageRequest.getContents())
.generationConfig(GeminiGenerationConfig.builder()
.temperature(generationConfig.getTemperature())
.candidateCount(generationConfig.getCandidateCount())
.build())
.safetySettings(imageRequest.getSafetySettings())
.build();
try {
GeminiChatResponse response = client.generateContent(imageModel, chatRequest);
List<ModelContentElement.ImageData> images = new ArrayList<>();
if (response.getCandidates() != null) {
for (GeminiChatResponse.Candidate candidate : response.getCandidates()) {
if (candidate.getContent() == null || candidate.getContent().getParts() == null) {
continue;
}
for (Part part : candidate.getContent().getParts()) {
if (part.getInlineData() == null) {
continue;
}
GeminiUtils.ImageData imageData = GeminiUtils.base64toBytes(
part.getInlineData().getMimeType(),
part.getInlineData().getData()
);
images.add(new ModelContentElement.ImageData(
imageData.getImage(),
imageData.getMimeType()
));
}
}
}
return ModelImageResponse.builder()
.model(imageModel)
.bytes(images)
.createdTime(System.currentTimeMillis())
.build();
} catch (Exception e) {
log.error("Error generating image with Gemini", e);
throw new RuntimeException("Failed to generate image", e);
}
}
@Nullable
private ModelTextResponse processPrompt(ModelTextRequest prompt) {
String model = Optional.ofNullable(prompt.getModel()).orElse(getModel());
// Build contents from messages
List<GeminiContent> contents = new ArrayList<>();
GeminiContent systemInstruction = null;
for (ModelContentMessage message : prompt.getMessages()) {
String role = message.getRole().name().toLowerCase();
// Handle system messages as system instruction
if ("system".equals(role)) {
List<Part> parts = new ArrayList<>();
for (ModelContentElement element : message.getContent()) {
if (element.getType() != ModelTextRequest.MessageType.text) {
continue;
}
parts.add(Part.builder()
.text(element.getText())
.build());
}
systemInstruction = GeminiContent.builder()
.parts(parts)
.build();
continue;
}
// Map role appropriately
if ("assistant".equals(role)) {
role = "model";
}
List<Part> parts = new ArrayList<>();
for (ModelContentElement element : message.getContent()) {
switch (element.getType()) {
case text:
parts.add(Part.builder()
.text(element.getText())
.build());
break;
case image:
if (element.getImage() == null) {
continue;
}
parts.add(Part.builder()
.inlineData(GeminiUtils.createInlineData(
element.getImage().getImage(),
element.getImage().getMimeType()
))
.build());
break;
}
}
contents.add(GeminiContent.builder()
.role(role)
.parts(parts)
.build());
}
// Add system messages from config if not already present
if (systemInstruction == null && CollectionUtils.isNotEmpty(getSystemMessages())) {
List<String> systemMessages = getSystemMessages();
List<Part> systemParts = systemMessages.stream()
.map(msg -> Part.builder().text(msg).build())
.collect(Collectors.toList());
systemInstruction = GeminiContent.builder()
.parts(systemParts)
.build();
}
// Build generation config
GeminiGenerationConfig.GeminiGenerationConfigBuilder configBuilder = GeminiGenerationConfig.builder()
.temperature(Optional.ofNullable(prompt.getTemperature()).orElse(getTemperature()))
.topP(getTopP())
.maxOutputTokens(Optional.ofNullable(getMaxCompletionTokens()).orElse(getMaxTokens()))
.stopSequences(getStop())
.presencePenalty(getPresencePenalty())
.frequencyPenalty(getFrequencyPenalty());
// Handle structured output
if (jsonObjectSupport && prompt.getResponseFormat() != null) {
if (prompt.getResponseFormat().getType() == ResponseFormat.ResponseType.JSON_OBJECT) {
configBuilder.responseMimeType("application/json");
} else if (prompt.getResponseFormat().getType() == ResponseFormat.ResponseType.JSON_SCHEMA) {
configBuilder.responseMimeType("application/json");
configBuilder.responseSchema(GeminiUtils.convertToGeminiSchema(prompt.getResponseFormat().getJsonSchema()));
}
}
// Handle logprobs
if (Boolean.TRUE.equals(prompt.getLogprobs()) || Boolean.TRUE.equals(getLogprobs())) {
configBuilder.responseLogprobs(true);
configBuilder.logprobs(Optional.ofNullable(prompt.getTopLogprobs()).orElse(getTopLogprobs()));
}
// Handle reasoning/thinking for Gemini 2.5 models
if (model != null && model.contains("2.5") && prompt.getReasoningEffort() != null) {
ThinkingConfig.ThinkingConfigBuilder thinkingBuilder = ThinkingConfig.builder();
switch (prompt.getReasoningEffort()) {
case none:
thinkingBuilder.thinkingBudget(0); // Disable thinking
break;
case low:
thinkingBuilder.thinkingBudget(4096); // Low thinking budget
break;
case medium:
thinkingBuilder.thinkingBudget(8192); // Medium thinking budget
break;
case dynamic:
thinkingBuilder.thinkingBudget(-1); // Dynamic thinking
break;
case high:
// Use a higher budget for "high" effort; Pro supports thinking budgets from 128 to 32768 tokens
if (model.contains("pro")) {
thinkingBuilder.thinkingBudget(32768); // Max for Pro
} else if (model.contains("lite")) {
// Lite supports 512 to 24576 thinking tokens
thinkingBuilder.thinkingBudget(16384); // Mid-range for Lite
} else {
// Flash supports 0 to 24576 thinking tokens
thinkingBuilder.thinkingBudget(16384); // Mid-range for Flash
}
}
thinkingBuilder.includeThoughts(true);
break;
}
configBuilder.thinkingConfig(thinkingBuilder.build());
}
GeminiGenerationConfig generationConfig = configBuilder.build();
// Handle tools/functions
List<GeminiTool> tools = null;
GeminiChatRequest.ToolConfig toolConfig = null;
if (prompt.getToolMode() != ToolMode.none) {
List<Tool> modelTools = CollectionUtils.isNotEmpty(prompt.getTools()) ? prompt.getTools() : getTools();
if (CollectionUtils.isNotEmpty(modelTools)) {
tools = List.of(GeminiUtils.convertToGeminiTool(modelTools));
// Set tool config based on mode
String mode = prompt.getToolMode() == ToolMode.auto ? "AUTO" : "ANY";
toolConfig = GeminiChatRequest.ToolConfig.builder()
.functionCallingConfig(GeminiChatRequest.ToolConfig.FunctionCallingConfig.builder()
.mode(mode)
.build())
.build();
}
}
// Build the request
GeminiChatRequest request = GeminiChatRequest.builder()
.contents(contents)
.systemInstruction(systemInstruction)
.generationConfig(generationConfig)
.tools(tools)
.toolConfig(toolConfig)
.build();
try {
GeminiChatResponse response = client.generateContent(model, request);
return mapToModelTextResponse(response);
} catch (Exception e) {
log.error("Error calling Gemini API", e);
throw new RuntimeException("Failed to call Gemini API", e);
}
}
private ModelTextResponse mapToModelTextResponse(GeminiChatResponse response) {
if (response == null) {
return null;
}
List<ResponseMessage> choices = new ArrayList<>();
if (response.getCandidates() != null) {
for (int i = 0; i < response.getCandidates().size(); i++) {
GeminiChatResponse.Candidate candidate = response.getCandidates().get(i);
// Extract text content
StringBuilder contentBuilder = new StringBuilder();
List<ToolCall> toolCalls = new ArrayList<>();
if (candidate.getContent() != null && candidate.getContent().getParts() != null) {
for (Part part : candidate.getContent().getParts()) {
if (part.getText() != null) {
contentBuilder.append(part.getText());
} else if (part.getFunctionCall() != null) {
// Convert function call to tool call
Map<String, JsonNode> arguments = new HashMap<>();
if (part.getFunctionCall().getArgs() != null) {
part.getFunctionCall().getArgs().forEach((key, value) -> {
try {
JsonNode node = ModelUtils.OBJECT_MAPPER.valueToTree(value);
arguments.put(key, node);
} catch (Exception e) {
log.error("Error converting argument to JsonNode", e);
}
});
}
toolCalls.add(ToolCall.builder()
.id(UUID.randomUUID().toString())
.type("function")
.function(ToolCall.FunctionCall.builder()
.name(part.getFunctionCall().getName())
.arguments(arguments)
.build())
.build());
}
}
}
String content = contentBuilder.toString();
if (JsonUtils.isJSON(content) && !JsonUtils.isValidJSON(content)) {
content = JsonUtils.fixIncompleteJSON(content);
}
ModelMessage message = ModelMessage.builder()
.role(Role.assistant)
.content(content)
.toolCalls(toolCalls.isEmpty() ? null : toolCalls)
.build();
// Handle logprobs if present
LogProbs logProbs = null;
if (candidate.getLogprobsResult() != null) {
List<LogProbs.TokenLogProb> tokenLogProbs = new ArrayList<>();
if (candidate.getLogprobsResult().getChosenCandidates() != null) {
for (GeminiChatResponse.TopCandidate topCandidate : candidate.getLogprobsResult().getChosenCandidates()) {
List<LogProbs.TopLogProb> topLogProbs = new ArrayList<>();
// Add the chosen candidate as the first top logprob
topLogProbs.add(LogProbs.TopLogProb.builder()
.token(topCandidate.getToken())
.logprob(topCandidate.getLogProbability())
.build());
// Add other top candidates if available
if (candidate.getLogprobsResult().getTopCandidates() != null) {
for (GeminiChatResponse.TopCandidate tc : candidate.getLogprobsResult().getTopCandidates()) {
if (!tc.getToken().equals(topCandidate.getToken())) {
topLogProbs.add(LogProbs.TopLogProb.builder()
.token(tc.getToken())
.logprob(tc.getLogProbability())
.build());
}
}
}
tokenLogProbs.add(LogProbs.TokenLogProb.builder()
.token(topCandidate.getToken())
.logprob(topCandidate.getLogProbability())
.topLogprobs(topLogProbs)
.build());
}
}
logProbs = LogProbs.builder()
.content(tokenLogProbs)
.build();
}
choices.add(ResponseMessage.builder()
.index(i)
.message(message)
.finishReason(candidate.getFinishReason())
.logprobs(logProbs)
.build());
}
}
// Map usage
Usage usage = null;
if (response.getUsageMetadata() != null) {
usage = new Usage(
response.getUsageMetadata().getPromptTokenCount(),
response.getUsageMetadata().getCandidatesTokenCount(),
response.getUsageMetadata().getTotalTokenCount()
);
}
return ModelTextResponse.builder()
.id(UUID.randomUUID().toString())
.method("gemini.chat.completions")
.createdTime(System.currentTimeMillis())
.model(response.getModelVersion())
.choices(choices)
.usage(usage)
.build();
}
@Override
public StreamingResponse<String> streamTextToText(ModelTextRequest prompt) {
// Streaming is built on the text-to-text capability
if (!getCapabilities().contains(Capability.TEXT_TO_TEXT)) {
throw new UnsupportedCapabilityException("Text to text is not supported");
}
return new StreamingResponse<String>() {
private final AtomicBoolean active = new AtomicBoolean(false);
private final AtomicBoolean cancelled = new AtomicBoolean(false);
private CompletableFuture<Void> streamFuture;
private SSESubscriber sseSubscriber;
@Override
public void subscribe(StreamingCallback<String> callback) {
if (!active.compareAndSet(false, true)) {
callback.onError(new IllegalStateException("Stream already subscribed"));
return;
}
streamFuture = CompletableFuture.runAsync(() -> {
try {
sseSubscriber = processStreamingPrompt(prompt, callback, cancelled);
} catch (Exception e) {
if (!cancelled.get()) {
callback.onError(e);
}
} finally {
active.set(false);
}
});
}
@Override
public void cancel() {
cancelled.set(true);
if (sseSubscriber != null) {
sseSubscriber.cancel();
}
if (streamFuture != null) {
streamFuture.cancel(true);
}
active.set(false);
}
@Override
public boolean isActive() {
return active.get();
}
};
}
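/*
* Minimal subscription sketch (illustrative; the callback method names follow the
* StreamingCallback usage in this class):
*
* StreamingResponse<String> stream = streamTextToText(prompt);
* stream.subscribe(new StreamingCallback<String>() {
* public void onNext(String token) { System.out.print(token); }
* public void onComplete() { System.out.println(); }
* public void onError(Throwable error) { error.printStackTrace(); }
* });
*/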
private SSESubscriber processStreamingPrompt(ModelTextRequest prompt, StreamingCallback<String> callback, AtomicBoolean cancelled) throws Exception {
// Build Gemini request
GeminiChatRequest request = buildGeminiRequest(prompt);
// Ensure a generation config is present for the streaming request
if (request.getGenerationConfig() == null) {
request.setGenerationConfig(new GeminiGenerationConfig());
}
String model = Optional.ofNullable(prompt.getModel()).orElse(getModel());
String apiKey = config.getApiKey();
String baseUrl = Optional.ofNullable(config.getBaseUrl()).orElse("https://generativelanguage.googleapis.com");
String requestBody = JsonUtils.toJson(request);
// Build HTTP request for streaming with alt=sse parameter for SSE format
HttpRequest httpRequest = HttpRequest.newBuilder()
.uri(URI.create(baseUrl + "/v1beta/models/" + model + ":streamGenerateContent?alt=sse"))
.header("Content-Type", "application/json")
.header("x-goog-api-key", apiKey)
.POST(HttpRequest.BodyPublishers.ofString(requestBody))
.timeout(Duration.ofMinutes(5))
.build();
// Create SSE subscriber
SSESubscriber sseSubscriber = new SSESubscriber(callback, cancelled);
// Send request with streaming response asynchronously
httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.fromLineSubscriber(sseSubscriber))
.thenAccept(response -> {
// Check for errors
if (response.statusCode() >= 400) {
callback.onError(new RuntimeException("Gemini API error: HTTP " + response.statusCode()));
}
})
.exceptionally(throwable -> {
callback.onError(throwable);
return null;
});
return sseSubscriber;
}
/**
* SSE Subscriber for handling streaming responses from Gemini
*/
private static class SSESubscriber implements Flow.Subscriber<String> {
private final StreamingCallback<String> callback;
private final AtomicBoolean cancelled;
private Flow.Subscription subscription;
private boolean completed = false;
public SSESubscriber(StreamingCallback<String> callback, AtomicBoolean cancelled) {
this.callback = callback;
this.cancelled = cancelled;
}
public void cancel() {
if (subscription != null) {
subscription.cancel();
}
completed = true;
}
@Override
public void onSubscribe(Flow.Subscription subscription) {
this.subscription = subscription;
subscription.request(Long.MAX_VALUE);
}
@Override
public void onNext(String line) {
if (cancelled.get() || completed) {
// Don't cancel if already completed - just ignore further messages
if (cancelled.get() && !completed) {
subscription.cancel();
}
return;
}
try {
// Skip empty lines
if (line.trim().isEmpty()) {
return;
}
// Handle SSE format
String data = line;
if (line.startsWith("data: ")) {
data = line.substring(6);
// Skip "[DONE]" marker
if (data.equals("[DONE]")) {
completed = true;
callback.onComplete();
return;
}
}
// Try to parse as JSON directly (each line should be a complete JSON object)
if (data.trim().startsWith("{")) {
try {
GeminiChatResponse chunk = JsonUtils.fromJson(data, GeminiChatResponse.class);
// Extract text from the chunk
if (chunk != null && chunk.getCandidates() != null && !chunk.getCandidates().isEmpty()) {
GeminiChatResponse.Candidate candidate = chunk.getCandidates().get(0);
if (candidate.getContent() != null && candidate.getContent().getParts() != null) {
for (Part part : candidate.getContent().getParts()) {
if (part.getText() != null && !part.getText().isEmpty()) {
callback.onNext(part.getText());
}
}
}
// Check if stream is finished
if (candidate.getFinishReason() != null) {
completed = true;
callback.onComplete();
}
}
} catch (Exception e) {
log.debug("Failed to parse chunk as JSON: {}", e.getMessage());
// Could accumulate in buffer for multi-line JSON, but Gemini typically sends complete JSON per line
}
}
} catch (Exception e) {
log.error("Error processing streaming line: {}", line, e);
// Continue processing other lines
}
}
@Override
public void onError(Throwable throwable) {
if (!cancelled.get() && !completed) {
completed = true;
callback.onError(throwable);
}
}
@Override
public void onComplete() {
if (!cancelled.get() && !completed) {
completed = true;
callback.onComplete();
}
}
}
private GeminiChatRequest buildGeminiRequest(ModelTextRequest prompt) {
// Extract configuration
Double temperature = Optional.ofNullable(prompt.getTemperature()).orElse(getTemperature());
String model = Optional.ofNullable(prompt.getModel()).orElse(getModel());
// Build Gemini contents
List<GeminiContent> contents = new ArrayList<>();
// Process messages
for (ModelContentMessage message : prompt.getMessages()) {
List<Part> parts = new ArrayList<>();
if (message.getContent() != null) {
for (ModelContentElement element : message.getContent()) {
if (element.getText() != null) {
parts.add(Part.builder().text(element.getText()).build());
}
}
}
String role = message.getRole() == Role.assistant ? "model" : message.getRole().name();
contents.add(GeminiContent.builder()
.role(role)
.parts(parts)
.build());
}
// Build generation config
GeminiGenerationConfig generationConfig = GeminiGenerationConfig.builder()
.temperature(temperature)
.candidateCount(1)
.build();
// Add tools if present
List<GeminiTool> tools = null;
if (prompt.getTools() != null && !prompt.getTools().isEmpty()) {
GeminiTool tool = GeminiUtils.convertToGeminiTool(prompt.getTools());
if (tool != null) {
tools = List.of(tool);
}
}
return GeminiChatRequest.builder()
.contents(contents)
.generationConfig(generationConfig)
.tools(tools)
.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiChatRequest.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class GeminiChatRequest {
@JsonProperty("contents")
private List<GeminiContent> contents;
@JsonProperty("systemInstruction")
private GeminiContent systemInstruction;
@JsonProperty("tools")
private List<GeminiTool> tools;
@JsonProperty("toolConfig")
private ToolConfig toolConfig;
@JsonProperty("generationConfig")
private GeminiGenerationConfig generationConfig;
@JsonProperty("safetySettings")
private List<GeminiSafetySettings> safetySettings;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ToolConfig {
@JsonProperty("functionCallingConfig")
private FunctionCallingConfig functionCallingConfig;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class FunctionCallingConfig {
@JsonProperty("mode")
private String mode; // AUTO, ANY, NONE
@JsonProperty("allowedFunctionNames")
private List<String> allowedFunctionNames;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiChatResponse.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public class GeminiChatResponse {
@JsonProperty("candidates")
private List<Candidate> candidates;
@JsonProperty("usageMetadata")
private UsageMetadata usageMetadata;
@JsonProperty("modelVersion")
private String modelVersion;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Candidate {
@JsonProperty("content")
private GeminiContent content;
@JsonProperty("finishReason")
private String finishReason; // STOP, SAFETY, MAX_TOKENS, etc.
@JsonProperty("safetyRatings")
private List<SafetyRating> safetyRatings;
@JsonProperty("citationMetadata")
private CitationMetadata citationMetadata;
@JsonProperty("tokenCount")
private Integer tokenCount;
@JsonProperty("groundingAttributions")
private List<GroundingAttribution> groundingAttributions;
@JsonProperty("logprobsResult")
private LogprobsResult logprobsResult;
@JsonProperty("index")
private Integer index;
@JsonProperty("thoughts")
private List<String> thoughts; // Synthesized thoughts when includeThoughts is true
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public static class UsageMetadata {
@JsonProperty("promptTokenCount")
private Integer promptTokenCount;
@JsonProperty("candidatesTokenCount")
private Integer candidatesTokenCount;
@JsonProperty("totalTokenCount")
private Integer totalTokenCount;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class SafetyRating {
@JsonProperty("category")
private String category;
@JsonProperty("probability")
private String probability; // NEGLIGIBLE, LOW, MEDIUM, HIGH
@JsonProperty("blocked")
private Boolean blocked;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class CitationMetadata {
@JsonProperty("citations")
private List<Citation> citations;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class Citation {
@JsonProperty("startIndex")
private Integer startIndex;
@JsonProperty("endIndex")
private Integer endIndex;
@JsonProperty("uri")
private String uri;
@JsonProperty("title")
private String title;
@JsonProperty("license")
private String license;
@JsonProperty("publicationDate")
private String publicationDate;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class GroundingAttribution {
@JsonProperty("sourceId")
private String sourceId;
@JsonProperty("content")
private GeminiContent content;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class LogprobsResult {
@JsonProperty("topCandidates")
private List<TopCandidate> topCandidates;
@JsonProperty("chosenCandidates")
private List<TopCandidate> chosenCandidates;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class TopCandidate {
@JsonProperty("token")
private String token;
@JsonProperty("tokenId")
private Integer tokenId;
@JsonProperty("logProbability")
private Double logProbability;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiContent.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class GeminiContent {
@JsonProperty("role")
private String role; // user, model, function
@JsonProperty("parts")
private List<Part> parts;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class Part {
@JsonProperty("text")
private String text;
@JsonProperty("inlineData")
private InlineData inlineData;
@JsonProperty("fileData")
private FileData fileData;
@JsonProperty("functionCall")
private FunctionCall functionCall;
@JsonProperty("functionResponse")
private FunctionResponse functionResponse;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class InlineData {
@JsonProperty("mimeType")
private String mimeType;
@JsonProperty("data")
private String data; // Base64 encoded
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class FileData {
@JsonProperty("mimeType")
private String mimeType;
@JsonProperty("fileUri")
private String fileUri;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class FunctionCall {
@JsonProperty("name")
private String name;
@JsonProperty("args")
private Map<String, Object> args;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class FunctionResponse {
@JsonProperty("name")
private String name;
@JsonProperty("response")
private Map<String, Object> response;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiGenerationConfig.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class GeminiGenerationConfig {
@JsonProperty("temperature")
private Double temperature;
@JsonProperty("topP")
private Double topP;
@JsonProperty("topK")
private Integer topK;
@JsonProperty("candidateCount")
private Integer candidateCount;
@JsonProperty("maxOutputTokens")
private Integer maxOutputTokens;
@JsonProperty("stopSequences")
private List<String> stopSequences;
@JsonProperty("presencePenalty")
private Double presencePenalty;
@JsonProperty("frequencyPenalty")
private Double frequencyPenalty;
@JsonProperty("responseMimeType")
private String responseMimeType; // text/plain, application/json, text/x.enum
@JsonProperty("responseSchema")
private GeminiSchema responseSchema; // JSON schema for structured output
@JsonProperty("responseLogprobs")
private Boolean responseLogprobs;
@JsonProperty("logprobs")
private Integer logprobs; // Number of top logprobs to return
@JsonProperty("thinkingConfig")
private ThinkingConfig thinkingConfig;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ThinkingConfig {
@JsonProperty("thinkingBudget")
private Integer thinkingBudget; // 0 to disable, -1 for dynamic, or specific token count
@JsonProperty("includeThoughts")
private Boolean includeThoughts; // Include synthesized thoughts in response
}
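/*
* Minimal builder sketch (illustrative): structured JSON output with a dynamic thinking budget
* and synthesized thoughts included in the response.
*
* GeminiGenerationConfig cfg = GeminiGenerationConfig.builder()
* .responseMimeType("application/json")
* .thinkingConfig(ThinkingConfig.builder()
* .thinkingBudget(-1) // dynamic
* .includeThoughts(true)
* .build())
* .build();
*/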
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiImageRequest.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class GeminiImageRequest {
@JsonProperty("contents")
private List<GeminiContent> contents;
@JsonProperty("generationConfig")
private ImageGenerationConfig generationConfig;
@JsonProperty("safetySettings")
private List<GeminiSafetySettings> safetySettings;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ImageGenerationConfig {
@JsonProperty("temperature")
private Double temperature;
@JsonProperty("topP")
private Double topP;
@JsonProperty("topK")
private Integer topK;
@JsonProperty("candidateCount")
private Integer candidateCount;
@JsonProperty("maxOutputTokens")
private Integer maxOutputTokens;
@JsonProperty("stopSequences")
private List<String> stopSequences;
@JsonProperty("responseModalities")
private List<String> responseModalities; // ["TEXT", "IMAGE"]
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiSafetySettings.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class GeminiSafetySettings {
@JsonProperty("category")
private HarmCategory category;
@JsonProperty("threshold")
private HarmBlockThreshold threshold;
public enum HarmCategory {
HARM_CATEGORY_UNSPECIFIED("HARM_CATEGORY_UNSPECIFIED"),
HARM_CATEGORY_DEROGATORY("HARM_CATEGORY_DEROGATORY"),
HARM_CATEGORY_TOXICITY("HARM_CATEGORY_TOXICITY"),
HARM_CATEGORY_VIOLENCE("HARM_CATEGORY_VIOLENCE"),
HARM_CATEGORY_SEXUAL("HARM_CATEGORY_SEXUAL"),
HARM_CATEGORY_MEDICAL("HARM_CATEGORY_MEDICAL"),
HARM_CATEGORY_DANGEROUS("HARM_CATEGORY_DANGEROUS"),
HARM_CATEGORY_HARASSMENT("HARM_CATEGORY_HARASSMENT"),
HARM_CATEGORY_HATE_SPEECH("HARM_CATEGORY_HATE_SPEECH"),
HARM_CATEGORY_SEXUALLY_EXPLICIT("HARM_CATEGORY_SEXUALLY_EXPLICIT"),
HARM_CATEGORY_DANGEROUS_CONTENT("HARM_CATEGORY_DANGEROUS_CONTENT");
private final String value;
HarmCategory(String value) {
this.value = value;
}
@JsonValue
public String getValue() {
return value;
}
}
public enum HarmBlockThreshold {
HARM_BLOCK_THRESHOLD_UNSPECIFIED("HARM_BLOCK_THRESHOLD_UNSPECIFIED"),
BLOCK_LOW_AND_ABOVE("BLOCK_LOW_AND_ABOVE"),
BLOCK_MEDIUM_AND_ABOVE("BLOCK_MEDIUM_AND_ABOVE"),
BLOCK_ONLY_HIGH("BLOCK_ONLY_HIGH"),
BLOCK_NONE("BLOCK_NONE");
private final String value;
HarmBlockThreshold(String value) {
this.value = value;
}
@JsonValue
public String getValue() {
return value;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiSchema.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class GeminiSchema {
@JsonProperty("type")
private String type; // OBJECT, ARRAY, STRING, NUMBER, BOOLEAN
@JsonProperty("description")
private String description;
@JsonProperty("properties")
private Map<String, GeminiSchema> properties;
@JsonProperty("items")
private GeminiSchema items;
@JsonProperty("enum")
private List<String> enumValues;
@JsonProperty("required")
private List<String> required;
@JsonProperty("propertyOrdering")
private List<String> propertyOrdering;
@JsonProperty("nullable")
private Boolean nullable;
@JsonProperty("format")
private String format;
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/domain/GeminiTool.java
|
package ai.driftkit.clients.gemini.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class GeminiTool {
@JsonProperty("functionDeclarations")
private List<FunctionDeclaration> functionDeclarations;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class FunctionDeclaration {
@JsonProperty("name")
private String name;
@JsonProperty("description")
private String description;
@JsonProperty("parameters")
private FunctionParameters parameters;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class FunctionParameters {
@JsonProperty("type")
private String type; // Always "object"
@JsonProperty("properties")
private Map<String, PropertyDefinition> properties;
@JsonProperty("required")
private List<String> required;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class PropertyDefinition {
@JsonProperty("type")
private String type; // string, number, boolean, array, object
@JsonProperty("description")
private String description;
@JsonProperty("enum")
private List<String> enumValues;
@JsonProperty("items")
private PropertyDefinition items; // For array type
@JsonProperty("properties")
private Map<String, PropertyDefinition> properties; // For object type
@JsonProperty("required")
private List<String> required; // For object type
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini
|
java-sources/ai/driftkit/driftkit-clients-gemini/0.8.1/ai/driftkit/clients/gemini/utils/GeminiUtils.java
|
package ai.driftkit.clients.gemini.utils;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.clients.gemini.domain.GeminiContent;
import ai.driftkit.clients.gemini.domain.GeminiSchema;
import ai.driftkit.clients.gemini.domain.GeminiTool;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Slf4j
@UtilityClass
public class GeminiUtils {
// Stable models (2.5 series - latest release)
public static final String GEMINI_PRO_2_5 = "gemini-2.5-pro";
public static final String GEMINI_FLASH_2_5 = "gemini-2.5-flash";
public static final String GEMINI_FLASH_LITE_2_5 = "gemini-2.5-flash-lite";
// Experimental models
public static final String GEMINI_IMAGE_MODEL = "gemini-2.0-flash-preview-image-generation";
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class ImageData {
private byte[] image;
private String mimeType;
}
public static ImageData base64toBytes(String mimeType, String base64Data) {
try {
byte[] imageBytes = Base64.getDecoder().decode(base64Data);
return new ImageData(imageBytes, mimeType);
} catch (Exception e) {
log.error("Error decoding base64 image", e);
throw new RuntimeException("Failed to decode base64 image", e);
}
}
public static String bytesToBase64(byte[] bytes) {
return Base64.getEncoder().encodeToString(bytes);
}
public static GeminiContent.Part.InlineData createInlineData(byte[] data, String mimeType) {
return GeminiContent.Part.InlineData.builder()
.data(bytesToBase64(data))
.mimeType(mimeType)
.build();
}
public static GeminiTool convertToGeminiTool(List<ModelClient.Tool> tools) {
if (tools == null || tools.isEmpty()) {
return null;
}
List<GeminiTool.FunctionDeclaration> declarations = tools.stream()
.filter(tool -> tool.getType() == ModelClient.ResponseFormatType.function)
.map(tool -> {
ModelClient.ToolFunction function = tool.getFunction();
return GeminiTool.FunctionDeclaration.builder()
.name(function.getName())
.description(function.getDescription())
.parameters(convertFunctionParameters(function.getParameters()))
.build();
})
.collect(Collectors.toList());
return GeminiTool.builder()
.functionDeclarations(declarations)
.build();
}
private static GeminiTool.FunctionParameters convertFunctionParameters(ModelClient.ToolFunction.FunctionParameters params) {
if (params == null) {
return null;
}
Map<String, GeminiTool.PropertyDefinition> properties = new HashMap<>();
if (params.getProperties() != null) {
params.getProperties().forEach((key, value) -> {
properties.put(key, convertPropertyDefinition(value));
});
}
return GeminiTool.FunctionParameters.builder()
.type("object")
.properties(properties)
.required(params.getRequired())
.build();
}
private static GeminiTool.PropertyDefinition convertPropertyDefinition(ModelClient.Property property) {
if (property == null) {
return null;
}
GeminiTool.PropertyDefinition.PropertyDefinitionBuilder builder = GeminiTool.PropertyDefinition.builder()
.type(mapPropertyType(property.getType()))
.description(property.getDescription())
.enumValues(property.getEnumValues());
// Handle nested properties for objects
if (property.getProperties() != null) {
Map<String, GeminiTool.PropertyDefinition> nestedProps = new HashMap<>();
property.getProperties().forEach((key, value) -> {
nestedProps.put(key, convertPropertyDefinition(value));
});
builder.properties(nestedProps);
builder.required(property.getRequired());
}
// Handle array items
if (property.getItems() != null) {
builder.items(convertPropertyDefinition(property.getItems()));
}
return builder.build();
}
private static String mapPropertyType(ModelClient.ResponseFormatType type) {
if (type == null) {
return "string";
}
switch (type) {
case String:
case Enum:
return "string";
case Number:
return "number";
case Integer:
return "integer";
case Boolean:
return "boolean";
case Array:
return "array";
case Object:
return "object";
default:
return "string";
}
}
public static GeminiSchema convertToGeminiSchema(ai.driftkit.common.domain.client.ResponseFormat.JsonSchema schema) {
if (schema == null) {
return null;
}
GeminiSchema.GeminiSchemaBuilder builder = GeminiSchema.builder()
.type(mapSchemaType(schema.getType()))
.description(schema.getTitle()) // Use title as description
.required(schema.getRequired());
// Convert properties
if (schema.getProperties() != null) {
Map<String, GeminiSchema> properties = new HashMap<>();
schema.getProperties().forEach((key, value) -> {
properties.put(key, convertSchemaProperty(value));
});
builder.properties(properties);
}
// Note: Gemini schemas do not support additionalProperties directly;
// if needed, it must be handled via the generation config instead.
return builder.build();
}
private static GeminiSchema convertSchemaProperty(ai.driftkit.common.domain.client.ResponseFormat.SchemaProperty property) {
if (property == null) {
return null;
}
GeminiSchema.GeminiSchemaBuilder builder = GeminiSchema.builder()
.type(mapSchemaType(property.getType()))
.description(property.getDescription())
.enumValues(property.getEnumValues())
.required(property.getRequired());
// Handle nested properties
if (property.getProperties() != null) {
Map<String, GeminiSchema> properties = new HashMap<>();
property.getProperties().forEach((key, value) -> {
properties.put(key, convertSchemaProperty(value));
});
builder.properties(properties);
}
// Handle array items
if (property.getItems() != null) {
builder.items(convertSchemaProperty(property.getItems()));
}
return builder.build();
}
private static String mapSchemaType(String type) {
if (type == null) {
return "STRING";
}
switch (type.toLowerCase()) {
case "string":
return "STRING";
case "number":
case "integer":
return "NUMBER";
case "boolean":
return "BOOLEAN";
case "array":
return "ARRAY";
case "object":
return "OBJECT";
default:
return "STRING";
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/client/OpenAIApiClient.java
|
package ai.driftkit.clients.openai.client;
import ai.driftkit.clients.openai.domain.*;
import feign.Headers;
import feign.Param;
import feign.RequestLine;
import java.io.File;
public interface OpenAIApiClient {
@RequestLine("POST /v1/chat/completions")
@Headers("Content-Type: application/json")
ChatCompletionResponse createChatCompletion(ChatCompletionRequest request);
@RequestLine("POST /v1/audio/transcriptions")
@Headers("Content-Type: multipart/form-data")
AudioTranscriptionResponse createAudioTranscription(@Param("file") File file);
@RequestLine("POST /v1/embeddings")
@Headers("Content-Type: application/json")
EmbeddingsResponse createEmbedding(EmbeddingsRequest request);
@RequestLine("POST /v1/images/generations")
@Headers("Content-Type: application/json")
CreateImageResponse createImage(CreateImageRequest request);
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/client/OpenAIClientFactory.java
|
package ai.driftkit.clients.openai.client;
import feign.Feign;
import feign.jackson.JacksonDecoder;
import feign.jackson.JacksonEncoder;
import feign.slf4j.Slf4jLogger;
public class OpenAIClientFactory {
public static OpenAIApiClient createClient(String apiKey, String host) {
return Feign.builder()
.encoder(new JacksonEncoder())
.decoder(new JacksonDecoder())
.requestInterceptor(template -> template.header("Authorization", "Bearer " + apiKey))
.logger(new Slf4jLogger(OpenAIApiClient.class))
.logLevel(feign.Logger.Level.FULL)
.target(OpenAIApiClient.class, host);
}
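/*
* Minimal usage sketch (illustrative; assumes a populated ChatCompletionRequest and that the
* OPENAI_API_KEY environment variable holds the key):
*
* OpenAIApiClient api = OpenAIClientFactory.createClient(System.getenv("OPENAI_API_KEY"), "https://api.openai.com");
* ChatCompletionResponse response = api.createChatCompletion(request);
*/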
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/client/OpenAIModelClient.java
|
package ai.driftkit.clients.openai.client;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.common.domain.client.ModelClient.ModelClientInit;
import ai.driftkit.common.domain.client.ModelTextRequest.ReasoningEffort;
import ai.driftkit.common.domain.client.ResponseFormat.ResponseType;
import ai.driftkit.common.domain.streaming.StreamingResponse;
import ai.driftkit.common.domain.streaming.StreamingCallback;
import ai.driftkit.common.tools.ToolCall;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage.ModelContentElement;
import ai.driftkit.common.domain.client.ModelTextRequest.ToolMode;
import ai.driftkit.common.domain.client.ModelTextResponse.ResponseMessage;
import ai.driftkit.common.domain.client.ModelTextResponse.Usage;
import ai.driftkit.common.utils.JsonUtils;
import ai.driftkit.common.utils.ModelUtils;
import ai.driftkit.clients.openai.domain.ChatCompletionRequest;
import ai.driftkit.clients.openai.domain.ChatCompletionRequest.ContentMessage;
import ai.driftkit.clients.openai.domain.ChatCompletionRequest.Message;
import ai.driftkit.clients.openai.domain.ChatCompletionRequest.Message.ContentElement;
import ai.driftkit.clients.openai.domain.ChatCompletionRequest.Message.ImageContentElement;
import ai.driftkit.clients.openai.domain.ChatCompletionRequest.Message.TextContentElement;
import ai.driftkit.clients.openai.domain.ChatCompletionRequest.StringMessage;
import ai.driftkit.clients.openai.domain.ChatCompletionResponse;
import ai.driftkit.clients.openai.domain.ChatCompletionChunk;
import ai.driftkit.clients.openai.domain.CreateImageRequest;
import ai.driftkit.clients.openai.domain.CreateImageRequest.CreateImageRequestBuilder;
import ai.driftkit.clients.openai.domain.CreateImageRequest.Quality;
import ai.driftkit.clients.openai.domain.CreateImageResponse;
import ai.driftkit.clients.openai.utils.OpenAIUtils;
import ai.driftkit.clients.openai.utils.OpenAIUtils.ImageData;
import ai.driftkit.common.domain.client.LogProbs.TokenLogProb;
import ai.driftkit.common.domain.client.LogProbs.TopLogProb;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.Nullable;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Flow;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
@Slf4j
public class OpenAIModelClient extends ModelClient implements ModelClientInit {
public static final String GPT_DEFAULT = "gpt-4o";
public static final String GPT_SMART_DEFAULT = "o3-mini";
public static final String GPT_MINI_DEFAULT = "gpt-4o-mini";
public static final String IMAGE_MODEL_DEFAULT = "dall-e-3";
public static final String OPENAI_PREFIX = "openai";
public static final String GPT_IMAGE_1 = "gpt-image-1";
// Shared HttpClient for downloading images
private static final HttpClient httpClient = HttpClient.newBuilder()
.connectTimeout(Duration.ofSeconds(10))
.followRedirects(HttpClient.Redirect.NORMAL)
.build();
OpenAIApiClient client;
VaultConfig config;
@Override
public ModelClient init(VaultConfig config) {
this.config = config;
this.client = OpenAIClientFactory.createClient(
config.getApiKey(),
Optional.ofNullable(config.getBaseUrl()).orElse("https://api.openai.com")
);
this.setTemperature(config.getTemperature());
this.setModel(config.getModel());
this.setStop(config.getStop());
this.jsonObjectSupport = config.isJsonObject();
return this;
}
public static ModelClient create(VaultConfig config) {
OpenAIModelClient modelClient = new OpenAIModelClient();
modelClient.init(config);
return modelClient;
}
@Override
public Set<Capability> getCapabilities() {
return Set.of(
Capability.TEXT_TO_TEXT,
Capability.TEXT_TO_IMAGE,
Capability.FUNCTION_CALLING,
Capability.IMAGE_TO_TEXT,
Capability.JSON_OBJECT,
Capability.TOOLS,
Capability.JSON_SCHEMA
);
}
@Override
public ModelTextResponse imageToText(ModelTextRequest prompt) throws UnsupportedCapabilityException {
super.imageToText(prompt);
return processPrompt(prompt);
}
@Override
public ModelImageResponse textToImage(ModelImageRequest prompt) {
super.textToImage(prompt);
String message = prompt.getPrompt();
// Determine which model to use
String imageModel = prompt.getModel();
if (StringUtils.isBlank(imageModel)) {
imageModel = config.getImageModel();
if (StringUtils.isBlank(imageModel)) {
imageModel = IMAGE_MODEL_DEFAULT;
}
}
// Determine quality
String qualityStr = prompt.getQuality();
if (StringUtils.isBlank(qualityStr)) {
qualityStr = config.getImageQuality();
if (StringUtils.isBlank(qualityStr)) {
qualityStr = "low";
}
}
// Convert quality string to enum
Quality quality;
try {
quality = Quality.valueOf(qualityStr);
} catch (IllegalArgumentException e) {
log.warn("Invalid image quality value: {}, using 'low' as default", qualityStr);
quality = Quality.low;
}
// Determine size
String outputFormat = null;
Integer compression = null;
String size;
if (GPT_IMAGE_1.equals(imageModel)) {
// For gpt-image-1, always use "auto"
size = "auto";
outputFormat = "jpeg";
compression = 90;
} else {
// For other models, use size from request or config
size = prompt.getSize();
if (StringUtils.isBlank(size)) {
size = config.getImageSize();
if (StringUtils.isBlank(size)) {
size = "1024x1024";
}
}
switch (quality) {
case low:
case medium:
quality = Quality.standard;
break;
case high:
quality = Quality.hd;
break;
}
}
CreateImageRequestBuilder style = CreateImageRequest.builder()
.prompt(message)
.quality(quality)
.size(size)
.outputFormat(outputFormat)
.compression(compression)
.n(prompt.getN())
.model(imageModel);
CreateImageResponse imageResponse = client.createImage(style.build());
// Process images in parallel for performance
List<CompletableFuture<ModelContentElement.ImageData>> imageFutures = imageResponse.getData()
.stream()
.map(e -> CompletableFuture.supplyAsync(() -> {
try {
if (StringUtils.isNotBlank(e.getB64Json())) {
// Handle base64 encoded image
ImageData openAIImage = OpenAIUtils.base64toBytes("image/jpeg", e.getB64Json());
return new ModelContentElement.ImageData(openAIImage.getImage(), openAIImage.getMimeType());
} else if (StringUtils.isNotBlank(e.getUrl())) {
// Handle URL - download the image
return downloadImage(e.getUrl());
} else {
log.warn("Image data has neither base64 nor URL");
return null;
}
} catch (Exception ex) {
log.error("Error processing image data", ex);
return null;
}
}))
.toList();
// Wait for all downloads to complete
List<ModelContentElement.ImageData> image = imageFutures.stream()
.map(CompletableFuture::join)
.filter(Objects::nonNull)
.toList();
return ModelImageResponse.builder()
.model(imageModel)
.bytes(image)
.createdTime(imageResponse.getCreated())
.revisedPrompt(imageResponse.getData().getFirst().getRevisedPrompt())
.build();
}
@Override
public ModelTextResponse textToText(ModelTextRequest prompt) {
super.textToText(prompt);
return processPrompt(prompt);
}
@Override
public StreamingResponse<String> streamTextToText(ModelTextRequest prompt) throws UnsupportedCapabilityException {
return new StreamingResponse<String>() {
private final AtomicBoolean active = new AtomicBoolean(false);
private final AtomicBoolean cancelled = new AtomicBoolean(false);
private CompletableFuture<Void> streamFuture;
@Override
public void subscribe(StreamingCallback<String> callback) {
if (!active.compareAndSet(false, true)) {
callback.onError(new IllegalStateException("Stream already subscribed"));
return;
}
streamFuture = CompletableFuture.runAsync(() -> {
try {
processStreamingPrompt(prompt, callback, cancelled);
} catch (Exception e) {
if (!cancelled.get()) {
callback.onError(e);
}
} finally {
active.set(false);
}
});
}
@Override
public void cancel() {
cancelled.set(true);
if (streamFuture != null) {
streamFuture.cancel(true);
}
active.set(false);
}
@Override
public boolean isActive() {
return active.get();
}
};
}
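/*
* Usage sketch for the streaming API above (illustrative only). It assumes that
* StreamingCallback declares the onNext/onComplete/onError methods invoked by
* processStreamingPrompt below:
*
*   StreamingResponse<String> stream = client.streamTextToText(request);
*   stream.subscribe(new StreamingCallback<String>() {
*       public void onNext(String token) { System.out.print(token); }
*       public void onComplete() { System.out.println(); }
*       public void onError(Throwable t) { log.error("stream failed", t); }
*   });
*   // stream.cancel() stops delivery; isActive() reports whether the stream is still running.
*/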
private void processStreamingPrompt(ModelTextRequest prompt, StreamingCallback<String> callback, AtomicBoolean cancelled) throws Exception {
List<Tool> tools = prompt.getToolMode() == ToolMode.none ? null : getTools();
if (CollectionUtils.isEmpty(tools)) {
tools = prompt.getTools();
}
String model = Optional.ofNullable(prompt.getModel()).orElse(getModel());
Double temperature = Optional.ofNullable(prompt.getTemperature()).orElse(getTemperature());
Boolean logprobs = Optional.ofNullable(prompt.getLogprobs()).orElse(getLogprobs());
Integer topLogprobs = Optional.ofNullable(prompt.getTopLogprobs()).orElse(getTopLogprobs());
ChatCompletionRequest.ResponseFormat responseFormat = prompt.getResponseFormat() == null || prompt.getResponseFormat().getType() == ResponseType.TEXT ? null : new ChatCompletionRequest.ResponseFormat(
prompt.getResponseFormat().getType().getValue(),
convertModelJsonSchema(prompt.getResponseFormat().getJsonSchema())
);
ChatCompletionRequest.ChatCompletionRequestBuilder reqBuilder = ChatCompletionRequest.builder()
.model(model)
.n(1)
.stream(true) // Enable streaming
.maxTokens(getMaxTokens())
.maxCompletionTokens(getMaxCompletionTokens())
.temperature(temperature)
.tools(tools)
.responseFormat(responseFormat)
.logprobs(logprobs)
.messages(
prompt.getMessages().stream()
.map(this::toStreamingMessage) // Use special converter for streaming
.toList()
);
// Only add topLogprobs if logprobs is enabled
if (Boolean.TRUE.equals(logprobs)) {
reqBuilder.topLogprobs(topLogprobs);
}
ChatCompletionRequest req = reqBuilder.build();
// Prepare the HTTP request for SSE streaming
String apiKey = config.getApiKey();
String baseUrl = Optional.ofNullable(config.getBaseUrl()).orElse("https://api.openai.com");
String requestBody = JsonUtils.toJson(req);
HttpRequest httpRequest = HttpRequest.newBuilder()
.uri(URI.create(baseUrl + "/v1/chat/completions"))
.header("Content-Type", "application/json")
.header("Authorization", "Bearer " + apiKey)
.header("Accept", "text/event-stream")
.POST(HttpRequest.BodyPublishers.ofString(requestBody))
.timeout(Duration.ofMinutes(5))
.build();
// Create SSE subscriber
SSESubscriber sseSubscriber = new SSESubscriber(callback, cancelled);
// Send request with streaming response
HttpResponse<Void> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.fromLineSubscriber(sseSubscriber));
// Check for errors
if (response.statusCode() >= 400) {
// Try to get error body
HttpRequest errorRequest = HttpRequest.newBuilder()
.uri(URI.create(baseUrl + "/v1/chat/completions"))
.header("Content-Type", "application/json")
.header("Authorization", "Bearer " + apiKey)
.POST(HttpRequest.BodyPublishers.ofString(requestBody))
.build();
HttpResponse<String> errorResponse = httpClient.send(errorRequest, HttpResponse.BodyHandlers.ofString());
log.error("Error response: {}", errorResponse.body());
throw new RuntimeException("OpenAI API error: HTTP " + response.statusCode() + " - " + errorResponse.body());
}
}
/**
* SSE Subscriber for handling streaming responses
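* <p>Each event arrives as a line prefixed with "data: ". A typical payload looks
* roughly like (illustrative, abridged):
* data: {"id":"chatcmpl-...","choices":[{"delta":{"content":"Hel"},"finish_reason":null}]}
* and the stream is terminated by the literal line "data: [DONE]".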
*/
private static class SSESubscriber implements Flow.Subscriber<String> {
private final StreamingCallback<String> callback;
private final AtomicBoolean cancelled;
private Flow.Subscription subscription;
private boolean completed = false;
public SSESubscriber(StreamingCallback<String> callback, AtomicBoolean cancelled) {
this.callback = callback;
this.cancelled = cancelled;
}
@Override
public void onSubscribe(Flow.Subscription subscription) {
this.subscription = subscription;
subscription.request(Long.MAX_VALUE);
}
@Override
public void onNext(String line) {
if (cancelled.get() || completed) {
// Don't cancel if already completed - just ignore further messages
if (cancelled.get() && !completed) {
subscription.cancel();
}
return;
}
try {
// Check if this is an error response (JSON object starting with {)
if (line.trim().startsWith("{") && line.contains("\"error\"")) {
// This is an error response, not SSE
log.error("Received error response: {}", line);
completed = true;
callback.onError(new RuntimeException("OpenAI API error: " + line));
// Cancel subscription for error case
if (subscription != null) {
subscription.cancel();
}
return;
}
// SSE format: lines starting with "data: "
if (line.startsWith("data: ")) {
String data = line.substring(6).trim();
// Check for end of stream
if ("[DONE]".equals(data)) {
completed = true;
callback.onComplete();
return; // Don't cancel subscription yet - let it complete naturally
}
// Parse the JSON chunk
ChatCompletionChunk chunk = JsonUtils.fromJson(data, ChatCompletionChunk.class);
// Extract content from the chunk
if (chunk != null && chunk.getChoices() != null && !chunk.getChoices().isEmpty()) {
ChatCompletionChunk.ChunkChoice choice = chunk.getChoices().get(0);
if (choice != null && choice.getDelta() != null && choice.getDelta().getContent() != null) {
String content = choice.getDelta().getContent();
callback.onNext(content);
}
// Check if stream is finished
if (choice != null && choice.getFinishReason() != null) {
// Don't call onComplete here - wait for [DONE] message
}
}
}
// Empty lines are part of SSE format, ignore them
} catch (Exception e) {
log.error("Error processing SSE line: {}", line, e);
// Continue processing other lines
}
}
@Override
public void onError(Throwable throwable) {
if (!cancelled.get() && !completed) {
completed = true;
callback.onError(throwable);
}
}
@Override
public void onComplete() {
if (!cancelled.get() && !completed) {
completed = true;
callback.onComplete();
}
}
}
@Nullable
private ModelTextResponse processPrompt(ModelTextRequest prompt) {
List<Tool> tools = prompt.getToolMode() == ToolMode.none ? null : getTools();
if (CollectionUtils.isEmpty(tools)) {
tools = prompt.getTools();
}
String model = Optional.ofNullable(prompt.getModel()).orElse(getModel());
Double temperature = Optional.ofNullable(prompt.getTemperature()).orElse(getTemperature());
Boolean logprobs = Optional.ofNullable(prompt.getLogprobs()).orElse(getLogprobs());
Integer topLogprobs = Optional.ofNullable(prompt.getTopLogprobs()).orElse(getTopLogprobs());
ChatCompletionRequest.ResponseFormat responseFormat = prompt.getResponseFormat() == null || prompt.getResponseFormat().getType() == ResponseType.TEXT ? null : new ChatCompletionRequest.ResponseFormat(
prompt.getResponseFormat().getType().getValue(),
convertModelJsonSchema(prompt.getResponseFormat().getJsonSchema())
);
ChatCompletionRequest req = ChatCompletionRequest.builder()
.model(model)
.n(1)
.maxTokens(getMaxTokens())
.maxCompletionTokens(getMaxCompletionTokens())
.temperature(temperature)
.tools(tools)
.responseFormat(responseFormat)
.logprobs(logprobs)
.topLogprobs(topLogprobs)
.messages(
prompt.getMessages().stream()
.map(this::toMessage)
.toList()
)
.build();
try {
if (model != null && model.startsWith("o")) {
req.setTemperature(null);
ReasoningEffort effort = prompt.getReasoningEffort();
if (effort == null) {
effort = ReasoningEffort.medium;
}
switch (effort) {
case dynamic:
effort = ReasoningEffort.high;
break;
case none:
effort = ReasoningEffort.low;
break;
}
req.setReasoningEffort(effort.name());
}
if (BooleanUtils.isNotTrue(req.getLogprobs())) {
req.setTopLogprobs(null);
}
ChatCompletionResponse completion = client.createChatCompletion(req);
return mapToModelTextResponse(completion);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static ChatCompletionRequest.ResponseFormat.JsonSchema convertModelJsonSchema(ResponseFormat.JsonSchema schemaA) {
if (schemaA == null) {
return null;
}
Map<String, ChatCompletionRequest.ResponseFormat.Property> properties = schemaA.getProperties() != null
? schemaA.getProperties().entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> convertSchemaProperty(entry.getValue())
))
: null;
ChatCompletionRequest.ResponseFormat.SchemaDefinition schemaDefinition =
new ChatCompletionRequest.ResponseFormat.SchemaDefinition(
schemaA.getType(),
properties,
properties != null ? new ArrayList<>(properties.keySet()) : null
);
schemaDefinition.setAdditionalProperties(schemaA.getAdditionalProperties());
ChatCompletionRequest.ResponseFormat.JsonSchema result =
new ChatCompletionRequest.ResponseFormat.JsonSchema(
schemaA.getTitle(),
schemaDefinition
);
// Set strict mode if specified in schema
if (schemaA.getStrict() != null && schemaA.getStrict()) {
result.setStrict(true);
}
return result;
}
private static ChatCompletionRequest.ResponseFormat.Property convertSchemaProperty(ResponseFormat.SchemaProperty property) {
if (property == null) {
return null;
}
Map<String, ChatCompletionRequest.ResponseFormat.Property> nestedProperties = null;
if (property.getProperties() != null) {
nestedProperties = property.getProperties().entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> convertSchemaProperty(entry.getValue())
));
}
ChatCompletionRequest.ResponseFormat.Property items = null;
if (property.getItems() != null) {
items = convertSchemaProperty(property.getItems());
}
ChatCompletionRequest.ResponseFormat.Property result = new ChatCompletionRequest.ResponseFormat.Property(
property.getType(),
property.getDescription(),
property.getEnumValues()
);
if (nestedProperties != null) {
result.setProperties(nestedProperties);
result.setRequired(new ArrayList<>(nestedProperties.keySet()));
} else if (property.getRequired() != null) {
result.setRequired(property.getRequired());
}
if (items != null) {
result.setItems(items);
}
// For objects, always set additionalProperties to false if not explicitly set
if (ResponseFormatType.Object.getType().equals(property.getType())) {
result.setAdditionalProperties(property.getAdditionalProperties() != null ?
property.getAdditionalProperties() : false);
} else if (property.getAdditionalProperties() != null) {
result.setAdditionalProperties(property.getAdditionalProperties());
}
return result;
}
public static ModelTextResponse mapToModelTextResponse(ChatCompletionResponse completion) {
if (completion == null) {
return null;
}
var usage = completion.getUsage();
List<ResponseMessage> choices = completion.getChoices() != null ?
completion.getChoices().stream()
.map(OpenAIModelClient::mapChoice)
.filter(Objects::nonNull)
.collect(Collectors.toList())
: null;
return ModelTextResponse.builder()
.id(completion.getId())
.method(completion.getObject())
.createdTime(completion.getCreated())
.model(completion.getModel())
.usage(usage == null ? null : new Usage(
usage.getPromptTokens(),
usage.getCompletionTokens(),
usage.getTotalTokens()
))
.choices(choices)
.build();
}
private static ModelTextResponse.ResponseMessage mapChoice(ChatCompletionResponse.Choice choice) {
if (choice == null) {
return null;
}
return ModelTextResponse.ResponseMessage.builder()
.index(choice.getIndex())
.message(mapMessage(choice.getMessage()))
.finishReason(choice.getFinishReason())
.logprobs(mapLogProbs(choice.getLogprobs()))
.build();
}
private static LogProbs mapLogProbs(ChatCompletionResponse.LogProbs logProbs) {
if (logProbs == null) {
return null;
}
List<TokenLogProb> tokenLogprobs = null;
if (logProbs.getContent() != null) {
tokenLogprobs = logProbs.getContent().stream()
.map(token -> {
List<TopLogProb> topLogprobs = null;
if (token.getTopLogprobs() != null) {
topLogprobs = token.getTopLogprobs().stream()
.map(top -> TopLogProb.builder()
.token(top.getToken())
.logprob(top.getLogprob())
.build())
.toList();
}
return TokenLogProb.builder()
.token(token.getToken())
.logprob(token.getLogprob())
.bytes(token.getBytes())
.topLogprobs(topLogprobs)
.build();
})
.toList();
}
return LogProbs.builder()
.content(tokenLogprobs)
.build();
}
private static ModelImageResponse.ModelMessage mapMessage(ChatCompletionResponse.Message message) {
if (message == null) {
return null;
}
String content = message.getContent();
if (JsonUtils.isJSON(content) && !JsonUtils.isValidJSON(content)) {
content = JsonUtils.fixIncompleteJSON(content);
}
List<ToolCall> toolCalls = null;
if (message.getToolCalls() != null) {
toolCalls = message.getToolCalls().stream()
.map(OpenAIModelClient::mapToolCall)
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
return ModelImageResponse.ModelMessage.builder()
.role(Role.valueOf(message.getRole()))
.content(content)
.toolCalls(toolCalls)
.build();
}
private static ToolCall mapToolCall(ChatCompletionResponse.ToolCall toolCall) {
if (toolCall == null) {
return null;
}
ToolCall.FunctionCall function = null;
if (toolCall.getFunction() != null) {
Map<String, JsonNode> arguments = parseJsonStringToNodeMap(toolCall.getFunction().getArguments());
function = ToolCall.FunctionCall.builder()
.name(toolCall.getFunction().getName())
.arguments(arguments)
.build();
}
return ToolCall.builder()
.id(toolCall.getId())
.type(toolCall.getType())
.function(function)
.build();
}
private static Map<String, JsonNode> parseJsonStringToNodeMap(String jsonString) {
try {
if (jsonString == null || jsonString.trim().isEmpty()) {
return new HashMap<>();
}
JsonNode rootNode = ModelUtils.OBJECT_MAPPER.readTree(jsonString);
if (!rootNode.isObject()) {
throw new IllegalArgumentException("Expected JSON object but got: " + rootNode.getNodeType());
}
Map<String, JsonNode> result = new HashMap<>();
rootNode.fields().forEachRemaining(entry -> {
result.put(entry.getKey(), entry.getValue());
});
return result;
} catch (Exception e) {
throw new RuntimeException("Failed to parse tool call arguments: " + e.getMessage(), e);
}
}
/**
* Convert message for streaming requests - always uses StringMessage for simple text
*/
private Message toStreamingMessage(ModelImageResponse.ModelContentMessage message) {
// For streaming, we prefer StringMessage when possible for better compatibility
if (message.getContent() != null && !message.getContent().isEmpty()) {
// Check if all elements are text
boolean allText = message.getContent().stream()
.allMatch(e -> e.getType() == ModelTextRequest.MessageType.text);
if (allText) {
// Concatenate all text elements into a single string
String combinedText = message.getContent().stream()
.map(ModelContentElement::getText)
.filter(Objects::nonNull)
.collect(Collectors.joining(" "));
return new StringMessage(message.getRole().name(), message.getName(), combinedText);
}
}
// Fall back to regular conversion for complex messages
return toMessage(message);
}
private Message toMessage(ModelImageResponse.ModelContentMessage message) {
// if (message.getContent().size() == 1) {
// ModelContentElement mce = message.getContent().get(0);
//
// if (mce.getType() == MessageType.text) {
// String text = mce.getText();
// return new StringMessage(message.getRole().name(), message.getName(), text);
// }
// }
List<ContentElement> elements = message.getContent().stream()
.map(e -> {
switch (e.getType()) {
case image -> {
return new ImageContentElement(e.getImage().getImage());
}
case text -> {
String text = e.getText();
if (text.length() >= 2 && text.startsWith("\"") && text.endsWith("\"")) {
text = text.substring(1, text.length() - 1);
}
return new TextContentElement(text);
}
}
return null;
}).filter(Objects::nonNull).toList();
return new ContentMessage(message.getRole().name(), message.getName(), elements);
}
/**
* Download image from URL asynchronously
*/
private static ModelContentElement.ImageData downloadImage(String url) {
try {
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(url))
.timeout(Duration.ofSeconds(30))
.GET()
.build();
HttpResponse<byte[]> response = httpClient.send(request, HttpResponse.BodyHandlers.ofByteArray());
if (response.statusCode() == 200) {
// Detect MIME type from Content-Type header
String contentType = response.headers().firstValue("Content-Type")
.orElse("image/jpeg");
return new ModelContentElement.ImageData(response.body(), contentType);
} else {
log.error("Failed to download image from URL: {}, status: {}", url, response.statusCode());
return null;
}
} catch (IOException | InterruptedException e) {
log.error("Error downloading image from URL: {}", url, e);
if (e instanceof InterruptedException) {
Thread.currentThread().interrupt();
}
throw new CompletionException("Failed to download image", e);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/AudioTranscriptionRequest.java
|
package ai.driftkit.clients.openai.domain;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.File;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class AudioTranscriptionRequest {
private File file;
private String model;
private String prompt;
private String responseFormat;
private Float temperature;
private String language;
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/AudioTranscriptionResponse.java
|
package ai.driftkit.clients.openai.domain;
import lombok.Data;
@Data
public class AudioTranscriptionResponse {
private String text;
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/ChatCompletionChunk.java
|
package ai.driftkit.clients.openai.domain;
import ai.driftkit.common.domain.client.ModelTextResponse.Usage;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* Represents a streaming chunk from OpenAI's chat completion API
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ChatCompletionChunk {
@JsonProperty("id")
private String id;
@JsonProperty("object")
private String object;
@JsonProperty("created")
private Long created;
@JsonProperty("model")
private String model;
@JsonProperty("system_fingerprint")
private String systemFingerprint;
@JsonProperty("choices")
private List<ChunkChoice> choices;
@JsonProperty("usage")
private Usage usage;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public static class ChunkChoice {
@JsonProperty("index")
private Integer index;
@JsonProperty("delta")
private Delta delta;
@JsonProperty("logprobs")
private ChatCompletionResponse.LogProbs logprobs;
@JsonProperty("finish_reason")
private String finishReason;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Delta {
@JsonProperty("role")
private String role;
@JsonProperty("content")
private String content;
@JsonProperty("tool_calls")
private List<DeltaToolCall> toolCalls;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class DeltaToolCall {
@JsonProperty("index")
private Integer index;
@JsonProperty("id")
private String id;
@JsonProperty("type")
private String type;
@JsonProperty("function")
private DeltaFunction function;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class DeltaFunction {
@JsonProperty("name")
private String name;
@JsonProperty("arguments")
private String arguments;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/ChatCompletionRequest.java
|
package ai.driftkit.clients.openai.domain;
import ai.driftkit.common.domain.client.ModelClient.Tool;
import ai.driftkit.clients.openai.utils.OpenAIUtils;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL) // Exclude null fields for cleaner serialization
public class ChatCompletionRequest {
@JsonProperty("model")
private String model;
@JsonProperty("messages")
private List<Message> messages;
/*
What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random,
while lower values like 0.2 will make it more focused and deterministic.
We generally recommend altering this or top_p but not both.
*/
@JsonProperty("temperature")
private Double temperature;
/*
An alternative to sampling with temperature, called nucleus sampling,
where the model considers the results of the tokens with top_p probability mass.
So 0.1 means only the tokens comprising the top 10% probability mass are considered.
We generally recommend altering this or temperature but not both.
*/
@JsonProperty("top_p")
private Double topP;
// How many chat completion choices to generate for each input message.
// Note that you will be charged based on the number of generated tokens across all of the choices. Keep n as 1 to minimize costs.
@JsonProperty("n")
private Integer n = 1;
@JsonProperty("stream")
private Boolean stream;
//Up to 4 sequences where the API will stop generating further tokens.
@JsonProperty("stop")
private List<String> stop;
@JsonProperty("max_tokens")
private Integer maxTokens;
@JsonProperty("max_completion_tokens")
private Integer maxCompletionTokens;
// Valid values: low, medium, high
@JsonProperty("reasoning_effort")
private String reasoningEffort;
/*
Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far,
increasing the model's likelihood to talk about new topics.
*/
@JsonProperty("presence_penalty")
private Double presencePenalty;
/*
Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far,
decreasing the model's likelihood to repeat the same line verbatim.
*/
@JsonProperty("frequency_penalty")
private Double frequencyPenalty;
/*
Accepts a JSON object that maps tokens (specified by their token ID in the tokenizer)
to an associated bias value from -100 to 100. Mathematically, the bias is added to the
logits generated by the model prior to sampling. The exact effect will vary per model,
but values between -1 and 1 should decrease or increase likelihood of selection;
values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
*/
@JsonProperty("logit_bias")
private Map<String, Integer> logitBias;
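// For illustration: {"50256": -100} effectively bans token id 50256 (token ids are tokenizer-specific).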
@JsonProperty("user")
private String user;
/*
Whether or not to store the output of this chat completion request for use in our model distillation or evals products.
*/
@JsonProperty("store")
private Boolean store;
@JsonProperty("metadata")
private Map<String, Object> metadata;
/*
Whether to return log probabilities of the output tokens or not.
If true, returns the log probabilities of each output token returned in the content of message.
*/
@JsonProperty("logprobs")
private Boolean logprobs;
/*
An integer between 0 and 20 specifying the number of most likely tokens to return at each token position,
each with an associated log probability. logprobs must be set to true if this parameter is used.
*/
@JsonProperty("top_logprobs")
private Integer topLogprobs;
/*
Output types that you would like the model to generate for this request.
Most models are capable of generating text, which is the default:
["text"]
The gpt-4o-audio-preview model can also be used to generate audio.
To request that this model generate both text and audio responses, you can use:
["text", "audio"]
*/
@JsonProperty("modalities")
private List<String> modalities;
/*
Configuration for a Predicted Output, which can greatly improve response times
when large parts of the model response are known ahead of time. This is most
common when you are regenerating a file with only minor changes to most of the content.
*/
@JsonProperty("prediction")
private PredictionItem prediction;
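/*
Illustrative shape (an assumption based on the PredictionItem classes below, abridged):
{"type": "content", "content": [{"type": "text", "text": "...previously generated file..."}]}
*/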
@JsonProperty("audio")
private Map<String, Object> audio;
/*
Setting to { "type": "json_schema", "json_schema": {...} } enables Structured Outputs which ensures the model
will match your supplied JSON schema. Learn more in the Structured Outputs guide.
Setting to { "type": "json_object" } enables JSON mode, which ensures the message the model generates is valid JSON.
Important: when using JSON mode, you must also instruct the model to produce JSON yourself via a system or user message.
Without this, the model may generate an unending stream of whitespace until the generation reaches the token limit,
resulting in a long-running and seemingly "stuck" request. Also note that the message content may be partially cut
off if finish_reason="length", which indicates the generation exceeded max_tokens or the conversation exceeded the max context length.
*/
@JsonProperty("response_format")
private ResponseFormat responseFormat;
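/*
Illustrative payloads (abridged, based on the ResponseFormat classes defined below):
{"type": "json_object"}
{"type": "json_schema", "json_schema": {"name": "result", "strict": true,
"schema": {"type": "object", "properties": {...}, "required": [...], "additionalProperties": false}}}
*/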
/*
This feature is in Beta. If specified, our system will make a best effort to sample deterministically,
such that repeated requests with the same seed and parameters should return the same result.
Determinism is not guaranteed, and you should refer to the system_fingerprint response parameter to monitor changes in the backend.
*/
@JsonProperty("seed")
private Integer seed;
/*
Specifies the latency tier to use for processing the request.
This parameter is relevant for customers subscribed to the scale tier service:
If set to 'auto', and the Project is Scale tier enabled, the system will utilize scale tier credits until they are exhausted.
If set to 'auto', and the Project is not Scale tier enabled,
the request will be processed using the default service tier with a lower uptime SLA and no latency guarantee.
If set to 'default', the request will be processed using the default service tier with a lower uptime SLA and no latency guarantee.
When not set, the default behavior is 'auto'.
When this parameter is set, the response body will include the service_tier utilized.
*/
@JsonProperty("service_tier")
private String serviceTier;
@JsonProperty("stream_options")
private Map<String, Object> streamOptions;
/*
A list of tools the model may call. Currently, only functions are supported as a tool.
Use this to provide a list of functions the model may generate JSON inputs for.
A max of 128 functions are supported.
*/
@JsonProperty("tools")
private List<Tool> tools;
/*
Controls which (if any) tool is called by the model.
none means the model will not call any tool and instead generates a message.
auto means the model can pick between generating a message or calling one or more tools.
required means the model must call one or more tools.
Specifying a particular tool via {"type": "function", "function": {"name": "my_function"}} forces the model to call that tool.
none is the default when no tools are present. auto is the default if tools are present.
*/
@JsonProperty("tool_choice")
private String toolChoice;
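// e.g. "none", "auto", or "required"; see the note above for the object form that forces a specific function.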
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ContentMessage extends Message {
private List<ContentElement> content;
@Builder
public ContentMessage(String role, String name, List<ContentElement> content) {
super(role, name);
this.content = content;
}
@Override
public String getMessage() {
return content.stream().map(e -> {
if (e.getType() == MessageType.text) {
return ((TextContentElement)e).getText();
}
return null;
})
.filter(Objects::nonNull)
.collect(Collectors.joining(" "));
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class StringMessage extends Message {
private String content;
@Builder
public StringMessage(String role, String name, String content) {
super(role, name);
this.content = content;
}
public String getMessage() {
return content;
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static abstract class Message {
private String role;
@JsonProperty("name")
private String name;
@JsonIgnore
public abstract String getMessage();
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ContentElement {
private MessageType type;
}
@Data
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class TextContentElement extends ContentElement {
private String text;
public TextContentElement(String text) {
super(MessageType.text);
this.text = text;
}
}
@Data
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ImageUrlContentElement extends ContentElement {
@JsonProperty("image_url")
private ImageStringUrl imageUrl;
public ImageUrlContentElement(String url) {
super(MessageType.image_url);
this.imageUrl = new ImageStringUrl(url);
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class ImageStringUrl {
@JsonProperty("url")
private String url;
}
}
@Data
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ImageContentElement extends ContentElement {
@JsonProperty("image_url")
private ImageContent imageUrl;
public ImageContentElement(byte[] image) {
super(MessageType.image_url);
this.imageUrl = new ImageContent(image);
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ImageContent {
private String url;
public ImageContent(byte[] img) {
this.url = OpenAIUtils.imageToBase64(img);
}
}
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ResponseFormat {
@JsonProperty("type")
private String type;
@JsonProperty("json_schema")
private JsonSchema jsonSchema;
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class JsonSchema {
@JsonProperty("name")
private String name;
@JsonProperty("strict")
private Boolean strict;
@JsonProperty("schema")
private SchemaDefinition schema;
public JsonSchema(String name, SchemaDefinition schema) {
this.name = name;
this.strict = true;
this.schema = schema;
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class SchemaDefinition {
@JsonProperty("type")
private String type;
@JsonProperty("properties")
private Map<String, Property> properties;
@JsonProperty("required")
private List<String> required;
@JsonProperty("additionalProperties")
private Boolean additionalProperties;
public SchemaDefinition(String type, Map<String, Property> properties, List<String> required) {
this.type = type;
this.properties = properties;
this.required = required;
this.additionalProperties = false;
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class Property {
@JsonProperty("type")
private String type;
@JsonProperty("description")
private String description;
@JsonProperty("enum")
private List<String> enumValues;
@JsonProperty("properties")
private Map<String, Property> properties;
@JsonProperty("required")
private List<String> required;
@JsonProperty("items")
private Property items;
@JsonProperty("additionalProperties")
private Object additionalProperties;
public Property(String type, String description, List<String> enumValues) {
this.type = type;
this.description = description;
this.enumValues = enumValues;
}
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class PredictionContentItem {
private String type;
private String text;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class PredictionItem {
//Always content
private String type;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class PredictionItemSingle extends PredictionItem {
private PredictionContentItem content;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class PredictionItems extends PredictionItem {
private List<PredictionContentItem> content;
}
public enum MessageType {
image_url,
text
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/ChatCompletionResponse.java
|
package ai.driftkit.clients.openai.domain;
import ai.driftkit.common.domain.client.ModelTextResponse.Usage;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
public class ChatCompletionResponse {
private String id;
private String object;
private Long created;
private String model;
private List<Choice> choices;
private Usage usage;
@Data
public static class Choice {
private Integer index;
private Message message;
@JsonProperty("finish_reason")
private String finishReason;
private ChatCompletionResponse.LogProbs logprobs;
}
@Data
@Builder
public static class LogProbs {
private List<TokenLogprob> content;
@Data
@Builder
public static class TokenLogprob {
private String token;
private Double logprob;
@JsonProperty("top_logprobs")
private List<TopLogprob> topLogprobs;
private byte[] bytes;
@Data
@Builder
public static class TopLogprob {
private String token;
private Double logprob;
private byte[] bytes;
}
}
}
@Data
public static class Message {
private String role;
private String content;
@JsonProperty("tool_calls")
private List<ToolCall> toolCalls;
}
@Data
public static class ToolCall {
private String id;
private String type;
private Function function;
@Data
public static class Function {
private String name;
private String arguments;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/CreateImageRequest.java
|
package ai.driftkit.clients.openai.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class CreateImageRequest {
// Model to use, e.g., "dall-e-3"
@JsonProperty("model")
private String model;
@JsonProperty("prompt")
private String prompt;
// Number of images to generate, default is 1
@JsonProperty("n")
private Integer n = 1;
// Size of the image, e.g., "1024x1024"
@JsonProperty("size")
private String size = "1024x1024";
@JsonProperty("output_compression")
private Integer compression;
@JsonProperty("output_format")
private String outputFormat;
@JsonProperty("response_format")
private ImageResponseFormat responseFormat = ImageResponseFormat.url;
// Optional, style of the image
@JsonProperty("style")
private ImageStyle style = ImageStyle.vivid;
// Optional, unique identifier for the end-user
@JsonProperty("user")
private String user;
private Quality quality;
public enum Quality {
standard, hd, low, high, medium
}
public enum ImageResponseFormat {
url, b64_json
}
public enum ImageStyle {
vivid,
natural
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/CreateImageResponse.java
|
package ai.driftkit.clients.openai.domain;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CreateImageResponse {
// Unix timestamp of image creation
private Long created;
private List<ImageData> data;
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class ImageData {
// URL of the generated image if response_format is "url"
private String url;
// Base64-encoded image if response_format is "b64_json"
@JsonProperty("b64_json")
private String b64Json;
// Revised prompt used for image generation, if available
@JsonProperty("revised_prompt")
private String revisedPrompt;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/EmbeddingsRequest.java
|
package ai.driftkit.clients.openai.domain;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class EmbeddingsRequest {
private String model;
private List<String> input;
private String user;
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/domain/EmbeddingsResponse.java
|
package ai.driftkit.clients.openai.domain;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
@Data
public class EmbeddingsResponse {
private String object;
private List<Embedding> data;
private Usage usage;
@Data
public static class Embedding {
private String object;
private List<Double> embedding;
private Integer index;
}
@Data
public static class Usage {
@JsonProperty("prompt_tokens")
private Integer promptTokens;
@JsonProperty("total_tokens")
private Integer totalTokens;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai
|
java-sources/ai/driftkit/driftkit-clients-openai/0.8.1/ai/driftkit/clients/openai/utils/OpenAIUtils.java
|
package ai.driftkit.clients.openai.utils;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.SneakyThrows;
import java.io.ByteArrayOutputStream;
import java.util.Base64;
public class OpenAIUtils {
public static String imageToBase64(byte[] img) {
String base64Url = Base64.getEncoder().encodeToString(img);
return "data:image/jpeg;base64," + base64Url;
}
@SneakyThrows
public static ImageData base64toBytes(String mimeType, String b64json) {
String[] mime2base64 = b64json.split(",");
String mime = mimeType;
String base64Data = b64json;
if (mime2base64.length == 2) {
mime = mime2base64[0]
.replace("data:", "")
.replace(";base64", "");
base64Data = mime2base64[1];
}
byte[] decodedBytes = Base64.getDecoder().decode(base64Data);
ByteArrayOutputStream bo = new ByteArrayOutputStream();
bo.write(decodedBytes);
return new ImageData(bo.toByteArray(), mime);
}
@Data
@AllArgsConstructor
public static class ImageData {
private byte[] image;
private String mimeType;
}
}
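// Illustrative round-trip sketch (not part of the library): encodes a few bytes with
// imageToBase64 and parses the resulting data URL back with base64toBytes. The sample
// bytes are arbitrary placeholder data.
class OpenAIUtilsExample {
public static void main(String[] args) {
byte[] sample = new byte[] {(byte) 0xFF, (byte) 0xD8, (byte) 0xFF};
String dataUrl = OpenAIUtils.imageToBase64(sample);
OpenAIUtils.ImageData parsed = OpenAIUtils.base64toBytes("image/jpeg", dataUrl);
System.out.println(parsed.getMimeType() + ", " + parsed.getImage().length + " bytes");
}
}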
|
0
|
java-sources/ai/driftkit/driftkit-clients-spring-ai/0.8.1/ai/driftkit/clients
|
java-sources/ai/driftkit/driftkit-clients-spring-ai/0.8.1/ai/driftkit/clients/springai/SpringAIModelClient.java
|
package ai.driftkit.clients.springai;
import ai.driftkit.common.domain.client.*;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.model.Generation;
import org.springframework.ai.chat.prompt.ChatOptions;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.SystemMessage;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.content.Media;
import org.springframework.ai.converter.BeanOutputConverter;
import org.springframework.ai.tool.function.FunctionToolCallback;
import org.springframework.util.MimeType;
import org.springframework.core.io.ByteArrayResource;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* ModelClient implementation that wraps Spring AI's ChatModel.
* This allows using Spring AI models within DriftKit's workflow system
* with full tracing support.
*
* Usage example:
* <pre>
* @Configuration
* public class ModelClientConfig {
*
* @Bean
* public ModelClient springAIModelClient(ChatModel chatModel) {
* return new SpringAIModelClient(chatModel)
* .withModel("gpt-4")
* .withTemperature(0.7)
* .withMaxTokens(1000);
* }
* }
* </pre>
*/
@Slf4j
public class SpringAIModelClient extends ModelClient<Object> {
private final ChatModel chatModel;
private final ChatClient chatClient;
// Configuration fields
private String model;
private List<String> systemMessages = new ArrayList<>();
private Double temperature = 0.7;
private Double topP;
private List<String> stop;
private boolean jsonObjectSupport = true;
private Boolean logprobs;
private Integer topLogprobs;
private Integer maxTokens;
private Integer maxCompletionTokens;
public SpringAIModelClient(ChatModel chatModel) {
this.chatModel = chatModel;
this.chatClient = ChatClient.builder(chatModel).build();
}
@Override
public Set<Capability> getCapabilities() {
Set<Capability> capabilities = new HashSet<>();
capabilities.add(Capability.TEXT_TO_TEXT);
capabilities.add(Capability.IMAGE_TO_TEXT);
// Add more capabilities based on the specific ChatModel implementation
return capabilities;
}
@Override
public ModelTextResponse textToText(ModelTextRequest request) throws UnsupportedCapabilityException {
try {
// Convert DriftKit request to Spring AI format
List<Message> messages = convertToSpringAIMessages(request.getMessages());
// Build chat options
ChatOptions options = buildChatOptions(request);
// Create prompt
Prompt prompt = new Prompt(messages, options);
// Call the model
ChatResponse response = chatModel.call(prompt);
// Convert response back to DriftKit format
return convertToDriftKitResponse(response, request);
} catch (Exception e) {
log.error("Error in textToText call", e);
throw new RuntimeException("Failed to execute Spring AI model", e);
}
}
@Override
public ModelImageResponse textToImage(ModelImageRequest prompt) throws UnsupportedCapabilityException {
throw new UnsupportedCapabilityException("Text to image not supported by Spring AI ChatModel");
}
@Override
public ModelTextResponse imageToText(ModelTextRequest request) throws UnsupportedCapabilityException {
try {
// Convert messages including images
List<Message> messages = convertToSpringAIMessagesWithImages(request.getMessages());
// Build chat options
ChatOptions options = buildChatOptions(request);
// Create prompt
Prompt prompt = new Prompt(messages, options);
// Call the model
ChatResponse response = chatModel.call(prompt);
// Convert response back to DriftKit format
return convertToDriftKitResponse(response, request);
} catch (Exception e) {
log.error("Error in imageToText call", e);
throw new RuntimeException("Failed to execute Spring AI model with images", e);
}
}
// Conversion methods
private List<Message> convertToSpringAIMessages(List<ModelImageResponse.ModelContentMessage> driftKitMessages) {
List<Message> messages = new ArrayList<>();
// Add system messages first
for (String systemMessage : systemMessages) {
messages.add(new SystemMessage(systemMessage));
}
// Convert DriftKit messages
for (ModelImageResponse.ModelContentMessage msg : driftKitMessages) {
Message springAIMessage = convertMessage(msg);
if (springAIMessage != null) {
messages.add(springAIMessage);
}
}
return messages;
}
private List<Message> convertToSpringAIMessagesWithImages(List<ModelImageResponse.ModelContentMessage> driftKitMessages) {
List<Message> messages = new ArrayList<>();
// Add system messages first
for (String systemMessage : systemMessages) {
messages.add(new SystemMessage(systemMessage));
}
// Convert DriftKit messages including multimodal content
for (ModelImageResponse.ModelContentMessage msg : driftKitMessages) {
Message springAIMessage = convertMessageWithImages(msg);
if (springAIMessage != null) {
messages.add(springAIMessage);
}
}
return messages;
}
private Message convertMessage(ModelImageResponse.ModelContentMessage msg) {
String content = extractTextContent(msg);
switch (msg.getRole()) {
case system:
return new SystemMessage(content);
case user:
return new UserMessage(content);
case assistant:
return new AssistantMessage(content);
default:
log.warn("Unknown role: {}", msg.getRole());
return new UserMessage(content);
}
}
private Message convertMessageWithImages(ModelImageResponse.ModelContentMessage msg) {
if (msg.getContent() == null || msg.getContent().isEmpty()) {
return convertMessage(msg);
}
// Check if message contains images
List<Media> mediaList = new ArrayList<>();
String textContent = "";
for (ModelImageResponse.ModelContentMessage.ModelContentElement element : msg.getContent()) {
if (element.getType() == ModelTextRequest.MessageType.text) {
textContent += element.getText() + " ";
} else if (element.getType() == ModelTextRequest.MessageType.image && element.getImage() != null) {
// Convert to Spring AI Media using ByteArrayResource
ByteArrayResource resource = new ByteArrayResource(element.getImage().getImage());
Media media = new Media(
MimeType.valueOf(element.getImage().getMimeType()),
resource
);
mediaList.add(media);
}
}
// Create message with media if available
if (!mediaList.isEmpty() && msg.getRole() == Role.user) {
return UserMessage.builder().media(mediaList.toArray(new Media[0])).text(textContent.trim()).build();
} else {
return convertMessage(msg);
}
}
private String extractTextContent(ModelImageResponse.ModelContentMessage msg) {
if (msg.getContent() == null || msg.getContent().isEmpty()) {
return "";
}
return msg.getContent().stream()
.filter(element -> element.getType() == ModelTextRequest.MessageType.text)
.map(ModelImageResponse.ModelContentMessage.ModelContentElement::getText)
.collect(Collectors.joining(" "));
}
private ChatOptions buildChatOptions(ModelTextRequest request) {
// Create a dynamic ChatOptions implementation
return new ChatOptions() {
private final String modelValue = request.getModel() != null ? request.getModel() : model;
private final Double temperatureValue = request.getTemperature() != null ? request.getTemperature() : temperature;
private final Double topPValue = topP;
private final Integer topKValue = null; // Not supported in ModelTextRequest yet
private final Integer maxTokensValue = maxTokens;
private final List<String> stopSequencesValue = stop;
private final Map<String, Object> additionalOptions = new HashMap<>();
@Override
public String getModel() {
return modelValue;
}
@Override
public Double getFrequencyPenalty() {
return null; // Not supported in ModelTextRequest
}
@Override
public Integer getMaxTokens() {
return maxTokensValue;
}
@Override
public Double getPresencePenalty() {
return null; // Not supported in ModelTextRequest
}
@Override
public List<String> getStopSequences() {
return stopSequencesValue;
}
@Override
public Double getTemperature() {
return temperatureValue;
}
@Override
public Integer getTopK() {
return topKValue;
}
@Override
public Double getTopP() {
return topPValue;
}
@Override
public ChatOptions copy() {
return this; // Immutable, so return self
}
// Handle function calling if needed
private final List<FunctionToolCallback> functionCallbacks;
{
if (CollectionUtils.isNotEmpty(request.getTools())) {
functionCallbacks = convertToSpringAIFunctions(request.getTools());
// Store both original tools and converted callbacks
additionalOptions.put("tools", request.getTools());
additionalOptions.put("functionCallbacks", functionCallbacks);
log.debug("Converted {} tools to Spring AI function callbacks", request.getTools().size());
} else {
functionCallbacks = null;
}
}
// Note: getFunctionCallbacks() is not a method in ChatOptions interface
// Tool callbacks are handled differently in Spring AI 1.0.1
// Method to retrieve additional options
public Map<String, Object> getAdditionalOptions() {
return Collections.unmodifiableMap(additionalOptions);
}
};
}
private List<FunctionToolCallback> convertToSpringAIFunctions(List<ModelClient.Tool> tools) {
// Convert DriftKit tools to Spring AI 1.0.1 ToolCallback format
if (CollectionUtils.isEmpty(tools)) {
return new ArrayList<>();
}
List<FunctionToolCallback> callbacks = new ArrayList<>();
for (ModelClient.Tool tool : tools) {
try {
// Create a function implementation using Java Function interface
Function<Map<String, Object>, Map<String, Object>> functionImpl = params -> {
String toolName = tool.getFunction() != null ? tool.getFunction().getName() : "unknown";
log.debug("Executing tool: {} with params: {}", toolName, params);
// Tool execution would be handled by the workflow/client consumer
Map<String, Object> result = new HashMap<>();
result.put("tool", toolName);
result.put("params", params);
result.put("status", "pending");
result.put("message", "Tool execution not implemented in Spring AI adapter");
return result;
};
// Build the FunctionToolCallback with new API
String name = tool.getFunction() != null ? tool.getFunction().getName() : "unknown";
String description = tool.getFunction() != null ? tool.getFunction().getDescription() : "";
FunctionToolCallback callback = FunctionToolCallback.builder(name, functionImpl)
.description(description)
.build();
callbacks.add(callback);
} catch (Exception e) {
String toolName = tool.getFunction() != null ? tool.getFunction().getName() : "unknown";
log.error("Failed to convert tool {} to Spring AI format", toolName, e);
}
}
log.debug("Converted {} DriftKit tools to Spring AI FunctionToolCallbacks", callbacks.size());
return callbacks;
}
private ModelTextResponse convertToDriftKitResponse(ChatResponse springAIResponse, ModelTextRequest request) {
ModelTextResponse.ModelTextResponseBuilder builder = ModelTextResponse.builder();
// Set model
builder.model(request.getModel() != null ? request.getModel() : model);
// Convert generations to choices
List<ModelTextResponse.ResponseMessage> choices = new ArrayList<>();
if (springAIResponse.getResults() != null) {
for (Generation generation : springAIResponse.getResults()) {
String content = generation.getOutput().getText();
ModelTextResponse.ResponseMessage choice = ModelTextResponse.ResponseMessage.builder()
.message(ModelImageResponse.ModelMessage.builder()
.role(Role.assistant)
.content(content)
.build())
.build();
choices.add(choice);
}
}
builder.choices(choices);
// Extract usage if available
// Note: Spring AI 1.0.1 Usage API has changed
if (springAIResponse.getMetadata() != null && springAIResponse.getMetadata().getUsage() != null) {
var usage = springAIResponse.getMetadata().getUsage();
// Try to extract token counts - API may vary by provider
try {
Integer promptTokens = usage.getPromptTokens() != null ? usage.getPromptTokens().intValue() : 0;
Integer totalTokens = usage.getTotalTokens() != null ? usage.getTotalTokens().intValue() : 0;
Integer completionTokens = totalTokens - promptTokens;
builder.usage(ModelTextResponse.Usage.builder()
.promptTokens(promptTokens)
.completionTokens(completionTokens)
.totalTokens(totalTokens)
.build());
} catch (Exception e) {
log.debug("Unable to extract token usage from Spring AI response", e);
}
}
return builder.build();
}
// Builder pattern methods
public SpringAIModelClient withModel(String model) {
this.model = model;
return this;
}
public SpringAIModelClient withSystemMessage(String systemMessage) {
this.systemMessages.add(systemMessage);
return this;
}
public SpringAIModelClient withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
public SpringAIModelClient withTopP(Double topP) {
this.topP = topP;
return this;
}
public SpringAIModelClient withMaxTokens(Integer maxTokens) {
this.maxTokens = maxTokens;
return this;
}
public SpringAIModelClient withStop(List<String> stop) {
this.stop = stop;
return this;
}
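// Illustrative chained configuration using the fluent methods above
// (the model id and values are examples only, not shipped defaults):
//   SpringAIModelClient client = new SpringAIModelClient(chatModel)
//       .withModel("gpt-4o-mini")
//       .withTemperature(0.2)
//       .withMaxTokens(1024)
//       .withStop(List.of("###"));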
// Getters and setters for ModelClient interface
@Override
public String getModel() {
return model;
}
@Override
public void setModel(String model) {
this.model = model;
}
@Override
public List<String> getSystemMessages() {
return systemMessages;
}
@Override
public void setSystemMessages(List<String> systemMessages) {
this.systemMessages = systemMessages;
}
@Override
public Double getTemperature() {
return temperature;
}
@Override
public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
public Double getTopP() {
return topP;
}
@Override
public void setTopP(Double topP) {
this.topP = topP;
}
@Override
public List<String> getStop() {
return stop;
}
@Override
public void setStop(List<String> stop) {
this.stop = stop;
}
@Override
public boolean isJsonObjectSupport() {
return jsonObjectSupport;
}
@Override
public void setJsonObjectSupport(boolean jsonObjectSupport) {
this.jsonObjectSupport = jsonObjectSupport;
}
@Override
public Boolean getLogprobs() {
return logprobs;
}
@Override
public void setLogprobs(Boolean logprobs) {
this.logprobs = logprobs;
}
@Override
public Integer getTopLogprobs() {
return topLogprobs;
}
@Override
public void setTopLogprobs(Integer topLogprobs) {
this.topLogprobs = topLogprobs;
}
@Override
public Integer getMaxTokens() {
return maxTokens;
}
@Override
public void setMaxTokens(Integer maxTokens) {
this.maxTokens = maxTokens;
}
@Override
public Integer getMaxCompletionTokens() {
return maxCompletionTokens;
}
@Override
public void setMaxCompletionTokens(Integer maxCompletionTokens) {
this.maxCompletionTokens = maxCompletionTokens;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-clients-spring-ai-starter/0.8.1/ai/driftkit/clients/springai
|
java-sources/ai/driftkit/driftkit-clients-spring-ai-starter/0.8.1/ai/driftkit/clients/springai/autoconfigure/DriftKitClientsSpringAIAutoConfiguration.java
|
package ai.driftkit.clients.springai.autoconfigure;
import ai.driftkit.clients.springai.SpringAIModelClient;
import ai.driftkit.common.domain.client.ModelClient;
// Removed TraceService import - not needed
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;
import org.springframework.util.StringUtils;
import java.util.Map;
/**
* Auto-configuration for DriftKit Clients Spring AI integration.
*
* This configuration creates a SpringAIModelClient that wraps a Spring AI ChatModel,
* allowing Spring AI models to be used within the DriftKit framework.
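* <p>
* Example {@code application.yml} (a sketch; property names are inferred from the getters
* on {@link DriftKitClientsSpringAIProperties} and should be verified against that class):
* <pre>{@code
* driftkit:
*   clients:
*     spring-ai:
*       enabled: true
*       default-model: gpt-4o-mini      # illustrative model id
*       default-temperature: 0.7
*       default-max-tokens: 2048
*       default-top-p: 0.9
*       model:
*         primary: false
*         bean-name: openAiChatModel    # optional: select a specific ChatModel bean
* }</pre>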
*/
@Slf4j
@AutoConfiguration
@ConditionalOnClass({ChatModel.class, ModelClient.class})
@ConditionalOnProperty(prefix = "driftkit.clients.spring-ai", name = "enabled", havingValue = "true", matchIfMissing = true)
@EnableConfigurationProperties(DriftKitClientsSpringAIProperties.class)
public class DriftKitClientsSpringAIAutoConfiguration {
private final DriftKitClientsSpringAIProperties properties;
public DriftKitClientsSpringAIAutoConfiguration(DriftKitClientsSpringAIProperties properties) {
this.properties = properties;
log.info("Initializing DriftKit Clients Spring AI integration with properties: {}", properties);
}
/**
* Creates a SpringAIModelClient that wraps the Spring AI ChatModel.
*
* @param applicationContext Spring application context for bean lookup
* @param chatModelProvider Provider for ChatModel beans
* @param beanFactory BeanFactory for resolving additional dependencies
* @return SpringAIModelClient instance
*/
@Bean
@ConditionalOnBean(ChatModel.class)
@ConditionalOnMissingBean(name = "springAIModelClient")
@ConditionalOnProperty(prefix = "driftkit.clients.spring-ai.model", name = "primary", havingValue = "false", matchIfMissing = true)
public SpringAIModelClient springAIModelClient(
ApplicationContext applicationContext,
ObjectProvider<ChatModel> chatModelProvider,
BeanFactory beanFactory) {
ChatModel chatModel = getChatModel(applicationContext, chatModelProvider);
log.info("Creating SpringAIModelClient with ChatModel: {}",
chatModel.getClass().getSimpleName());
// Validate configuration
validateConfiguration();
SpringAIModelClient client = new SpringAIModelClient(chatModel);
// Apply default configurations
applyDefaultConfigurations(client);
if (properties.isLoggingEnabled()) {
log.info("Created SpringAIModelClient with configuration: model={}, temperature={}, maxTokens={}, topP={}",
properties.getDefaultModel(),
properties.getDefaultTemperature(),
properties.getDefaultMaxTokens(),
properties.getDefaultTopP());
}
return client;
}
/**
* Creates a primary SpringAIModelClient when configured.
*
* @param applicationContext Spring application context for bean lookup
* @param chatModelProvider Provider for ChatModel beans
* @param beanFactory BeanFactory for resolving additional dependencies
* @return Primary SpringAIModelClient instance
*/
@Bean
@Primary
@ConditionalOnBean(ChatModel.class)
@ConditionalOnMissingBean(name = "primarySpringAIModelClient")
@ConditionalOnProperty(prefix = "driftkit.clients.spring-ai.model", name = "primary", havingValue = "true")
public ModelClient primarySpringAIModelClient(
ApplicationContext applicationContext,
ObjectProvider<ChatModel> chatModelProvider,
BeanFactory beanFactory) {
ChatModel chatModel = getChatModel(applicationContext, chatModelProvider);
log.info("Creating primary SpringAIModelClient with ChatModel: {}",
chatModel.getClass().getSimpleName());
// Validate configuration
validateConfiguration();
SpringAIModelClient client = new SpringAIModelClient(chatModel);
// Apply default configurations
applyDefaultConfigurations(client);
if (properties.isLoggingEnabled()) {
log.info("Created primary SpringAIModelClient with configuration: model={}, temperature={}, maxTokens={}, topP={}",
properties.getDefaultModel(),
properties.getDefaultTemperature(),
properties.getDefaultMaxTokens(),
properties.getDefaultTopP());
}
return client;
}
/**
* Gets the ChatModel to use, either by bean name or the primary/unique bean.
*
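* <p>Example: setting {@code driftkit.clients.spring-ai.model.bean-name=anthropicChatModel}
* (bean name illustrative) selects that bean explicitly; otherwise the unique or primary
* ChatModel bean is used, and a warning is logged when several candidates exist.
*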
* @param applicationContext Spring application context for bean lookup
* @param chatModelProvider Provider for ChatModel beans
* @return ChatModel instance
* @throws IllegalStateException if no suitable ChatModel is found
*/
private ChatModel getChatModel(ApplicationContext applicationContext, ObjectProvider<ChatModel> chatModelProvider) {
String beanName = properties.getModel().getBeanName();
if (StringUtils.hasText(beanName)) {
// Get specific named bean
try {
ChatModel namedModel = applicationContext.getBean(beanName, ChatModel.class);
log.debug("Using named ChatModel bean: {} of type: {}", beanName, namedModel.getClass().getName());
return namedModel;
} catch (NoSuchBeanDefinitionException e) {
// List available beans for better error message
Map<String, ChatModel> availableBeans = applicationContext.getBeansOfType(ChatModel.class);
String availableNames = String.join(", ", availableBeans.keySet());
throw new IllegalStateException(
String.format("No ChatModel bean found with name '%s'. Available beans: %s",
beanName, availableNames), e);
}
} else {
// Get primary or unique bean
ChatModel chatModel = chatModelProvider.getIfUnique();
if (chatModel == null) {
// Try to get any available bean
chatModel = chatModelProvider.getIfAvailable();
if (chatModel == null) {
throw new IllegalStateException(
"No ChatModel bean available. Please configure a Spring AI ChatModel bean.");
}
// If multiple beans exist, warn about ambiguity
Map<String, ChatModel> availableBeans = applicationContext.getBeansOfType(ChatModel.class);
if (availableBeans.size() > 1) {
log.warn("Multiple ChatModel beans found: {}. Using: {}. " +
"Consider specifying 'driftkit.clients.spring-ai.model.bean-name' property.",
availableBeans.keySet(), chatModel.getClass().getSimpleName());
}
}
log.debug("Using ChatModel bean of type: {}", chatModel.getClass().getName());
return chatModel;
}
}
/**
* Validates the configuration properties.
*
* @throws IllegalArgumentException if configuration is invalid
*/
private void validateConfiguration() {
// Validate temperature range
if (properties.getDefaultTemperature() != null) {
double temp = properties.getDefaultTemperature();
if (temp < 0.0 || temp > 2.0) {
throw new IllegalArgumentException(
String.format("Invalid temperature value: %f. Must be between 0.0 and 2.0", temp));
}
}
// Validate top-p range
if (properties.getDefaultTopP() != null) {
double topP = properties.getDefaultTopP();
if (topP < 0.0 || topP > 1.0) {
throw new IllegalArgumentException(
String.format("Invalid top-p value: %f. Must be between 0.0 and 1.0", topP));
}
}
// Validate max tokens
if (properties.getDefaultMaxTokens() != null) {
int maxTokens = properties.getDefaultMaxTokens();
if (maxTokens <= 0) {
throw new IllegalArgumentException(
String.format("Invalid max tokens value: %d. Must be positive", maxTokens));
}
}
}
/**
* Applies the configured default options to the client.
*
* @param client SpringAIModelClient to configure
*/
private void applyDefaultConfigurations(SpringAIModelClient client) {
if (properties.getDefaultModel() != null) {
client.withModel(properties.getDefaultModel());
log.debug("Applied default model: {}", properties.getDefaultModel());
}
if (properties.getDefaultTemperature() != null) {
client.withTemperature(properties.getDefaultTemperature());
log.debug("Applied default temperature: {}", properties.getDefaultTemperature());
}
if (properties.getDefaultMaxTokens() != null) {
client.withMaxTokens(properties.getDefaultMaxTokens());
log.debug("Applied default max tokens: {}", properties.getDefaultMaxTokens());
}
if (properties.getDefaultTopP() != null) {
client.withTopP(properties.getDefaultTopP());
log.debug("Applied default top-p: {}", properties.getDefaultTopP());
}
}
}
|