| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/ValidationHelper.java
|
package ai.driftkit.workflow.test.core;
import org.apache.commons.lang3.StringUtils;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
/**
* Centralized validation helper for the test framework.
* Provides consistent validation and error messages.
*/
public class ValidationHelper {
/**
* Validates that an object is not null.
*
* @param obj the object to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if obj is null
*/
public void requireNonNull(Object obj, String parameterName) {
if (obj == null) {
throw new IllegalArgumentException(parameterName + " cannot be null");
}
}
/**
* Validates that a string is not null or blank.
*
* @param str the string to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if str is null or blank
*/
public void requireNonBlank(String str, String parameterName) {
if (StringUtils.isBlank(str)) {
throw new IllegalArgumentException(parameterName + " cannot be null or blank");
}
}
/**
* Validates that a collection is not null or empty.
*
* @param collection the collection to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if collection is null or empty
*/
public void requireNonEmpty(Collection<?> collection, String parameterName) {
if (collection == null || collection.isEmpty()) {
throw new IllegalArgumentException(parameterName + " cannot be null or empty");
}
}
/**
* Validates that a map is not null or empty.
*
* @param map the map to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if map is null or empty
*/
public void requireNonEmpty(Map<?, ?> map, String parameterName) {
if (map == null || map.isEmpty()) {
throw new IllegalArgumentException(parameterName + " cannot be null or empty");
}
}
/**
* Validates that an array is not null or empty.
*
* @param array the array to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if array is null or empty
*/
public void requireNonEmpty(Object[] array, String parameterName) {
if (array == null || array.length == 0) {
throw new IllegalArgumentException(parameterName + " cannot be null or empty");
}
}
/**
* Validates that a number is positive.
*
* @param number the number to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if number is not positive
*/
public void requirePositive(int number, String parameterName) {
if (number <= 0) {
throw new IllegalArgumentException(parameterName + " must be positive but was " + number);
}
}
/**
* Validates that a number is positive.
*
* @param number the number to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if number is not positive
*/
public void requirePositive(long number, String parameterName) {
if (number <= 0) {
throw new IllegalArgumentException(parameterName + " must be positive but was " + number);
}
}
/**
* Validates that a number is non-negative.
*
* @param number the number to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if number is negative
*/
public void requireNonNegative(int number, String parameterName) {
if (number < 0) {
throw new IllegalArgumentException(parameterName + " cannot be negative but was " + number);
}
}
/**
* Validates that a number is non-negative.
*
* @param number the number to validate
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if number is negative
*/
public void requireNonNegative(long number, String parameterName) {
if (number < 0) {
throw new IllegalArgumentException(parameterName + " cannot be negative but was " + number);
}
}
/**
* Validates that a condition is true.
*
* @param condition the condition to validate
* @param message the error message if condition is false
* @throws IllegalArgumentException if condition is false
*/
public void require(boolean condition, String message) {
if (!condition) {
throw new IllegalArgumentException(message);
}
}
/**
* Validates that an object is of a specific type.
*
* @param obj the object to validate
* @param expectedType the expected type
* @param parameterName the parameter name for error message
* @throws IllegalArgumentException if obj is not of expected type
*/
public void requireType(Object obj, Class<?> expectedType, String parameterName) {
requireNonNull(obj, parameterName);
requireNonNull(expectedType, "expectedType");
if (!expectedType.isInstance(obj)) {
throw new IllegalArgumentException(
parameterName + " must be of type " + expectedType.getName() +
" but was " + obj.getClass().getName()
);
}
}
/**
* Validates multiple non-null parameters at once.
*
* @param parameters pairs of objects and their names
* @throws IllegalArgumentException if any parameter is null
*/
public void requireNonNulls(Object... parameters) {
if (parameters.length % 2 != 0) {
throw new IllegalArgumentException("Parameters must be provided in pairs (object, name)");
}
for (int i = 0; i < parameters.length; i += 2) {
Object obj = parameters[i];
String name = (String) parameters[i + 1];
requireNonNull(obj, name);
}
}
}
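/*
 * Usage sketch (illustrative only; the variable names below are hypothetical placeholders
 * supplied by the calling test component):
 *
 *   ValidationHelper validator = new ValidationHelper();
 *   validator.requireNonBlank(workflowId, "workflowId");        // rejects null and blank strings
 *   validator.requireNonEmpty(steps, "steps");                  // rejects null and empty collections
 *   validator.requirePositive(maxRetries, "maxRetries");        // rejects zero and negative values
 *   validator.requireType(result, StepResult.class, "result");  // rejects wrong runtime types
 *
 * Each check throws IllegalArgumentException with a message built from the parameter name.
 */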
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/WorkflowAwaiter.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.persistence.WorkflowStateRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.awaitility.Awaitility;
import org.awaitility.core.ConditionTimeoutException;
import java.time.Duration;
import java.util.Objects;
import java.util.concurrent.TimeoutException;
import java.util.function.Predicate;
/**
* Utility for waiting on workflow conditions without Thread.sleep() hacks.
* Uses Awaitility for proper polling and timeout handling.
*/
@Slf4j
@RequiredArgsConstructor
public class WorkflowAwaiter {
private final WorkflowStateRepository repository;
/**
* Default poll interval for checking conditions.
*/
private static final Duration DEFAULT_POLL_INTERVAL = Duration.ofMillis(50);
/**
* Waits for a workflow to reach a specific status.
*
* @param runId the workflow run ID
* @param expectedStatus the expected status
* @param timeout maximum time to wait
* @return the workflow instance when status is reached
* @throws TimeoutException if status is not reached within timeout
*/
public WorkflowInstance awaitStatus(String runId, WorkflowInstance.WorkflowStatus expectedStatus,
Duration timeout) throws TimeoutException {
Objects.requireNonNull(runId, "runId cannot be null");
Objects.requireNonNull(expectedStatus, "expectedStatus cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
log.debug("Waiting for workflow {} to reach status {}", runId, expectedStatus);
try {
return Awaitility.await("workflow status: " + expectedStatus)
.atMost(timeout)
.pollInterval(DEFAULT_POLL_INTERVAL)
.until(
() -> repository.load(runId).orElse(null),
instance -> instance != null && instance.getStatus() == expectedStatus
);
} catch (ConditionTimeoutException e) {
throw new TimeoutException(
"Workflow " + runId + " did not reach status " + expectedStatus + " within " + timeout
);
}
}
/**
* Waits for a workflow to complete (reach any terminal status).
*
* @param runId the workflow run ID
* @param timeout maximum time to wait
* @return the workflow instance when completed
* @throws TimeoutException if workflow doesn't complete within timeout
*/
public WorkflowInstance awaitCompletion(String runId, Duration timeout) throws TimeoutException {
Objects.requireNonNull(runId, "runId cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
log.debug("Waiting for workflow {} to complete", runId);
try {
return Awaitility.await("workflow completion")
.atMost(timeout)
.pollInterval(DEFAULT_POLL_INTERVAL)
.until(
() -> repository.load(runId).orElse(null),
instance -> instance != null && isTerminal(instance.getStatus())
);
} catch (ConditionTimeoutException e) {
throw new TimeoutException(
"Workflow " + runId + " did not complete within " + timeout
);
}
}
/**
* Waits for a workflow to exist in the repository.
*
* @param runId the workflow run ID
* @param timeout maximum time to wait
* @return the workflow instance when found
* @throws TimeoutException if workflow is not found within timeout
*/
public WorkflowInstance awaitExistence(String runId, Duration timeout) throws TimeoutException {
Objects.requireNonNull(runId, "runId cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
log.debug("Waiting for workflow {} to exist", runId);
try {
return Awaitility.await("workflow existence")
.atMost(timeout)
.pollInterval(DEFAULT_POLL_INTERVAL)
.until(
() -> repository.load(runId).orElse(null),
Objects::nonNull
);
} catch (ConditionTimeoutException e) {
throw new TimeoutException(
"Workflow " + runId + " was not found within " + timeout
);
}
}
/**
* Waits for a specific condition on a workflow.
*
* @param runId the workflow run ID
* @param condition the condition to wait for
* @param description description of what we're waiting for
* @param timeout maximum time to wait
* @return the workflow instance when condition is met
* @throws TimeoutException if condition is not met within timeout
*/
public WorkflowInstance awaitCondition(String runId,
Predicate<WorkflowInstance> condition,
String description,
Duration timeout) throws TimeoutException {
Objects.requireNonNull(runId, "runId cannot be null");
Objects.requireNonNull(condition, "condition cannot be null");
Objects.requireNonNull(description, "description cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
log.debug("Waiting for workflow {} to meet condition: {}", runId, description);
try {
return Awaitility.await(description)
.atMost(timeout)
.pollInterval(DEFAULT_POLL_INTERVAL)
.until(
() -> repository.load(runId).orElse(null),
instance -> instance != null && condition.test(instance)
);
} catch (ConditionTimeoutException e) {
throw new TimeoutException(
"Workflow " + runId + " did not meet condition '" + description + "' within " + timeout
);
}
}
/**
* Checks if a workflow status is terminal.
*
* @param status the workflow status
* @return true if the status is terminal
*/
private boolean isTerminal(WorkflowInstance.WorkflowStatus status) {
return status == WorkflowInstance.WorkflowStatus.COMPLETED
|| status == WorkflowInstance.WorkflowStatus.FAILED
|| status == WorkflowInstance.WorkflowStatus.CANCELLED;
}
}
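/*
 * Usage sketch (illustrative; "repository" and "runId" are placeholders provided by the test):
 *
 *   WorkflowAwaiter awaiter = new WorkflowAwaiter(repository);
 *   WorkflowInstance done = awaiter.awaitCompletion(runId, Duration.ofSeconds(5));
 *   WorkflowInstance suspended = awaiter.awaitStatus(
 *       runId, WorkflowInstance.WorkflowStatus.SUSPENDED, Duration.ofSeconds(5));
 *   awaiter.awaitCondition(runId, i -> i.getContext() != null,
 *       "context initialized", Duration.ofSeconds(5));
 *
 * Every await* method throws TimeoutException if the condition is not met within the timeout.
 */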
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/WorkflowExecutionException.java
|
package ai.driftkit.workflow.test.core;
/**
* Exception thrown when workflow execution fails during testing.
*/
public class WorkflowExecutionException extends WorkflowTestException {
public WorkflowExecutionException(String message) {
super(message);
}
public WorkflowExecutionException(String message, Throwable cause) {
super(message, cause);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/WorkflowTestBase.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.persistence.WorkflowStateRepository;
import ai.driftkit.workflow.test.assertions.AssertionEngine;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.AfterEach;
import java.time.Duration;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* Base class for all workflow tests providing common functionality.
* This class is agnostic to workflow definition style (annotations vs fluent API).
*/
@Slf4j
public abstract class WorkflowTestBase {
@Getter
protected WorkflowEngine engine;
@Getter
protected WorkflowTestContext testContext;
@Getter
protected WorkflowTestInterceptor testInterceptor;
@Getter
protected WorkflowTestOrchestrator orchestrator;
@Getter
protected AssertionEngine assertions;
/**
* Default timeout for workflow execution.
*/
protected static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(30);
@BeforeEach
void setupBase() {
log.debug("Setting up workflow test base");
// Create test context
this.testContext = new WorkflowTestContext();
// Create test interceptor and share MockRegistry and ExecutionTracker
this.testInterceptor = new WorkflowTestInterceptor(
testContext.getMockRegistry(),
testContext.getExecutionTracker()
);
// Create engine with interceptor
this.engine = createAndConfigureEngine();
// Create orchestrator
this.orchestrator = new WorkflowTestOrchestrator(
testContext.getMockRegistry(),
testContext.getExecutionTracker(),
testInterceptor,
engine
);
// Create assertion engine
this.assertions = new AssertionEngine(testContext.getExecutionTracker());
log.debug("Workflow test base setup complete");
}
@AfterEach
void tearDownBase() {
log.debug("Tearing down workflow test base");
// Clear test context
if (testContext != null) {
testContext.clear();
}
// Clear interceptor state
if (testInterceptor != null) {
testInterceptor.clear();
}
// Shutdown engine
if (engine != null) {
engine.shutdown();
}
log.debug("Workflow test base teardown complete");
}
/**
* Creates and configures the workflow engine.
* Subclasses can override this to provide custom engine configuration.
* Default implementation returns a new WorkflowEngine instance.
*
* @return configured workflow engine
*/
protected WorkflowEngine createEngine() {
return new WorkflowEngine();
}
/**
* Creates and configures the workflow engine with test interceptor.
*/
private WorkflowEngine createAndConfigureEngine() {
WorkflowEngine engine = createEngine();
engine.addInterceptor(testInterceptor);
return engine;
}
// Common test utilities
/**
* Executes a workflow synchronously and waits for completion.
*
* @param workflowId the workflow to execute
* @param input the input data
* @param <T> input type
* @param <R> result type
* @return the workflow result
* @throws WorkflowExecutionException if workflow execution fails
*/
protected <T, R> R executeWorkflow(String workflowId, T input) throws WorkflowExecutionException {
return executeWorkflow(workflowId, input, DEFAULT_TIMEOUT);
}
/**
* Executes a workflow synchronously and waits for completion.
*
* @param workflowId the workflow to execute
* @param input the input data
* @param timeout maximum time to wait
* @param <T> input type
* @param <R> result type
* @return the workflow result
* @throws WorkflowExecutionException if workflow execution fails
*/
@SuppressWarnings("unchecked")
protected <T, R> R executeWorkflow(String workflowId, T input, Duration timeout) throws WorkflowExecutionException {
Objects.requireNonNull(workflowId, "workflowId cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
// Allow null input for workflows where initial step only uses WorkflowContext
var execution = engine.execute(workflowId, input);
try {
return (R) execution.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new WorkflowExecutionException("Workflow execution interrupted", e);
} catch (TimeoutException e) {
throw new WorkflowExecutionException("Workflow " + workflowId + " did not complete within " + timeout, e);
} catch (Exception e) {
throw new WorkflowExecutionException("Workflow execution failed", e);
}
}
/**
* Executes a workflow asynchronously.
*
* @param workflowId the workflow to execute
* @param input the input data
* @param <T> input type
* @param <R> result type
* @return workflow execution handle
*/
protected <T, R> WorkflowEngine.WorkflowExecution<R> executeWorkflowAsync(String workflowId, T input) {
Objects.requireNonNull(workflowId, "workflowId cannot be null");
// Allow null input for workflows where initial step only uses WorkflowContext
return engine.execute(workflowId, input);
}
/**
* Executes a workflow with a specific chatId and expects it to suspend.
*
* @param workflowId the workflow to execute
* @param input the input data (can be null)
* @param chatId the chat ID to associate with the execution
* @param timeout maximum time to wait for suspend
* @param <T> input type
* @return the suspended workflow execution handle
* @throws WorkflowExecutionException if workflow doesn't suspend within timeout
*/
protected <T> WorkflowEngine.WorkflowExecution<?> executeAndExpectSuspendWithChat(
String workflowId, T input, String chatId, Duration timeout) throws WorkflowExecutionException {
Objects.requireNonNull(workflowId, "workflowId cannot be null");
Objects.requireNonNull(chatId, "chatId cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
// Allow null input for workflows where initial step only uses WorkflowContext
String instanceId = UUID.randomUUID().toString();
var execution = engine.execute(workflowId, input, instanceId, chatId);
String runId = execution.getRunId();
try {
// Wait for workflow to reach SUSPENDED status
waitForStatus(runId, WorkflowInstance.WorkflowStatus.SUSPENDED, timeout);
return execution;
} catch (WorkflowExecutionException e) {
// Check if workflow completed instead of suspending
if (execution.isDone()) {
Object result;
try {
result = execution.get(1, TimeUnit.MILLISECONDS);
} catch (Exception ex) {
throw new WorkflowExecutionException("Expected workflow to suspend, but it failed: " + ex.getMessage(), ex);
}
// get() succeeded, so the workflow completed instead of suspending
throw new WorkflowExecutionException("Expected workflow to suspend, but it completed with result: " + result, e);
}
// Check current status for better error message
WorkflowInstance instance = getWorkflowInstance(runId);
if (instance != null) {
throw new WorkflowExecutionException("Expected workflow to suspend within " + timeout +
", but status is: " + instance.getStatus(), e);
} else {
throw new WorkflowExecutionException("Expected workflow to suspend within " + timeout +
", but workflow instance not found", e);
}
}
}
/**
* Executes a workflow that is expected to suspend.
* Waits for the workflow to reach SUSPENDED status.
*
* @param workflowId the workflow to execute
* @param input the input data
* @param <T> input type
* @return the suspended workflow execution handle
* @throws WorkflowExecutionException if workflow doesn't suspend within timeout
*/
protected <T> WorkflowEngine.WorkflowExecution<?> executeAndExpectSuspend(String workflowId, T input) throws WorkflowExecutionException {
return executeAndExpectSuspend(workflowId, input, DEFAULT_TIMEOUT);
}
/**
* Executes a workflow that is expected to suspend.
* Waits for the workflow to reach SUSPENDED status.
*
* @param workflowId the workflow to execute
* @param input the input data
* @param timeout maximum time to wait for suspend
* @param <T> input type
* @return the suspended workflow execution handle
* @throws WorkflowExecutionException if workflow doesn't suspend within timeout
*/
protected <T> WorkflowEngine.WorkflowExecution<?> executeAndExpectSuspend(String workflowId, T input, Duration timeout) throws WorkflowExecutionException {
Objects.requireNonNull(workflowId, "workflowId cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
// Allow null input for workflows where initial step only uses WorkflowContext
var execution = engine.execute(workflowId, input);
String runId = execution.getRunId();
try {
// Wait for workflow to reach SUSPENDED status
waitForStatus(runId, WorkflowInstance.WorkflowStatus.SUSPENDED, timeout);
return execution;
} catch (WorkflowExecutionException e) {
// Check if workflow completed instead of suspending
if (execution.isDone()) {
Object result;
try {
result = execution.get(1, TimeUnit.MILLISECONDS);
} catch (Exception ex) {
throw new WorkflowExecutionException("Expected workflow to suspend, but it failed: " + ex.getMessage(), ex);
}
// get() succeeded, so the workflow completed instead of suspending
throw new WorkflowExecutionException("Expected workflow to suspend, but it completed with result: " + result, e);
}
// Check current status for better error message
WorkflowInstance instance = getWorkflowInstance(runId);
if (instance != null) {
throw new WorkflowExecutionException("Expected workflow to suspend within " + timeout +
", but status is: " + instance.getStatus(), e);
} else {
throw new WorkflowExecutionException("Expected workflow to suspend within " + timeout +
", but workflow instance not found", e);
}
}
}
/**
* Resumes a suspended workflow.
*
* @param runId the workflow run ID
* @param event the event to resume with
* @param <E> event type
* @param <R> result type
* @return the workflow result
* @throws WorkflowExecutionException if workflow execution fails
*/
protected <E, R> R resumeWorkflow(String runId, E event) throws WorkflowExecutionException {
return resumeWorkflow(runId, event, DEFAULT_TIMEOUT);
}
/**
* Resumes a suspended workflow.
*
* @param runId the workflow run ID
* @param event the event to resume with
* @param timeout maximum time to wait
* @param <E> event type
* @param <R> result type
* @return the workflow result
* @throws WorkflowExecutionException if workflow execution fails
*/
@SuppressWarnings("unchecked")
protected <E, R> R resumeWorkflow(String runId, E event, Duration timeout) throws WorkflowExecutionException {
Objects.requireNonNull(runId, "runId cannot be null");
Objects.requireNonNull(event, "event cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
var execution = engine.resume(runId, event);
// Wait for workflow to reach a terminal state (COMPLETED, FAILED, CANCELLED) or SUSPENDED
long deadline = System.currentTimeMillis() + timeout.toMillis();
while (System.currentTimeMillis() < deadline) {
WorkflowInstance instance = getWorkflowInstance(runId);
if (instance == null) {
throw new WorkflowExecutionException("Workflow instance not found: " + runId);
}
WorkflowInstance.WorkflowStatus status = instance.getStatus();
// If suspended, return the suspension data
if (status == WorkflowInstance.WorkflowStatus.SUSPENDED) {
var context = instance.getContext();
var lastStepId = context.getLastStepId();
if (lastStepId != null) {
var stepOutputs = context.getStepOutputs();
var stepOutput = stepOutputs.get(lastStepId);
if (stepOutput != null && stepOutput.hasValue()) {
return (R) stepOutput.getValue();
}
}
return null;
}
// If completed, get result from execution
if (status == WorkflowInstance.WorkflowStatus.COMPLETED) {
if (execution.isDone()) {
try {
return (R) execution.get();
} catch (Exception e) {
throw new WorkflowExecutionException("Failed to get workflow result", e);
}
}
}
// If failed or cancelled, throw exception
if (status == WorkflowInstance.WorkflowStatus.FAILED ||
status == WorkflowInstance.WorkflowStatus.CANCELLED) {
throw new WorkflowExecutionException("Workflow " + runId + " ended with status: " + status);
}
// Still running, wait a bit
try {
Thread.sleep(50);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new WorkflowExecutionException("Workflow resumption interrupted", e);
}
}
throw new WorkflowExecutionException("Workflow " + runId + " did not reach terminal state within " + timeout);
}
/**
* Gets workflow instance by run ID.
*
* @param runId the workflow run ID
* @return the workflow instance or null if not found
*/
protected WorkflowInstance getWorkflowInstance(String runId) {
Objects.requireNonNull(runId, "runId cannot be null");
return engine.getWorkflowInstance(runId).orElse(null);
}
/**
* Waits for a workflow to reach a specific status.
*
* @param runId the workflow run ID
* @param status the expected status
* @param timeout maximum time to wait
* @throws WorkflowExecutionException if status is not reached within timeout
*/
protected void waitForStatus(String runId, WorkflowInstance.WorkflowStatus status, Duration timeout)
throws WorkflowExecutionException {
Objects.requireNonNull(runId, "runId cannot be null");
Objects.requireNonNull(status, "status cannot be null");
Objects.requireNonNull(timeout, "timeout cannot be null");
long startTime = System.currentTimeMillis();
long timeoutMs = timeout.toMillis();
long sleepTime = 10; // Start with 10ms
while (System.currentTimeMillis() - startTime < timeoutMs) {
WorkflowInstance instance = getWorkflowInstance(runId);
if (instance != null && instance.getStatus() == status) {
return; // Success
}
// Exponential backoff up to 100ms
try {
Thread.sleep(Math.min(sleepTime, 100));
sleepTime = Math.min(sleepTime * 2, 100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new WorkflowExecutionException("Interrupted while waiting for workflow status", e);
}
}
// Timeout reached
WorkflowInstance instance = getWorkflowInstance(runId);
WorkflowInstance.WorkflowStatus currentStatus = instance != null ? instance.getStatus() : null;
throw new WorkflowExecutionException(
"Workflow " + runId + " did not reach status " + status + " within " + timeout +
" (current status: " + currentStatus + ")"
);
}
/**
* Waits for a workflow to reach a specific status with default timeout.
*
* @param runId the workflow run ID
* @param status the expected status
* @throws WorkflowExecutionException if status is not reached within default timeout
*/
protected void waitForStatus(String runId, WorkflowInstance.WorkflowStatus status) throws WorkflowExecutionException {
waitForStatus(runId, status, DEFAULT_TIMEOUT);
}
}
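/*
 * Usage sketch (illustrative; "order-workflow" and the OrderRequest/OrderResult/UserApproval
 * types are hypothetical stand-ins for the project under test):
 *
 *   class OrderWorkflowTest extends WorkflowTestBase {
 *       @Test
 *       void completesOrder() {
 *           OrderResult result = executeWorkflow("order-workflow", new OrderRequest("42"));
 *       }
 *
 *       @Test
 *       void suspendsAndResumes() {
 *           var execution = executeAndExpectSuspend("order-workflow", new OrderRequest("42"));
 *           OrderResult resumed = resumeWorkflow(execution.getRunId(), new UserApproval(true));
 *       }
 *   }
 *
 * Subclasses may override createEngine() to supply a custom WorkflowEngine configuration;
 * setupBase() then registers the shared test interceptor on that engine.
 */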
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/WorkflowTestContext.java
|
package ai.driftkit.workflow.test.core;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.ConcurrentHashMap;
import java.util.Map;
import java.util.Objects;
/**
* Test context that holds shared state and configuration for workflow tests.
* Thread-safe implementation for use in concurrent test scenarios.
*/
@Slf4j
@Getter
public class WorkflowTestContext {
private final Map<String, Object> attributes = new ConcurrentHashMap<>();
private final MockRegistry mockRegistry = new MockRegistry();
private final ExecutionTracker executionTracker = new ExecutionTracker();
private WorkflowTestInterceptor testInterceptor;
/**
* Stores an attribute in the test context.
*
* @param key the attribute key
* @param value the attribute value
* @return this context for chaining
*/
public WorkflowTestContext setAttribute(String key, Object value) {
Objects.requireNonNull(key, "key cannot be null");
attributes.put(key, value);
return this;
}
/**
* Gets an attribute from the test context.
*
* @param key the attribute key
* @param <T> the expected type
* @return the attribute value or null if not found
*/
@SuppressWarnings("unchecked")
public <T> T getAttribute(String key) {
Objects.requireNonNull(key, "key cannot be null");
return (T) attributes.get(key);
}
/**
* Gets an attribute from the test context with a default value.
*
* @param key the attribute key
* @param defaultValue the default value if not found
* @param <T> the expected type
* @return the attribute value or default value
*/
@SuppressWarnings("unchecked")
public <T> T getAttribute(String key, T defaultValue) {
Objects.requireNonNull(key, "key cannot be null");
return (T) attributes.getOrDefault(key, defaultValue);
}
/**
* Removes an attribute from the test context.
*
* @param key the attribute key
* @return the removed value or null if not found
*/
@SuppressWarnings("unchecked")
public <T> T removeAttribute(String key) {
Objects.requireNonNull(key, "key cannot be null");
return (T) attributes.remove(key);
}
/**
* Checks if an attribute exists in the test context.
*
* @param key the attribute key
* @return true if the attribute exists
*/
public boolean hasAttribute(String key) {
Objects.requireNonNull(key, "key cannot be null");
return attributes.containsKey(key);
}
/**
* Clears all state in the test context.
*/
public void clear() {
log.debug("Clearing test context");
attributes.clear();
mockRegistry.clear();
executionTracker.clear();
}
/**
* Configures the test context using a fluent configuration lambda.
*
* @param configurer the configuration lambda
* @return this context for chaining
*/
public WorkflowTestContext configure(TestConfigurer configurer) {
Objects.requireNonNull(configurer, "configurer cannot be null");
configurer.configure(new TestConfiguration(this));
return this;
}
/**
* Interface for test configuration.
*/
@FunctionalInterface
public interface TestConfigurer {
void configure(TestConfiguration config);
}
/**
* Test configuration builder.
*/
public static class TestConfiguration {
private final WorkflowTestContext context;
TestConfiguration(WorkflowTestContext context) {
this.context = context;
}
/**
* Starts mock configuration.
*
* @return mock builder
*/
public MockBuilder mock() {
return new MockBuilder(context.mockRegistry);
}
/**
* Sets a context attribute.
*
* @param key the attribute key
* @param value the attribute value
* @return this configuration for chaining
*/
public TestConfiguration withAttribute(String key, Object value) {
context.setAttribute(key, value);
return this;
}
}
/**
* Gets the mock builder for fluent API.
*
* @return mock builder
*/
public MockBuilder getMockBuilder() {
return new MockBuilder(mockRegistry);
}
/**
* Sets the test interceptor.
*
* @param interceptor the interceptor to set
*/
public void setTestInterceptor(WorkflowTestInterceptor interceptor) {
this.testInterceptor = interceptor;
}
/**
* Resets the test context state.
* Alias for clear() for better API consistency.
*/
public void reset() {
clear();
}
}
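/*
 * Usage sketch (illustrative; attribute keys and values are placeholders):
 *
 *   WorkflowTestContext context = new WorkflowTestContext();
 *   context.setAttribute("tenant", "acme")
 *          .configure(config -> config.withAttribute("featureFlag", true));
 *   String tenant = context.getAttribute("tenant");
 *   boolean flag = context.getAttribute("missing", false);  // falls back to the default value
 *   context.clear();                                         // wipes attributes, mocks and tracking
 */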
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/WorkflowTestException.java
|
package ai.driftkit.workflow.test.core;
/**
* Base exception for workflow test framework errors.
*/
public class WorkflowTestException extends RuntimeException {
/**
* Creates a new workflow test exception.
*
* @param message the error message
*/
public WorkflowTestException(String message) {
super(message);
}
/**
* Creates a new workflow test exception with a cause.
*
* @param message the error message
* @param cause the underlying cause
*/
public WorkflowTestException(String message, Throwable cause) {
super(message, cause);
}
/**
* Creates a new workflow test exception with a cause.
*
* @param cause the underlying cause
*/
public WorkflowTestException(Throwable cause) {
super(cause);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/WorkflowTestInterceptor.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.ExecutionInterceptor;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.graph.StepNode;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.util.Objects;
import java.util.Optional;
/**
* Test interceptor that integrates with the test framework.
* Provides mock execution and tracking capabilities.
*/
@Slf4j
public class WorkflowTestInterceptor implements ExecutionInterceptor {
@Getter
private final MockRegistry mockRegistry;
@Getter
private final ExecutionTracker executionTracker;
private final ThreadLocal<StepContext> currentStepContext = new ThreadLocal<>();
/**
* Creates an interceptor with the provided mock registry and execution tracker.
* This allows sharing of components between test context and interceptor.
*
* @param mockRegistry the mock registry to use
* @param executionTracker the execution tracker to use
*/
public WorkflowTestInterceptor(MockRegistry mockRegistry, ExecutionTracker executionTracker) {
this.mockRegistry = Objects.requireNonNull(mockRegistry, "mockRegistry cannot be null");
this.executionTracker = Objects.requireNonNull(executionTracker, "executionTracker cannot be null");
}
/**
* Creates an interceptor with the provided mock registry.
* This allows sharing of mocks between test context and interceptor.
*
* @param mockRegistry the mock registry to use
*/
public WorkflowTestInterceptor(MockRegistry mockRegistry) {
this(mockRegistry, new ExecutionTracker());
}
/**
* Creates an interceptor with a new mock registry.
* For backward compatibility.
*/
public WorkflowTestInterceptor() {
this(new MockRegistry(), new ExecutionTracker());
}
public void beforeWorkflowStart(WorkflowInstance instance, Object input) {
log.debug("Before workflow start: {} with input: {}", instance.getWorkflowId(), input);
// Set up InternalStepListener for branch/parallel testing
WorkflowContext context = instance.getContext();
if (context != null) {
log.debug("Setting up TestInternalStepListener for workflow: {}", instance.getWorkflowId());
TestInternalStepListener listener = new TestInternalStepListener(
instance, executionTracker, mockRegistry);
context.setInternalStepListener(listener);
log.debug("TestInternalStepListener installed successfully");
} else {
log.warn("WorkflowContext is null for instance: {}", instance.getInstanceId());
}
executionTracker.recordWorkflowStart(instance, input);
}
public void afterWorkflowComplete(WorkflowInstance instance, Object result) {
log.debug("After workflow complete: {} with result: {}", instance.getWorkflowId(), result);
executionTracker.recordWorkflowComplete(instance, result);
}
public void onWorkflowError(WorkflowInstance instance, Throwable error) {
log.debug("On workflow error: {} with error: {}", instance.getWorkflowId(), error.getMessage());
executionTracker.recordWorkflowError(instance, error);
}
@Override
public void beforeStep(WorkflowInstance instance, StepNode step, Object input) {
log.debug("Before step execution: {}.{} with input: {}",
instance.getWorkflowId(), step.id(), input);
// Set up InternalStepListener on first step if not already set
WorkflowContext workflowContext = instance.getContext();
if (workflowContext != null && workflowContext.getInternalStepListener() == null) {
log.debug("Setting up TestInternalStepListener on first step for workflow: {}", instance.getWorkflowId());
TestInternalStepListener listener = new TestInternalStepListener(
instance, executionTracker, mockRegistry);
workflowContext.setInternalStepListener(listener);
log.debug("TestInternalStepListener installed successfully");
}
StepContext context = new StepContext(instance, step, input);
currentStepContext.set(context);
executionTracker.recordStepStart(context);
}
@Override
public void afterStep(WorkflowInstance instance, StepNode step, StepResult<?> result) {
log.debug("After step execution: {}.{} with result: {}",
instance.getWorkflowId(), step.id(), result);
try {
StepContext context = currentStepContext.get();
if (context != null) {
executionTracker.recordStepComplete(context, result);
}
} finally {
// Always remove ThreadLocal even if exception occurs
currentStepContext.remove();
}
}
@Override
public void onStepError(WorkflowInstance instance, StepNode step, Exception error) {
log.debug("On step error: {}.{} with error: {}",
instance.getWorkflowId(), step.id(), error.getMessage());
try {
StepContext context = currentStepContext.get();
if (context != null) {
executionTracker.recordStepError(context, error);
}
} finally {
// Always remove ThreadLocal even if exception occurs
currentStepContext.remove();
}
}
@Override
public Optional<StepResult<?>> interceptExecution(WorkflowInstance instance, StepNode step, Object input) {
String workflowId = instance.getWorkflowId();
String stepId = step.id();
log.debug("Intercepting execution: {}.{} with input: {}", workflowId, stepId, input);
// Create step context
StepContext context = new StepContext(instance, step, input);
// Try to find a mock
Optional<MockDefinition<?>> mockOpt = mockRegistry.findMock(context);
if (mockOpt.isPresent()) {
MockDefinition<?> mock = mockOpt.get();
log.debug("Found mock for {}.{}", workflowId, stepId);
try {
StepResult<?> result = mock.execute(input, context);
log.debug("Mock execution successful for {}.{} returned: {}", workflowId, stepId, result);
// If the mock returns StepResult.fail(), we should NOT return it as a result.
// Instead, we should throw an exception so the RetryExecutor can handle retries properly.
// The interceptor should only return successful results.
if (result instanceof StepResult.Fail) {
StepResult.Fail<?> failResult = (StepResult.Fail<?>) result;
Throwable error = failResult.error();
log.debug("Mock returned StepResult.fail(), throwing exception: {}", error.getMessage());
if (error instanceof RuntimeException) {
throw (RuntimeException) error;
} else {
throw new RuntimeException("Mock returned fail result", error);
}
}
return Optional.of(result);
} catch (RuntimeException e) {
// For retry testing, we need to let the original exception through
log.debug("Mock execution threw exception for {}.{}", workflowId, stepId, e);
throw e;
} catch (Exception e) {
log.error("Mock execution failed for {}.{}", workflowId, stepId, e);
throw new WorkflowTestException(
"Mock execution failed for " + workflowId + "." + stepId, e
);
}
}
log.debug("No mock found for {}.{}, using real implementation", workflowId, stepId);
return Optional.empty();
}
/**
* Clears all mocks and execution history.
*/
public void clear() {
log.debug("Clearing test interceptor state");
mockRegistry.clear();
executionTracker.clear();
currentStepContext.remove();
}
/**
* Gets the current step context (for internal use).
*
* @return current step context or null
*/
StepContext getCurrentStepContext() {
return currentStepContext.get();
}
}
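/*
 * Wiring sketch (illustrative; it mirrors how WorkflowTestBase shares the mock registry and
 * execution tracker with the interceptor):
 *
 *   WorkflowTestContext context = new WorkflowTestContext();
 *   WorkflowTestInterceptor interceptor = new WorkflowTestInterceptor(
 *       context.getMockRegistry(), context.getExecutionTracker());
 *   WorkflowEngine engine = new WorkflowEngine();
 *   engine.addInterceptor(interceptor);
 *
 * Mocks registered in the shared MockRegistry are consulted in interceptExecution();
 * real step implementations run only when no mock matches.
 */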
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/WorkflowTestOrchestrator.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.test.assertions.AssertionEngine;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.util.Objects;
/**
* Coordinates all test framework components.
* Provides a unified interface for workflow testing.
*/
@Slf4j
@Getter
public class WorkflowTestOrchestrator {
private final MockRegistry mockRegistry;
private final ExecutionTracker executionTracker;
private final AssertionEngine assertionEngine;
private final WorkflowTestInterceptor interceptor;
private final WorkflowEngine engine;
/**
* Creates an orchestrator with existing components.
*/
public WorkflowTestOrchestrator(MockRegistry mockRegistry,
ExecutionTracker executionTracker,
WorkflowTestInterceptor interceptor,
WorkflowEngine engine) {
this.mockRegistry = Objects.requireNonNull(mockRegistry, "mockRegistry cannot be null");
this.executionTracker = Objects.requireNonNull(executionTracker, "executionTracker cannot be null");
this.interceptor = Objects.requireNonNull(interceptor, "interceptor cannot be null");
this.engine = Objects.requireNonNull(engine, "engine cannot be null");
// Create assertion engine
this.assertionEngine = new AssertionEngine(executionTracker);
log.debug("WorkflowTestOrchestrator initialized");
}
/**
* Gets a mock builder for configuration.
*
* @return mock builder
*/
public MockBuilder mock() {
return new MockBuilder(mockRegistry);
}
/**
* Gets the assertion engine for verifications.
*
* @return assertion engine
*/
public AssertionEngine assertions() {
return assertionEngine;
}
/**
* Resets all test state.
*/
public void reset() {
log.debug("Resetting test orchestrator");
mockRegistry.clear();
executionTracker.clear();
}
/**
* Prepares for a new test scenario.
* Clears previous state but keeps configuration.
*/
public void prepare() {
log.debug("Preparing for new test scenario");
executionTracker.clear();
}
}
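/*
 * Usage sketch (illustrative; the workflow id, step id and validateStepMock are hypothetical,
 * and the fluent MockBuilder calls mirror those used by the JUnit extension):
 *
 *   orchestrator.mock()
 *       .workflow("order-workflow")
 *       .step("validate")
 *       .mockWith(validateStepMock)
 *       .register();
 *   // ... execute the workflow under test ...
 *   AssertionEngine assertions = orchestrator.assertions();  // verify tracked executions
 *   orchestrator.reset();                                     // clear mocks and tracking between scenarios
 */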
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/junit/InjectMocks.java
|
package ai.driftkit.workflow.test.junit;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Marks a field for automatic mock injection.
* All @Mock annotated fields in the test class will be injected
* into matching fields in the target object.
*/
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface InjectMocks {
/**
* Whether to inject only explicitly matched fields.
*/
boolean strictMatching() default false;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/junit/Mock.java
|
package ai.driftkit.workflow.test.junit;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Marks a field for mock injection.
* This annotation is used as a compatibility layer with Mockito's @Mock.
* When Mockito is not available, this provides similar functionality.
*/
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Mock {
/**
* Optional name for the mock.
*/
String name() default "";
/**
* Extra interfaces to mock.
*/
Class<?>[] extraInterfaces() default {};
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/junit/WorkflowMock.java
|
package ai.driftkit.workflow.test.junit;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Marks a field as a workflow step mock.
* The field must be a Mockito mock that will be registered
* to handle specific workflow step execution.
*/
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface WorkflowMock {
/**
* The workflow ID this mock applies to.
*/
String workflow();
/**
* The step ID this mock applies to.
*/
String step();
/**
* Optional description for documentation.
*/
String description() default "";
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/junit/WorkflowTestExtension.java
|
package ai.driftkit.workflow.test.junit;
import ai.driftkit.workflow.test.core.WorkflowTestBase;
import ai.driftkit.workflow.test.core.WorkflowTestContext;
import ai.driftkit.workflow.test.core.WorkflowTestOrchestrator;
import ai.driftkit.workflow.test.core.ExecutionTracker;
import ai.driftkit.workflow.test.core.MockRegistry;
import ai.driftkit.workflow.test.core.WorkflowTestInterceptor;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.extension.*;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.internal.util.MockUtil;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* JUnit 5 extension for workflow testing.
* Provides automatic setup and injection of test components.
*/
@Slf4j
public class WorkflowTestExtension implements BeforeEachCallback, AfterEachCallback, ParameterResolver, TestInstancePostProcessor {
private static final ExtensionContext.Namespace NAMESPACE = ExtensionContext.Namespace.create(WorkflowTestExtension.class);
private static final String TEST_CONTEXT_KEY = "workflow.test.context";
private static final String ORCHESTRATOR_KEY = "workflow.test.orchestrator";
private static final String ENGINE_KEY = "workflow.test.engine";
private static final String MOCKITO_SESSION_KEY = "mockito.session";
@Override
public void postProcessTestInstance(Object testInstance, ExtensionContext context) throws Exception {
Objects.requireNonNull(testInstance, "testInstance cannot be null");
Objects.requireNonNull(context, "context cannot be null");
// Initialize Mockito annotations
AutoCloseable mockitoSession = MockitoAnnotations.openMocks(testInstance);
context.getStore(NAMESPACE).put(MOCKITO_SESSION_KEY, mockitoSession);
log.debug("Initialized Mockito for test instance: {}", testInstance.getClass().getName());
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
Objects.requireNonNull(context, "context cannot be null");
Object testInstance = context.getRequiredTestInstance();
ExtensionContext.Store store = context.getStore(NAMESPACE);
try {
// Create test components
WorkflowTestContext testContext = createTestContext();
WorkflowEngine engine = createWorkflowEngine();
WorkflowTestOrchestrator orchestrator = createOrchestrator(testContext, engine);
// Store components
store.put(TEST_CONTEXT_KEY, testContext);
store.put(ENGINE_KEY, engine);
store.put(ORCHESTRATOR_KEY, orchestrator);
// Inject into test instance if it extends WorkflowTestBase
if (testInstance instanceof WorkflowTestBase base) {
injectTestBase(base, testContext, engine);
}
// Process workflow mock annotations
processWorkflowMockAnnotations(testInstance, testContext);
// Process workflow under test annotations
processWorkflowUnderTestAnnotations(testInstance, engine);
log.info("Workflow test setup completed for: {}", context.getDisplayName());
} catch (Exception e) {
log.error("Failed to setup workflow test", e);
throw new ExtensionConfigurationException("Failed to setup workflow test", e);
}
}
@Override
public void afterEach(ExtensionContext context) throws Exception {
Objects.requireNonNull(context, "context cannot be null");
ExtensionContext.Store store = context.getStore(NAMESPACE);
try {
// Cleanup test context
WorkflowTestContext testContext = store.get(TEST_CONTEXT_KEY, WorkflowTestContext.class);
if (testContext != null) {
testContext.reset();
log.debug("Test context reset completed");
}
// Cleanup Mockito session
AutoCloseable mockitoSession = store.get(MOCKITO_SESSION_KEY, AutoCloseable.class);
if (mockitoSession != null) {
mockitoSession.close();
log.debug("Mockito session closed");
}
// Clear store
store.remove(TEST_CONTEXT_KEY);
store.remove(ENGINE_KEY);
store.remove(ORCHESTRATOR_KEY);
store.remove(MOCKITO_SESSION_KEY);
} catch (Exception e) {
log.error("Failed to cleanup workflow test", e);
throw new RuntimeException("Failed to cleanup workflow test", e);
}
}
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
Objects.requireNonNull(parameterContext, "parameterContext cannot be null");
Objects.requireNonNull(extensionContext, "extensionContext cannot be null");
Class<?> type = parameterContext.getParameter().getType();
return type == WorkflowTestContext.class ||
type == WorkflowTestOrchestrator.class ||
type == WorkflowEngine.class;
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
Objects.requireNonNull(parameterContext, "parameterContext cannot be null");
Objects.requireNonNull(extensionContext, "extensionContext cannot be null");
Class<?> type = parameterContext.getParameter().getType();
ExtensionContext.Store store = extensionContext.getStore(NAMESPACE);
if (type == WorkflowTestContext.class) {
return Objects.requireNonNull(
store.get(TEST_CONTEXT_KEY, WorkflowTestContext.class),
"WorkflowTestContext not initialized"
);
}
if (type == WorkflowTestOrchestrator.class) {
return Objects.requireNonNull(
store.get(ORCHESTRATOR_KEY, WorkflowTestOrchestrator.class),
"WorkflowTestOrchestrator not initialized"
);
}
if (type == WorkflowEngine.class) {
return Objects.requireNonNull(
store.get(ENGINE_KEY, WorkflowEngine.class),
"WorkflowEngine not initialized"
);
}
throw new ParameterResolutionException("Unsupported parameter type: " + type);
}
private WorkflowTestContext createTestContext() {
return new WorkflowTestContext();
}
private WorkflowEngine createWorkflowEngine() {
return new WorkflowEngine();
}
private WorkflowTestOrchestrator createOrchestrator(WorkflowTestContext testContext, WorkflowEngine engine) {
// Create and set the test interceptor
WorkflowTestInterceptor interceptor = new WorkflowTestInterceptor(
testContext.getMockRegistry(),
testContext.getExecutionTracker()
);
testContext.setTestInterceptor(interceptor);
return new WorkflowTestOrchestrator(
testContext.getMockRegistry(),
testContext.getExecutionTracker(),
interceptor,
engine
);
}
private void injectTestBase(WorkflowTestBase base, WorkflowTestContext testContext, WorkflowEngine engine) {
Objects.requireNonNull(base, "base cannot be null");
Objects.requireNonNull(testContext, "testContext cannot be null");
Objects.requireNonNull(engine, "engine cannot be null");
try {
// Use reflection to call protected setup method
Method setupMethod = WorkflowTestBase.class.getDeclaredMethod("setup", WorkflowTestContext.class, WorkflowEngine.class);
setupMethod.setAccessible(true);
setupMethod.invoke(base, testContext, engine);
log.debug("Successfully injected test base for: {}", base.getClass().getName());
} catch (NoSuchMethodException e) {
throw new ExtensionConfigurationException("WorkflowTestBase.setup method not found", e);
} catch (Exception e) {
throw new ExtensionConfigurationException("Failed to inject test base", e);
}
}
private void processWorkflowMockAnnotations(Object testInstance, WorkflowTestContext testContext) {
Objects.requireNonNull(testInstance, "testInstance cannot be null");
Objects.requireNonNull(testContext, "testContext cannot be null");
Class<?> testClass = testInstance.getClass();
List<Field> mockFields = findAnnotatedFields(testClass, WorkflowMock.class);
for (Field field : mockFields) {
WorkflowMock mockAnnotation = field.getAnnotation(WorkflowMock.class);
try {
field.setAccessible(true);
Object fieldValue = field.get(testInstance);
// Verify it's a Mockito mock
if (fieldValue == null || !MockUtil.isMock(fieldValue)) {
throw new ExtensionConfigurationException(
"Field annotated with @WorkflowMock must be a Mockito mock: " + field.getName()
);
}
// Register workflow mock
String workflowId = mockAnnotation.workflow();
String stepId = mockAnnotation.step();
if (workflowId.isEmpty() || stepId.isEmpty()) {
throw new ExtensionConfigurationException(
"@WorkflowMock must specify both workflow and step: " + field.getName()
);
}
// Create mock configuration
testContext.getMockBuilder()
.workflow(workflowId)
.step(stepId)
.mockWith(fieldValue)
.register();
log.debug("Registered workflow mock: {}.{} -> {}", workflowId, stepId, field.getName());
} catch (IllegalAccessException e) {
throw new ExtensionConfigurationException(
"Failed to access @WorkflowMock field: " + field.getName(), e
);
}
}
}
private void processWorkflowUnderTestAnnotations(Object testInstance, WorkflowEngine engine) {
Objects.requireNonNull(testInstance, "testInstance cannot be null");
Objects.requireNonNull(engine, "engine cannot be null");
Class<?> testClass = testInstance.getClass();
List<Field> workflowFields = findAnnotatedFields(testClass, WorkflowUnderTest.class);
if (workflowFields.size() > 1) {
throw new ExtensionConfigurationException(
"Only one field can be annotated with @WorkflowUnderTest"
);
}
for (Field field : workflowFields) {
try {
field.setAccessible(true);
Object workflow = field.get(testInstance);
if (workflow == null) {
// Create instance if null
workflow = createWorkflowInstance(field.getType());
field.set(testInstance, workflow);
}
// Register with engine
engine.register(workflow);
// Inject mocks into workflow
injectMocksIntoWorkflow(workflow, testInstance);
log.debug("Registered workflow under test: {}", workflow.getClass().getName());
} catch (Exception e) {
throw new ExtensionConfigurationException(
"Failed to process @WorkflowUnderTest field: " + field.getName(), e
);
}
}
}
private Object createWorkflowInstance(Class<?> workflowClass) {
try {
return workflowClass.getDeclaredConstructor().newInstance();
} catch (NoSuchMethodException e) {
throw new ExtensionConfigurationException(
"Workflow class must have a no-args constructor: " + workflowClass.getName(), e
);
} catch (Exception e) {
throw new ExtensionConfigurationException(
"Failed to create workflow instance: " + workflowClass.getName(), e
);
}
}
private void injectMocksIntoWorkflow(Object workflow, Object testInstance) {
Objects.requireNonNull(workflow, "workflow cannot be null");
Objects.requireNonNull(testInstance, "testInstance cannot be null");
Class<?> workflowClass = workflow.getClass();
Class<?> testClass = testInstance.getClass();
// Find all mocks in test instance
for (Field testField : testClass.getDeclaredFields()) {
if (testField.isAnnotationPresent(Mock.class)) {
try {
testField.setAccessible(true);
Object mockValue = testField.get(testInstance);
if (mockValue != null) {
// Try to inject into workflow
injectMockIntoWorkflowField(workflow, workflowClass, testField.getType(), mockValue);
}
} catch (IllegalAccessException e) {
log.warn("Failed to access mock field: {}", testField.getName(), e);
}
}
}
}
private void injectMockIntoWorkflowField(Object workflow, Class<?> workflowClass, Class<?> mockType, Object mockValue) {
// Look for matching field in workflow
for (Field workflowField : workflowClass.getDeclaredFields()) {
if (workflowField.getType().isAssignableFrom(mockType)) {
try {
workflowField.setAccessible(true);
workflowField.set(workflow, mockValue);
log.debug("Injected mock into workflow field: {}", workflowField.getName());
return;
} catch (IllegalAccessException e) {
log.warn("Failed to inject mock into workflow field: {}", workflowField.getName(), e);
}
}
}
}
private List<Field> findAnnotatedFields(Class<?> clazz, Class<? extends java.lang.annotation.Annotation> annotationClass) {
List<Field> fields = new ArrayList<>();
Class<?> current = clazz;
while (current != null && current != Object.class) {
for (Field field : current.getDeclaredFields()) {
if (field.isAnnotationPresent(annotationClass)) {
fields.add(field);
}
}
current = current.getSuperclass();
}
return fields;
}
}
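/*
 * Usage sketch (illustrative; OrderWorkflow, PaymentService, StepHandler and the ids are
 * hypothetical placeholders for the project under test):
 *
 *   @ExtendWith(WorkflowTestExtension.class)
 *   class OrderWorkflowTest {
 *       @Mock PaymentService paymentService;           // Mockito mock, injected into the workflow
 *       @WorkflowUnderTest OrderWorkflow workflow;     // registered with the engine automatically
 *       @WorkflowMock(workflow = "order-workflow", step = "charge")
 *       StepHandler chargeMock;                        // must be a Mockito mock
 *
 *       @Test
 *       void runsWorkflow(WorkflowEngine engine, WorkflowTestOrchestrator orchestrator) {
 *           // engine and orchestrator are resolved by this extension's ParameterResolver
 *       }
 *   }
 */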
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/junit/WorkflowUnderTest.java
|
package ai.driftkit.workflow.test.junit;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Marks a field as the workflow under test.
* The extension will automatically register this workflow with the test engine
* and inject any mocks declared in the test class.
*/
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface WorkflowUnderTest {
/**
* Optional workflow ID. If not specified, the ID will be derived from the class name.
*/
String id() default "";
/**
* Whether to automatically inject @Mock annotated fields from test class.
*/
boolean autoInjectMocks() default true;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/mock/AsyncMockDefinition.java
|
package ai.driftkit.workflow.test.mock;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.test.core.MockDefinition;
import ai.driftkit.workflow.test.core.StepContext;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Function;
/**
* Mock definition that returns async results.
* Supports delayed responses and async workflows.
*/
@Slf4j
public class AsyncMockDefinition<I> extends MockDefinition<I> {
private final Function<I, CompletableFuture<StepResult<?>>> asyncFunction;
private final Executor executor;
private final long delayMs;
/**
* Creates an async mock with custom executor.
*/
public AsyncMockDefinition(String workflowId,
String stepId,
Class<I> inputType,
Function<I, CompletableFuture<StepResult<?>>> asyncFunction,
Executor executor,
long delayMs) {
super(workflowId, stepId, inputType, (input, context) -> {
throw new IllegalStateException("AsyncMockDefinition.execute should handle execution");
});
this.asyncFunction = asyncFunction;
this.executor = executor != null ? executor : ForkJoinPool.commonPool();
this.delayMs = delayMs;
}
/**
* Creates an async mock with default executor.
*/
public static <I> AsyncMockDefinition<I> async(String workflowId,
String stepId,
Class<I> inputType,
Function<I, CompletableFuture<StepResult<?>>> asyncFunction) {
return new AsyncMockDefinition<>(workflowId, stepId, inputType, asyncFunction, null, 0);
}
/**
* Creates an async mock with delay.
*/
public static <I> AsyncMockDefinition<I> asyncWithDelay(String workflowId,
String stepId,
Class<I> inputType,
Function<I, StepResult<?>> function,
long delayMs) {
Function<I, CompletableFuture<StepResult<?>>> asyncFunction = input ->
CompletableFuture.supplyAsync(() -> {
try {
Thread.sleep(delayMs);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Async delay interrupted", e);
}
return function.apply(input);
});
return new AsyncMockDefinition<>(workflowId, stepId, inputType, asyncFunction, null, delayMs);
}
@Override
public StepResult<?> execute(Object input, StepContext context) {
log.debug("Executing async mock for {}.{} with delay {}ms",
workflowId, stepId, delayMs);
I typedInput = inputType.cast(input);
// For async mocks, we need to block and get the result
// In a real async scenario, this would return StepResult.async()
// But for testing, we simulate async behavior and return the result
try {
CompletableFuture<StepResult<?>> future = asyncFunction.apply(typedInput);
return future.get(); // Block for testing purposes
} catch (Exception e) {
return StepResult.fail(e);
}
}
/**
* Creates a builder for async mocks.
*/
public static <I> AsyncMockBuilder<I> builder(String workflowId,
String stepId,
Class<I> inputType) {
return new AsyncMockBuilder<>(workflowId, stepId, inputType);
}
/**
* Builder for async mocks.
*/
public static class AsyncMockBuilder<I> {
private final String workflowId;
private final String stepId;
private final Class<I> inputType;
private Executor executor;
private long delayMs = 0;
AsyncMockBuilder(String workflowId, String stepId, Class<I> inputType) {
this.workflowId = workflowId;
this.stepId = stepId;
this.inputType = inputType;
}
public AsyncMockBuilder<I> withExecutor(Executor executor) {
this.executor = executor;
return this;
}
public AsyncMockBuilder<I> withDelay(long delayMs) {
this.delayMs = delayMs;
return this;
}
public AsyncMockDefinition<I> thenReturn(Function<I, StepResult<?>> function) {
Function<I, CompletableFuture<StepResult<?>>> asyncFunction;
if (delayMs > 0) {
asyncFunction = input ->
CompletableFuture.supplyAsync(() -> {
try {
Thread.sleep(delayMs);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Async delay interrupted", e);
}
return function.apply(input);
}, executor != null ? executor : ForkJoinPool.commonPool());
} else {
asyncFunction = input ->
CompletableFuture.completedFuture(function.apply(input));
}
return new AsyncMockDefinition<>(workflowId, stepId, inputType,
asyncFunction, executor, delayMs);
}
public AsyncMockDefinition<I> thenReturnAsync(Function<I, CompletableFuture<StepResult<?>>> asyncFunction) {
return new AsyncMockDefinition<>(workflowId, stepId, inputType,
asyncFunction, executor, delayMs);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/mock/CompositeMockDefinition.java
|
package ai.driftkit.workflow.test.mock;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.test.core.MockDefinition;
import ai.driftkit.workflow.test.core.StepContext;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* Composite mock that delegates to multiple mocks based on conditions.
* Enables complex mocking scenarios.
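 * <p>Illustrative builder usage (a sketch; the IDs are placeholders and {@code StepResult.fail}
 * stands in for whatever result a real test would return per branch):
 * <pre>{@code
 * CompositeMockDefinition mock = CompositeMockDefinition
 *     .builder("order-workflow", "validate")
 *     .when(String.class, s -> s.isEmpty(),
 *           s -> StepResult.fail(new IllegalArgumentException("empty input")))
 *     .otherwise(input -> StepResult.fail(new IllegalStateException("unexpected input")))
 *     .build();
 * }</pre>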
*/
@Slf4j
public class CompositeMockDefinition extends MockDefinition<Object> {
private final List<ConditionalMock> mocks = new ArrayList<>();
private MockDefinition<?> defaultMock;
public CompositeMockDefinition(String workflowId, String stepId) {
super(workflowId, stepId, Object.class, (input, context) -> {
throw new IllegalStateException("CompositeMockDefinition.execute should handle execution");
});
}
/**
* Adds a conditional mock.
*
     * @param inputType the expected input type for the condition
     * @param condition the condition to check
* @param mock the mock to use when condition is true
* @param <I> input type
* @return this for chaining
*/
public <I> CompositeMockDefinition when(Class<I> inputType,
java.util.function.Predicate<I> condition,
MockDefinition<?> mock) {
Objects.requireNonNull(condition, "condition cannot be null");
Objects.requireNonNull(mock, "mock cannot be null");
mocks.add(new ConditionalMock(inputType, condition, mock));
return this;
}
/**
* Sets the default mock when no conditions match.
*
* @param mock the default mock
* @return this for chaining
*/
public CompositeMockDefinition otherwise(MockDefinition<?> mock) {
this.defaultMock = Objects.requireNonNull(mock, "mock cannot be null");
return this;
}
@Override
public StepResult<?> execute(Object input, StepContext context) {
log.debug("Executing composite mock for {}.{}", workflowId, stepId);
// Try each conditional mock
for (ConditionalMock conditional : mocks) {
if (conditional.matches(input)) {
log.debug("Condition matched, delegating to mock");
return conditional.mock.execute(input, context);
}
}
// Use default if no condition matched
if (defaultMock != null) {
log.debug("No condition matched, using default mock");
return defaultMock.execute(input, context);
}
// No mock matched
throw new IllegalStateException(
"No mock matched for input in composite mock: " + workflowId + "." + stepId
);
}
/**
* Creates a builder for composite mocks.
*/
public static Builder builder(String workflowId, String stepId) {
return new Builder(workflowId, stepId);
}
/**
* Builder for composite mocks.
*/
public static class Builder {
private final CompositeMockDefinition composite;
Builder(String workflowId, String stepId) {
this.composite = new CompositeMockDefinition(workflowId, stepId);
}
public <I> Builder when(Class<I> inputType,
java.util.function.Predicate<I> condition,
java.util.function.Function<I, StepResult<?>> function) {
MockDefinition<I> mock = MockDefinition.ofAny(
composite.workflowId,
composite.stepId,
inputType,
function
);
composite.when(inputType, condition, mock);
return this;
}
public Builder otherwise(java.util.function.Function<Object, StepResult<?>> function) {
composite.otherwise(MockDefinition.ofAny(
composite.workflowId,
composite.stepId,
Object.class,
function
));
return this;
}
public CompositeMockDefinition build() {
return composite;
}
}
/**
* Internal class for conditional mocks.
*/
private static class ConditionalMock {
private final Class<?> inputType;
private final java.util.function.Predicate<?> condition;
private final MockDefinition<?> mock;
ConditionalMock(Class<?> inputType,
java.util.function.Predicate<?> condition,
MockDefinition<?> mock) {
this.inputType = inputType;
this.condition = condition;
this.mock = mock;
}
@SuppressWarnings("unchecked")
boolean matches(Object input) {
if (!inputType.isInstance(input)) {
return false;
}
try {
return ((java.util.function.Predicate<Object>) condition).test(input);
} catch (ClassCastException e) {
log.warn("Failed to apply condition", e);
return false;
}
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/mock/MockAIClient.java
|
package ai.driftkit.workflow.test.mock;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.common.domain.client.ModelTextRequest;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelMessage;
import ai.driftkit.common.domain.client.Role;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.regex.Pattern;
/**
* Mock AI client for testing workflows that use AI models.
* Provides various response strategies and interaction tracking.
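 * <p>Illustrative configuration and verification (a sketch; the prompt fragments and canned
 * responses are arbitrary test data):
 * <pre>{@code
 * MockAIClient client = MockAIClient.builder().build()
 *     .whenPromptContains("weather", "It is sunny.")
 *     .whenPromptContains("greeting", "Hello!");
 *
 * // after the workflow under test has run against this client:
 * int totalCalls = client.getCallCount();
 * boolean sawMessages = client.wasCalledWith(req -> req.getMessages() != null);
 * }</pre>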
*/
@Slf4j
@Builder
@AllArgsConstructor
public class MockAIClient extends ModelClient<Void> {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Builder.Default
private final Map<String, ResponseStrategy> responseStrategies = new ConcurrentHashMap<>();
@Builder.Default
private final List<CallRecord> callHistory = Collections.synchronizedList(new ArrayList<>());
@Builder.Default
private final ResponseStrategy defaultStrategy = ResponseStrategy.fixed("Default response");
@Builder.Default
private final AtomicInteger callCount = new AtomicInteger(0);
@Builder.Default
private final boolean recordCalls = true;
public ModelTextResponse textToText(ModelTextRequest request) {
return textToText(request, null);
}
public ModelTextResponse textToText(ModelTextRequest request, Map<String, Object> options) {
int callNumber = callCount.incrementAndGet();
// Record the call
if (recordCalls) {
callHistory.add(new CallRecord(callNumber, request, options, System.currentTimeMillis()));
}
// Find matching strategy
ResponseStrategy strategy = findStrategy(request);
// Generate response
String response = strategy.generateResponse(request, callNumber);
log.debug("Mock AI call #{}: Generated response: {}", callNumber, response);
return ModelTextResponse.builder()
.id("mock-" + UUID.randomUUID())
.model(request.getModel())
.choices(List.of(
ModelTextResponse.ResponseMessage.builder()
.message(ModelMessage.builder()
.role(Role.assistant)
.content(response)
.build())
.build()
))
.usage(ModelTextResponse.Usage.builder()
.promptTokens(10)
.completionTokens(15)
.totalTokens(25)
.build())
.build();
}
@Override
public Set<Capability> getCapabilities() {
return Set.of(Capability.TEXT_TO_TEXT);
}
/**
* Add a response strategy for specific conditions.
*/
public MockAIClient withStrategy(String name, ResponseStrategy strategy) {
responseStrategies.put(name, strategy);
return this;
}
/**
* Add a fixed response for prompts containing specific text.
*/
public MockAIClient whenPromptContains(String text, String response) {
String strategyName = "contains_" + text;
responseStrategies.put(strategyName, ResponseStrategy.conditional(
request -> containsText(request, text),
ResponseStrategy.fixed(response)
));
return this;
}
/**
* Add a pattern-based response.
*/
public MockAIClient whenPromptMatches(String pattern, String response) {
Pattern regex = Pattern.compile(pattern);
String strategyName = "matches_" + pattern;
responseStrategies.put(strategyName, ResponseStrategy.conditional(
request -> matchesPattern(request, regex),
ResponseStrategy.fixed(response)
));
return this;
}
/**
* Add responses that vary by call count.
*/
public MockAIClient withSequentialResponses(String... responses) {
responseStrategies.put("sequential", ResponseStrategy.sequential(responses));
return this;
}
/**
* Add a response that simulates tool/function calling.
*/
public MockAIClient withFunctionCall(String functionName, Map<String, Object> args) {
try {
Map<String, Object> functionCall = Map.of(
"function", functionName,
"arguments", args
);
String jsonResponse = OBJECT_MAPPER.writeValueAsString(functionCall);
responseStrategies.put("function_" + functionName, ResponseStrategy.fixed(jsonResponse));
} catch (Exception e) {
log.error("Failed to create function call response", e);
}
return this;
}
/**
* Get call history.
*/
public List<CallRecord> getCallHistory() {
return new ArrayList<>(callHistory);
}
/**
* Get call count.
*/
public int getCallCount() {
return callCount.get();
}
/**
* Reset the mock state.
*/
public void reset() {
callCount.set(0);
callHistory.clear();
}
/**
* Verify that a call was made with specific content.
*/
public boolean wasCalledWith(Predicate<ModelTextRequest> predicate) {
return callHistory.stream()
.anyMatch(record -> predicate.test(record.request()));
}
/**
* Get the last call made.
*/
public CallRecord getLastCall() {
if (callHistory.isEmpty()) {
return null;
}
return callHistory.get(callHistory.size() - 1);
}
private ResponseStrategy findStrategy(ModelTextRequest request) {
        // Return the first registered strategy whose matches() accepts this request
for (ResponseStrategy strategy : responseStrategies.values()) {
if (strategy.matches(request)) {
return strategy;
}
}
return defaultStrategy;
}
private boolean containsText(ModelTextRequest request, String text) {
String content = extractContent(request);
return content.toLowerCase().contains(text.toLowerCase());
}
private boolean matchesPattern(ModelTextRequest request, Pattern pattern) {
String content = extractContent(request);
return pattern.matcher(content).find();
}
private String extractContent(ModelTextRequest request) {
StringBuilder content = new StringBuilder();
if (request.getMessages() != null) {
for (ModelContentMessage message : request.getMessages()) {
content.append(message.toString()).append(" ");
}
}
return content.toString();
}
/**
* Record of an AI client call.
*/
public record CallRecord(
int callNumber,
ModelTextRequest request,
Map<String, Object> options,
long timestamp
) {}
/**
* Response generation strategy.
*/
@FunctionalInterface
public interface ResponseStrategy {
String generateResponse(ModelTextRequest request, int callNumber);
default boolean matches(ModelTextRequest request) {
return true;
}
/**
* Create a fixed response strategy.
*/
static ResponseStrategy fixed(String response) {
return (request, callNumber) -> response;
}
/**
* Create a sequential response strategy.
*/
static ResponseStrategy sequential(String... responses) {
return (request, callNumber) -> {
int index = (callNumber - 1) % responses.length;
return responses[index];
};
}
/**
* Create a random response strategy.
*/
static ResponseStrategy random(String... responses) {
Random random = new Random();
return (request, callNumber) ->
responses[random.nextInt(responses.length)];
}
/**
* Create a conditional response strategy.
*/
static ResponseStrategy conditional(Predicate<ModelTextRequest> condition,
ResponseStrategy strategy) {
return new ResponseStrategy() {
@Override
public String generateResponse(ModelTextRequest request, int callNumber) {
return strategy.generateResponse(request, callNumber);
}
@Override
public boolean matches(ModelTextRequest request) {
return condition.test(request);
}
};
}
/**
* Create a function-based response strategy.
*/
static ResponseStrategy dynamic(Function<ModelTextRequest, String> generator) {
return (request, callNumber) -> generator.apply(request);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/mock/SequentialMockDefinition.java
|
package ai.driftkit.workflow.test.mock;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.test.core.MockDefinition;
import ai.driftkit.workflow.test.core.StepContext;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Mock that returns different results on sequential calls.
* Useful for testing retry behavior and state changes.
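 * <p>Illustrative retry scenario (a sketch; {@code PaymentRequest} and {@code successResult}
 * are placeholders for the step's real input type and its success {@code StepResult}):
 * <pre>{@code
 * SequentialMockDefinition mock = SequentialMockDefinition
 *     .builder("order-workflow", "charge", PaymentRequest.class)
 *     .thenFailTimes(2, new RuntimeException("transient error"))  // first two calls throw
 *     .thenAlwaysReturn(input -> successResult(input))            // hypothetical success factory
 *     .build();
 * }</pre>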
*/
@Slf4j
public class SequentialMockDefinition extends MockDefinition<Object> {
private final List<MockDefinition<?>> sequence;
private final AtomicInteger callCount = new AtomicInteger(0);
private final Object lock = new Object();
private volatile MockDefinition<?> afterSequence;
public SequentialMockDefinition(String workflowId, String stepId) {
super(workflowId, stepId, Object.class, (input, context) -> {
throw new IllegalStateException("SequentialMockDefinition.execute should handle execution");
});
this.sequence = new ArrayList<>();
}
/**
* Adds a mock to the sequence.
*
* @param mock the mock to add
* @return this for chaining
*/
public SequentialMockDefinition then(MockDefinition<?> mock) {
synchronized (lock) {
sequence.add(mock);
}
return this;
}
/**
     * Sets the mock to use after the sequence is exhausted.
*
* @param mock the mock to use
* @return this for chaining
*/
public SequentialMockDefinition thenAlways(MockDefinition<?> mock) {
this.afterSequence = mock;
return this;
}
@Override
public StepResult<?> execute(Object input, StepContext context) {
int index = callCount.getAndIncrement();
log.debug("Executing sequential mock for {}.{}, call #{}",
workflowId, stepId, index + 1);
MockDefinition<?> mockToExecute;
synchronized (lock) {
if (index < sequence.size()) {
mockToExecute = sequence.get(index);
} else if (afterSequence != null) {
mockToExecute = afterSequence;
} else {
throw new IllegalStateException(
"Sequential mock exhausted for " + workflowId + "." + stepId +
" after " + sequence.size() + " calls"
);
}
}
return mockToExecute.execute(input, context);
}
/**
* Resets the call count.
*/
public void reset() {
callCount.set(0);
}
/**
* Creates a builder for sequential mocks.
*/
public static <I> Builder<I> builder(String workflowId, String stepId, Class<I> inputType) {
return new Builder<>(workflowId, stepId, inputType);
}
/**
* Builder for sequential mocks.
*/
public static class Builder<I> {
private final String workflowId;
private final String stepId;
private final Class<I> inputType;
private final SequentialMockDefinition sequential;
Builder(String workflowId, String stepId, Class<I> inputType) {
this.workflowId = workflowId;
this.stepId = stepId;
this.inputType = inputType;
this.sequential = new SequentialMockDefinition(workflowId, stepId);
}
/**
* Adds a successful response to the sequence.
*/
public Builder<I> thenReturn(java.util.function.Function<I, StepResult<?>> function) {
sequential.then(MockDefinition.ofAny(workflowId, stepId, inputType, function));
return this;
}
/**
* Adds a failure to the sequence.
*/
public Builder<I> thenFail(Exception exception) {
sequential.then(MockDefinition.throwing(workflowId, stepId, inputType, exception));
return this;
}
/**
* Adds multiple failures to the sequence.
*/
public Builder<I> thenFailTimes(int times, Exception exception) {
MockDefinition<I> failMock = MockDefinition.throwing(workflowId, stepId, inputType, exception);
for (int i = 0; i < times; i++) {
sequential.then(failMock);
}
return this;
}
/**
         * Sets the behavior after the sequence is exhausted.
*/
public Builder<I> thenAlwaysReturn(java.util.function.Function<I, StepResult<?>> function) {
sequential.thenAlways(MockDefinition.ofAny(workflowId, stepId, inputType, function));
return this;
}
/**
         * Sets the failure to use after the sequence is exhausted.
*/
public Builder<I> thenAlwaysFail(Exception exception) {
sequential.thenAlways(MockDefinition.throwing(workflowId, stepId, inputType, exception));
return this;
}
public SequentialMockDefinition build() {
return sequential;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/Agent.java
|
package ai.driftkit.workflows.core.agent;
import java.util.List;
import java.util.Map;
/**
* Base interface for all agents in the simplified DriftKit agent system.
 * Agents are lightweight wrappers around DriftKit workflows that expose
 * easy-to-use interfaces for common AI operations.
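 * <p>Call sketch (the {@code agent} instance would typically be an {@link LLMAgent} configured elsewhere):
 * <pre>{@code
 * String answer = agent.execute("Summarize the release notes",
 *         Map.of("version", "0.8.1"));
 * }</pre>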
*/
public interface Agent {
/**
* Execute the agent with a simple text input.
*
* @param input The text input to process
* @return The agent's response as a string
*/
String execute(String input);
/**
* Execute the agent with text and image input.
*
* @param text The text input to process
* @param imageData Raw image data as byte array
* @return The agent's response as a string
*/
String execute(String text, byte[] imageData);
/**
* Execute the agent with text and multiple images.
*
* @param text The text input to process
* @param imageDataList List of raw image data as byte arrays
* @return The agent's response as a string
*/
String execute(String text, List<byte[]> imageDataList);
/**
* Execute the agent with input and context variables.
*
* @param input The text input to process
* @param variables Context variables for template processing
* @return The agent's response as a string
*/
String execute(String input, Map<String, Object> variables);
/**
* Get the agent's name/identifier.
*
* @return The agent's name
*/
String getName();
/**
* Get the agent's description.
*
* @return The agent's description
*/
String getDescription();
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/AgentResponse.java
|
package ai.driftkit.workflows.core.agent;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage.ModelContentElement;
import ai.driftkit.common.tools.ToolCall;
import lombok.Builder;
import lombok.Data;
import org.apache.commons.collections4.CollectionUtils;
import java.util.List;
/**
* Unified response wrapper for LLMAgent operations.
* Supports text, images, tool calls, and structured data.
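 * <p>Construction sketch using the factory methods below ({@code Invoice} is a placeholder type):
 * <pre>{@code
 * AgentResponse<String> reply = AgentResponse.text("hello");
 * AgentResponse<Invoice> parsed = AgentResponse.structured(invoice);
 * if (parsed.hasStructuredData()) {
 *     Invoice value = parsed.getStructuredData();
 * }
 * }</pre>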
*/
@Data
@Builder
public class AgentResponse<T> {
// Response content
private final String text;
private final List<ModelContentElement.ImageData> images;
private final T structuredData;
private final List<ToolCall> toolCalls;
private final List<ToolExecutionResult> toolResults;
// Response type
private final ResponseType type;
public enum ResponseType {
TEXT,
IMAGES,
STRUCTURED_DATA,
TOOL_CALLS,
TOOL_RESULTS,
MULTIMODAL
}
// Convenience methods
public boolean hasText() {
return text != null;
}
public boolean hasImages() {
return CollectionUtils.isNotEmpty(images);
}
public boolean hasStructuredData() {
return structuredData != null;
}
public boolean hasToolCalls() {
return CollectionUtils.isNotEmpty(toolCalls);
}
public boolean hasToolResults() {
return CollectionUtils.isNotEmpty(toolResults);
}
// Factory methods
public static AgentResponse<String> text(String text) {
return AgentResponse.<String>builder()
.text(text)
.type(ResponseType.TEXT)
.build();
}
public static AgentResponse<ModelContentElement.ImageData> image(ModelContentElement.ImageData image) {
return AgentResponse.<ModelContentElement.ImageData>builder()
.images(List.of(image))
.type(ResponseType.IMAGES)
.build();
}
public static AgentResponse<List<ModelContentElement.ImageData>> images(List<ModelContentElement.ImageData> images) {
return AgentResponse.<List<ModelContentElement.ImageData>>builder()
.images(images)
.type(ResponseType.IMAGES)
.build();
}
public static <T> AgentResponse<T> structured(T data) {
return AgentResponse.<T>builder()
.structuredData(data)
.type(ResponseType.STRUCTURED_DATA)
.build();
}
public static AgentResponse<List<ToolCall>> toolCalls(List<ToolCall> toolCalls) {
return AgentResponse.<List<ToolCall>>builder()
.toolCalls(toolCalls)
.type(ResponseType.TOOL_CALLS)
.build();
}
public static AgentResponse<List<ToolExecutionResult>> toolResults(List<ToolExecutionResult> results) {
return AgentResponse.<List<ToolExecutionResult>>builder()
.toolResults(results)
.type(ResponseType.TOOL_RESULTS)
.build();
}
public static AgentResponse<String> multimodal(String text, List<ModelContentElement.ImageData> images) {
return AgentResponse.<String>builder()
.text(text)
.images(images)
.type(ResponseType.MULTIMODAL)
.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/EvaluationResult.java
|
package ai.driftkit.workflows.core.agent;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* POJO for evaluation result from evaluator agent.
* This ensures type-safe parsing of evaluation responses.
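 * <p>Example of what an evaluator is expected to produce (a sketch; {@code LoopStatus.REVISE}
 * is one of the statuses handled by {@link LoopAgent}):
 * <pre>{@code
 * EvaluationResult revision = EvaluationResult.builder()
 *     .status(LoopStatus.REVISE)
 *     .feedback("Shorten the summary to two sentences")
 *     .build();
 * }</pre>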
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EvaluationResult {
/**
* The status of the evaluation.
*/
private LoopStatus status;
/**
* Optional feedback message for revision.
*/
private String feedback;
/**
* Optional reason for the evaluation result.
*/
private String reason;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/LLMAgent.java
|
package ai.driftkit.workflows.core.agent;
import ai.driftkit.common.domain.ChatMessageType;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.MessageType;
import ai.driftkit.workflows.core.chat.Message;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.common.domain.client.ModelImageRequest;
import ai.driftkit.common.domain.client.ModelImageResponse;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage.ModelContentElement;
import ai.driftkit.common.domain.client.ModelTextRequest;
import ai.driftkit.common.domain.client.ModelTextResponse.ResponseMessage;
import ai.driftkit.workflows.core.chat.ChatMemory;
import ai.driftkit.common.tools.ToolCall;
import ai.driftkit.common.tools.ToolInfo;
import ai.driftkit.common.tools.ToolRegistry;
import ai.driftkit.common.utils.JsonUtils;
import ai.driftkit.common.utils.AIUtils;
import ai.driftkit.common.domain.Prompt;
import ai.driftkit.context.core.registry.PromptServiceRegistry;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.context.core.util.PromptUtils;
import lombok.Builder;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
import java.util.stream.Collectors;
/**
* A simplified LLM agent that wraps the complex DriftKit ModelClient interface
* with an easy-to-use builder pattern and unified execute() methods.
*
* Features:
* - Unified execution interface with typed responses
* - Tool/function calling with automatic execution
* - Structured output extraction
* - Multi-modal support (text + images)
* - Conversation memory management
* - Prompt template support
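 *
 * <p>Builder sketch (the model client, names and system message are illustrative; only builder
 * methods defined in this class are used):
 * <pre>{@code
 * LLMAgent agent = LLMAgent.builder()
 *     .modelClient(modelClient)   // any configured ModelClient
 *     .name("support-agent")
 *     .systemMessage("You are a helpful support assistant.")
 *     .temperature(0.2)
 *     .build();
 *
 * AgentResponse<String> reply = agent.executeText("How do I reset my password?");
 * }</pre>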
*/
@Slf4j
@Data
public class LLMAgent implements Agent {
private final ModelClient modelClient;
private final String name;
private final String description;
private final String systemMessage;
private final Double temperature;
private final Integer maxTokens;
private final String model;
private final String imageModel;
// Unique agent identifier
private final String agentId;
// Core components
private final ChatMemory chatMemory;
private final PromptService promptService;
private final ToolRegistry toolRegistry;
// Tracing support
private final RequestTracingProvider tracingProvider;
// Enable automatic tool execution
private final boolean autoExecuteTools;
// Default temperature for structured extraction
private static final double STRUCTURED_EXTRACTION_TEMPERATURE = 0.1;
// Constructor
protected LLMAgent(ModelClient modelClient, String name, String description, String systemMessage,
Double temperature, Integer maxTokens, String model, String imageModel, String agentId,
ChatMemory chatMemory, PromptService promptService, ToolRegistry toolRegistry,
RequestTracingProvider tracingProvider, boolean autoExecuteTools) {
this.modelClient = modelClient;
this.name = name;
this.description = description;
this.systemMessage = systemMessage;
this.temperature = temperature;
this.maxTokens = maxTokens;
this.model = model;
this.imageModel = imageModel;
this.agentId = agentId != null ? agentId : AIUtils.generateId();
this.chatMemory = chatMemory;
this.promptService = promptService;
this.toolRegistry = toolRegistry;
this.tracingProvider = tracingProvider;
this.autoExecuteTools = autoExecuteTools;
}
/**
* Create a new LLMAgent builder.
*
* @return A new LLMAgent builder
*/
public static CustomLLMAgentBuilder builder() {
return new CustomLLMAgentBuilder();
}
/**
* Execute with simple text input
*/
public AgentResponse<String> executeText(String message) {
return executeText(message, Collections.emptyMap());
}
/**
* Execute with text and context variables
*/
public AgentResponse<String> executeText(String message, Map<String, Object> variables) {
try {
// Process message with variables
String processedMessage = processMessageWithVariables(message, variables);
// Add user message to memory
addUserMessage(processedMessage);
// Build and execute request
ModelTextRequest request = buildChatRequest();
ModelTextResponse response = modelClient.textToText(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("TEXT");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.variables(variables)
.build();
provider.traceTextRequest(request, response, traceContext);
}
// Extract response
return extractResponse(response);
} catch (Exception e) {
log.error("Error in executeText", e);
throw new RuntimeException("Failed to execute text", e);
}
}
/**
* Execute with tools - returns tool calls for manual execution
*/
public AgentResponse<List<ToolCall>> executeForToolCalls(String message) {
return executeForToolCalls(message, Collections.emptyMap());
}
/**
* Execute with tools - returns tool calls for manual execution with variables
*/
public AgentResponse<List<ToolCall>> executeForToolCalls(String message, Map<String, Object> variables) {
try {
// Process message with variables
String processedMessage = processMessageWithVariables(message, variables);
// Add user message to memory
addUserMessage(processedMessage);
// Build and execute request with tools
ModelTextRequest request = buildChatRequestWithTools();
ModelTextResponse response = modelClient.textToText(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("TOOL_CALLS");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.variables(variables)
.build();
provider.traceTextRequest(request, response, traceContext);
}
// Extract tool calls
List<ToolCall> toolCalls = extractToolCalls(response);
return AgentResponse.toolCalls(toolCalls);
} catch (Exception e) {
log.error("Error getting tool calls", e);
throw new RuntimeException("Failed to get tool calls", e);
}
}
/**
* Execute with tools and automatic execution - returns typed results
*/
public AgentResponse<List<ToolExecutionResult>> executeWithTools(String message) {
return executeWithTools(message, Collections.emptyMap());
}
/**
* Execute with tools and automatic execution - returns typed results with variables
*/
public AgentResponse<List<ToolExecutionResult>> executeWithTools(String message, Map<String, Object> variables) {
try {
// Process message with variables
String processedMessage = processMessageWithVariables(message, variables);
// Add user message to memory
addUserMessage(processedMessage);
// Build and execute request with tools
ModelTextRequest request = buildChatRequestWithTools();
ModelTextResponse response = modelClient.textToText(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("TOOLS_EXEC");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.variables(variables)
.build();
provider.traceTextRequest(request, response, traceContext);
}
// Check for tool calls
if (hasToolCalls(response)) {
// Execute tools and get typed results
List<ToolExecutionResult> results = executeToolsAndGetResults(response);
// Get final response from model
ModelTextRequest followUpRequest = buildChatRequest();
ModelTextResponse finalResponse = modelClient.textToText(followUpRequest);
// Trace follow-up request if provider is available
if (provider != null) {
String contextType = buildContextType("TOOLS_FOLLOWUP");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.variables(variables)
.build();
provider.traceTextRequest(followUpRequest, finalResponse, traceContext);
}
// Add final response to memory
String finalText = extractResponseText(finalResponse);
addAssistantMessage(finalText);
return AgentResponse.toolResults(results);
}
            // No tool calls requested; return an empty result list
return AgentResponse.toolResults(Collections.emptyList());
} catch (Exception e) {
log.error("Error in executeWithTools", e);
throw new RuntimeException("Failed to execute with tools", e);
}
}
/**
* Execute with structured output extraction
*/
public <T> AgentResponse<T> executeStructured(String userMessage, Class<T> targetClass) {
try {
// Create response format for structured output
ResponseFormat responseFormat = ResponseFormat.jsonSchema(targetClass);
// Build messages
List<ModelContentMessage> messages = buildBaseMessages();
messages.add(ModelContentMessage.create(Role.user, userMessage));
// Build request with structured output
ModelTextRequest request = ModelTextRequest.builder()
.model(getEffectiveModel())
.temperature(temperature != null ? temperature : STRUCTURED_EXTRACTION_TEMPERATURE)
.messages(messages)
.responseFormat(responseFormat)
.build();
// Execute request
ModelTextResponse response = modelClient.textToText(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("STRUCTURED");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.build();
provider.traceTextRequest(request, response, traceContext);
}
// Extract and parse response
String jsonResponse = extractResponseText(response);
T result = JsonUtils.fromJson(jsonResponse, targetClass);
return AgentResponse.structured(result);
} catch (Exception e) {
log.error("Error extracting structured data", e);
throw new RuntimeException("Failed to extract structured data", e);
}
}
/**
* Execute structured extraction using prompt template by ID
*/
public <T> AgentResponse<T> executeStructuredWithPrompt(String promptId, Map<String, Object> variables, Class<T> targetClass) {
return executeStructuredWithPrompt(promptId, variables, targetClass, Language.GENERAL);
}
/**
* Execute structured extraction using prompt template by ID with language
*/
public <T> AgentResponse<T> executeStructuredWithPrompt(String promptId, Map<String, Object> variables,
Class<T> targetClass, Language language) {
try {
// Use injected PromptService or fall back to registry
PromptService effectivePromptService = promptService != null ?
promptService : PromptServiceRegistry.getInstance();
if (effectivePromptService == null) {
throw new IllegalStateException("PromptService not configured. " +
"Please ensure PromptService is available in your application context " +
"or register one via PromptServiceRegistry.register()");
}
// Get prompt by ID
Optional<Prompt> promptOpt = effectivePromptService.getPromptById(promptId);
if (promptOpt.isEmpty()) {
throw new IllegalArgumentException("Prompt not found: " + promptId);
}
Prompt prompt = promptOpt.get();
// Apply variables to prompt
String processedMessage = PromptUtils.applyVariables(prompt.getMessage(), variables);
// Create response format for structured output
ResponseFormat responseFormat = ResponseFormat.jsonSchema(targetClass);
// Build messages with prompt system message if available
List<ModelContentMessage> messages = new ArrayList<>();
String promptSystemMessage = prompt.getSystemMessage();
if (StringUtils.isNotBlank(promptSystemMessage)) {
promptSystemMessage = PromptUtils.applyVariables(promptSystemMessage, variables);
messages.add(ModelContentMessage.create(Role.system, promptSystemMessage));
} else if (StringUtils.isNotBlank(systemMessage)) {
messages.add(ModelContentMessage.create(Role.system, systemMessage));
}
            // Add conversation history
messages.addAll(convertMemoryToMessages());
// Add user message
messages.add(ModelContentMessage.create(Role.user, processedMessage));
// Build request
ModelTextRequest request = ModelTextRequest.builder()
.model(getEffectiveModel())
.temperature(prompt.getTemperature() != null ? prompt.getTemperature() : STRUCTURED_EXTRACTION_TEMPERATURE)
.messages(messages)
.responseFormat(responseFormat)
.build();
// Execute request
ModelTextResponse response = modelClient.textToText(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("STRUCTURED_PROMPT");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.promptId(promptId)
.variables(variables)
.build();
provider.traceTextRequest(request, response, traceContext);
}
// Extract and parse response
String jsonResponse = extractResponseText(response);
T result = JsonUtils.fromJson(jsonResponse, targetClass);
return AgentResponse.structured(result);
} catch (Exception e) {
log.error("Error in executeStructuredWithPrompt", e);
throw new RuntimeException("Failed to execute structured with prompt", e);
}
}
/**
* Execute using prompt template by ID
*/
public AgentResponse<String> executeWithPrompt(String promptId, Map<String, Object> variables) {
return executeWithPrompt(promptId, variables, Language.GENERAL);
}
/**
* Execute using prompt template by ID with language
*/
public AgentResponse<String> executeWithPrompt(String promptId, Map<String, Object> variables, Language language) {
try {
// Use injected PromptService or fall back to registry
PromptService effectivePromptService = promptService != null ?
promptService : PromptServiceRegistry.getInstance();
if (effectivePromptService == null) {
throw new IllegalStateException("PromptService not configured. " +
"Please ensure PromptService is available in your application context " +
"or register one via PromptServiceRegistry.register()");
}
// Get prompt by ID
Optional<Prompt> promptOpt = effectivePromptService.getPromptById(promptId);
if (promptOpt.isEmpty()) {
throw new IllegalArgumentException("Prompt not found: " + promptId);
}
Prompt prompt = promptOpt.get();
// Apply variables to prompt
String processedMessage = PromptUtils.applyVariables(prompt.getMessage(), variables);
// Use system message from prompt if available
String promptSystemMessage = prompt.getSystemMessage();
if (StringUtils.isNotBlank(promptSystemMessage)) {
promptSystemMessage = PromptUtils.applyVariables(promptSystemMessage, variables);
}
// Add messages to memory
addUserMessage(processedMessage);
// Build messages with prompt system message
List<ModelContentMessage> messages = new ArrayList<>();
if (StringUtils.isNotBlank(promptSystemMessage)) {
messages.add(ModelContentMessage.create(Role.system, promptSystemMessage));
} else if (StringUtils.isNotBlank(systemMessage)) {
messages.add(ModelContentMessage.create(Role.system, systemMessage));
}
// Add conversation history
messages.addAll(convertMemoryToMessages());
// Build request
ModelTextRequest request = ModelTextRequest.builder()
.model(getEffectiveModel())
.temperature(prompt.getTemperature() != null ? prompt.getTemperature() : getEffectiveTemperature())
.messages(messages)
.build();
// Execute request
ModelTextResponse response = modelClient.textToText(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("PROMPT");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.promptId(promptId)
.variables(variables)
.build();
provider.traceTextRequest(request, response, traceContext);
}
// Extract response
return extractResponse(response);
} catch (Exception e) {
log.error("Error in executeWithPrompt", e);
throw new RuntimeException("Failed to execute with prompt", e);
}
}
/**
* Execute image generation using the agent's imageModel field
*/
public AgentResponse<ModelContentElement.ImageData> executeImageGeneration(String prompt) {
try {
// Build image request using agent's imageModel field
ModelImageRequest request = ModelImageRequest.builder()
.prompt(prompt)
                .model(imageModel) // use the agent's configured imageModel
.build();
// Execute request
ModelImageResponse response = modelClient.textToImage(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("IMAGE_GEN");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.build();
provider.traceImageRequest(request, response, traceContext);
}
// Extract first image
if (response != null && response.getBytes() != null && !response.getBytes().isEmpty()) {
ModelContentElement.ImageData imageData = response.getBytes().get(0);
return AgentResponse.image(imageData);
}
throw new RuntimeException("No image generated");
} catch (Exception e) {
log.error("Error generating image", e);
throw new RuntimeException("Failed to generate image", e);
}
}
/**
* Execute with images
*/
public AgentResponse<String> executeWithImages(String text, byte[] imageData) {
return executeWithImages(text, Collections.singletonList(imageData));
}
/**
* Execute with multiple images
*/
public AgentResponse<String> executeWithImages(String text, List<byte[]> imageDataList) {
try {
// Convert byte arrays to image data objects
List<ModelContentElement.ImageData> imageDataObjects = imageDataList.stream()
.map(bytes -> new ModelContentElement.ImageData(bytes, "image/jpeg"))
.collect(Collectors.toList());
// Build multimodal content
List<ModelContentElement> content = buildMultimodalContent(text, imageDataObjects);
// Create multimodal message
ModelContentMessage userMessage = ModelContentMessage.builder()
.role(Role.user)
.content(content)
.build();
// Add to memory
addUserMessage(text); // Add text version to memory
// Build messages with system and multimodal content
List<ModelContentMessage> messages = buildBaseMessages();
messages.add(userMessage);
// Build request
ModelTextRequest request = ModelTextRequest.builder()
.model(getEffectiveModel())
.temperature(getEffectiveTemperature())
.messages(messages)
.build();
// Execute request
ModelTextResponse response = modelClient.imageToText(request);
// Trace if provider is available
RequestTracingProvider provider = getTracingProvider();
if (provider != null) {
String contextType = buildContextType("IMAGE");
RequestTracingProvider.RequestContext traceContext = RequestTracingProvider.RequestContext.builder()
.contextId(agentId)
.contextType(contextType)
.build();
provider.traceImageToTextRequest(request, response, traceContext);
}
// Extract response
return extractResponse(response);
} catch (Exception e) {
log.error("Error executing with images", e);
throw new RuntimeException("Failed to execute with images", e);
}
}
/**
* Execute a single tool call manually
*/
public ToolExecutionResult executeToolCall(ToolCall toolCall) {
try {
Object result = toolRegistry.executeToolCall(toolCall);
return ToolExecutionResult.success(toolCall.getFunction().getName(), result);
} catch (Exception e) {
log.error("Error executing tool: {}", toolCall.getFunction().getName(), e);
return ToolExecutionResult.failure(toolCall.getFunction().getName(), e.getMessage());
}
}
/**
* Register a tool function using instance method
*/
public LLMAgent registerTool(String methodName, Object instance) {
toolRegistry.registerInstanceMethod(instance, methodName);
return this;
}
/**
* Register a tool function with description
*/
public LLMAgent registerTool(String methodName, Object instance, String description) {
toolRegistry.registerInstanceMethod(instance, methodName, description);
return this;
}
/**
* Register a static method as a tool
*/
public LLMAgent registerStaticTool(String methodName, Class<?> clazz) {
toolRegistry.registerStaticMethod(clazz, methodName);
return this;
}
/**
* Register all annotated tools from an instance
*/
public LLMAgent registerTools(Object instance) {
toolRegistry.registerClass(instance);
return this;
}
/**
* Register all static annotated tools from a class
*/
public LLMAgent registerStaticTools(Class<?> clazz) {
toolRegistry.registerStaticClass(clazz);
return this;
}
/**
* Clear conversation history
*/
public void clearHistory() {
if (chatMemory != null) {
chatMemory.clear();
}
}
/**
* Get conversation history
*/
public List<Message> getConversationHistory() {
if (chatMemory != null) {
return new ArrayList<>(chatMemory.messages());
}
return Collections.emptyList();
}
@Override
public String getName() {
return name;
}
@Override
public String getDescription() {
return description;
}
@Override
public String execute(String input) {
return executeText(input).getText();
}
@Override
public String execute(String text, byte[] imageData) {
return executeWithImages(text, imageData).getText();
}
@Override
public String execute(String text, List<byte[]> imageDataList) {
return executeWithImages(text, imageDataList).getText();
}
@Override
public String execute(String input, Map<String, Object> variables) {
return executeText(input, variables).getText();
}
// Private helper methods
private String processMessageWithVariables(String message, Map<String, Object> variables) {
if (variables != null && !variables.isEmpty()) {
return PromptUtils.applyVariables(message, variables);
}
return message;
}
private void addUserMessage(String content) {
if (chatMemory != null) {
Message message = Message.builder()
.messageId(UUID.randomUUID().toString())
.message(content)
.type(ChatMessageType.USER)
.messageType(MessageType.TEXT)
.createdTime(System.currentTimeMillis())
.requestInitTime(System.currentTimeMillis())
.build();
chatMemory.add(message);
}
}
private void addAssistantMessage(String content) {
if (chatMemory != null) {
Message message = Message.builder()
.messageId(UUID.randomUUID().toString())
.message(content)
.type(ChatMessageType.AI)
.messageType(MessageType.TEXT)
.createdTime(System.currentTimeMillis())
.requestInitTime(System.currentTimeMillis())
.build();
chatMemory.add(message);
}
}
private ModelTextRequest buildChatRequest() {
List<ModelContentMessage> messages = buildBaseMessages();
messages.addAll(convertMemoryToMessages());
return ModelTextRequest.builder()
.model(getEffectiveModel())
.temperature(getEffectiveTemperature())
.messages(messages)
.build();
}
private ModelTextRequest buildChatRequestWithTools() {
List<ModelContentMessage> messages = buildBaseMessages();
messages.addAll(convertMemoryToMessages());
ModelClient.Tool[] tools = toolRegistry.getTools();
return ModelTextRequest.builder()
.model(getEffectiveModel())
.temperature(getEffectiveTemperature())
.messages(messages)
.tools(tools.length > 0 ? Arrays.asList(tools) : null)
.build();
}
private List<ModelContentMessage> buildBaseMessages() {
List<ModelContentMessage> messages = new ArrayList<>();
// Add system message if present
if (StringUtils.isNotBlank(systemMessage)) {
messages.add(ModelContentMessage.create(Role.system, systemMessage));
}
return messages;
}
private List<ModelContentMessage> convertMemoryToMessages() {
if (chatMemory == null) {
return Collections.emptyList();
}
return chatMemory.messages().stream()
.map(this::convertMessageToModelMessage)
.collect(Collectors.toList());
}
private ModelContentMessage convertMessageToModelMessage(Message message) {
Role role = switch (message.getType()) {
case USER -> Role.user;
case AI -> Role.assistant;
case SYSTEM -> Role.system;
default -> Role.user;
};
return ModelContentMessage.create(role, message.getMessage());
}
private boolean hasToolCalls(ModelTextResponse response) {
return response != null &&
CollectionUtils.isNotEmpty(response.getChoices()) &&
response.getChoices().get(0).getMessage() != null &&
CollectionUtils.isNotEmpty(response.getChoices().get(0).getMessage().getToolCalls());
}
private List<ToolCall> extractToolCalls(ModelTextResponse response) {
if (!hasToolCalls(response)) {
return Collections.emptyList();
}
return response.getChoices().get(0).getMessage().getToolCalls();
}
private AgentResponse<String> extractResponse(ModelTextResponse response) {
String responseText = extractResponseText(response);
addAssistantMessage(responseText);
// Check if response contains images
List<ModelContentElement.ImageData> images = extractImages(response);
if (CollectionUtils.isNotEmpty(images)) {
return AgentResponse.multimodal(responseText, images);
}
return AgentResponse.text(responseText);
}
private List<ModelContentElement.ImageData> extractImages(ModelTextResponse response) {
// For now, text-to-text responses don't contain images
// This is a placeholder for future multimodal responses
return Collections.emptyList();
}
private List<ToolExecutionResult> executeToolsAndGetResults(ModelTextResponse response) {
List<ToolCall> toolCalls = extractToolCalls(response);
List<ToolExecutionResult> results = new ArrayList<>();
// Add assistant message with tool calls to memory
String assistantContent = extractResponseText(response);
if (StringUtils.isNotBlank(assistantContent)) {
addAssistantMessage(assistantContent);
}
// Execute each tool call
for (ToolCall toolCall : toolCalls) {
ToolExecutionResult result = executeToolCall(toolCall);
results.add(result);
// Add tool result to memory as user message
String resultStr = result.isSuccess() ?
String.format("[Tool: %s]\nResult: %s", result.getToolName(), convertResultToString(result.getResult())) :
String.format("[Tool: %s]\nError: %s", result.getToolName(), result.getError());
addUserMessage(resultStr);
}
return results;
}
private String convertResultToString(Object result) {
if (result == null) {
return "null";
}
if (result instanceof String) {
return (String) result;
}
// For complex objects, serialize as JSON
try {
return JsonUtils.toJson(result);
} catch (Exception e) {
return result.toString();
}
}
private List<ModelContentElement> buildMultimodalContent(String text,
List<ModelContentElement.ImageData> imageDataList) {
List<ModelContentElement> content = new ArrayList<>();
// Add text content if present
if (StringUtils.isNotBlank(text)) {
content.add(ModelContentElement.builder()
.type(ModelTextRequest.MessageType.text)
.text(text)
.build());
}
// Add image content
if (CollectionUtils.isNotEmpty(imageDataList)) {
for (ModelContentElement.ImageData imageData : imageDataList) {
content.add(ModelContentElement.builder()
.type(ModelTextRequest.MessageType.image)
.image(imageData)
.build());
}
}
return content;
}
private String extractResponseText(ModelTextResponse response) {
if (response == null || CollectionUtils.isEmpty(response.getChoices())) {
return "";
}
ResponseMessage choice = response.getChoices().get(0);
if (choice.getMessage() == null) {
return "";
}
return choice.getMessage().getContent();
}
/**
* Get effective model (from agent config or client default).
*/
private String getEffectiveModel() {
return StringUtils.isNotBlank(model) ? model : modelClient.getModel();
}
/**
* Get effective temperature (from agent config or client default).
*/
private Double getEffectiveTemperature() {
return temperature != null ? temperature : modelClient.getTemperature();
}
/**
* Get effective max tokens (from agent config or client default).
*/
private Integer getEffectiveMaxTokens() {
return maxTokens != null ? maxTokens : modelClient.getMaxTokens();
}
/**
* Get tracing provider - first check injected provider, then registry
*/
private RequestTracingProvider getTracingProvider() {
if (tracingProvider != null) {
return tracingProvider;
}
return RequestTracingRegistry.getInstance();
}
/**
* Build context type based on agent name/description and operation type
*/
private String buildContextType(String operationType) {
if (StringUtils.isNotBlank(name)) {
return String.format("%s_%s", name.toUpperCase().replace(" ", "_"), operationType);
} else if (StringUtils.isNotBlank(description)) {
            // Use the first word of the description
String[] words = description.split("\\s+");
String shortDesc = words[0].toUpperCase();
return String.format("%s_%s", shortDesc, operationType);
} else {
// Default to agent ID and operation
return String.format("AGENT_%s_%s", agentId, operationType);
}
}
/**
* Custom builder to set default values and validation.
*/
public static class CustomLLMAgentBuilder {
private ModelClient modelClient;
private String name;
private String description;
private String systemMessage;
private Double temperature;
private Integer maxTokens;
private String model;
private String imageModel;
private String agentId;
private ChatMemory chatMemory;
private PromptService promptService;
private ToolRegistry toolRegistry;
private RequestTracingProvider tracingProvider;
private boolean autoExecuteTools = true;
private List<ToolInfo> pendingTools = new ArrayList<>();
public CustomLLMAgentBuilder() {
// Set defaults
this.autoExecuteTools = true;
}
public CustomLLMAgentBuilder modelClient(ModelClient modelClient) {
this.modelClient = modelClient;
return this;
}
public CustomLLMAgentBuilder name(String name) {
this.name = name;
return this;
}
public CustomLLMAgentBuilder description(String description) {
this.description = description;
return this;
}
public CustomLLMAgentBuilder systemMessage(String systemMessage) {
this.systemMessage = systemMessage;
return this;
}
public CustomLLMAgentBuilder temperature(Double temperature) {
this.temperature = temperature;
return this;
}
public CustomLLMAgentBuilder maxTokens(Integer maxTokens) {
this.maxTokens = maxTokens;
return this;
}
public CustomLLMAgentBuilder model(String model) {
this.model = model;
return this;
}
public CustomLLMAgentBuilder imageModel(String imageModel) {
this.imageModel = imageModel;
return this;
}
public CustomLLMAgentBuilder agentId(String agentId) {
this.agentId = agentId;
return this;
}
public CustomLLMAgentBuilder chatMemory(ChatMemory chatMemory) {
this.chatMemory = chatMemory;
return this;
}
public CustomLLMAgentBuilder promptService(PromptService promptService) {
this.promptService = promptService;
return this;
}
public CustomLLMAgentBuilder toolRegistry(ToolRegistry toolRegistry) {
this.toolRegistry = toolRegistry;
return this;
}
public CustomLLMAgentBuilder tracingProvider(RequestTracingProvider tracingProvider) {
this.tracingProvider = tracingProvider;
return this;
}
public CustomLLMAgentBuilder autoExecuteTools(boolean autoExecuteTools) {
this.autoExecuteTools = autoExecuteTools;
return this;
}
/**
* Add a tool to the agent
*/
public CustomLLMAgentBuilder addTool(ToolInfo toolInfo) {
pendingTools.add(toolInfo);
return this;
}
public LLMAgent build() {
// Auto-discover PromptService if not explicitly set
if (promptService == null) {
promptService = PromptServiceRegistry.getInstance();
if (promptService != null) {
log.debug("Auto-discovered PromptService from registry: {}",
promptService.getClass().getSimpleName());
}
}
// Create tool registry if tools were added
if (!pendingTools.isEmpty()) {
if (toolRegistry == null) {
toolRegistry = new ToolRegistry();
}
for (ToolInfo toolInfo : pendingTools) {
toolRegistry.registerTool(toolInfo);
}
} else if (toolRegistry == null) {
// Set default empty registry
toolRegistry = new ToolRegistry();
}
return new LLMAgent(modelClient, name, description, systemMessage,
temperature, maxTokens, model, imageModel, agentId,
chatMemory, promptService, toolRegistry,
tracingProvider, autoExecuteTools);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/LoopAgent.java
|
package ai.driftkit.workflows.core.agent;
import ai.driftkit.common.domain.client.ResponseFormat;
import ai.driftkit.common.utils.JsonUtils;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import java.util.Map;
/**
* Agent that executes a worker agent in a loop until a stop condition is met.
* The evaluator agent determines whether to continue or stop the loop.
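 *
 * <p>Hypothetical usage sketch ({@code draftAgent} and {@code reviewAgent} are placeholders,
 * not part of this class):
 * <pre>{@code
 * // Loop until the evaluator reports COMPLETE, for at most 5 iterations.
 * LoopAgent refiner = LoopAgent.builder()
 *     .worker(draftAgent)
 *     .evaluator(reviewAgent)
 *     .stopCondition(LoopStatus.COMPLETE)
 *     .maxIterations(5)
 *     .build();
 * String refined = refiner.execute("Write a short release note for version 0.8.1");
 * }</pre>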
*/
@Slf4j
@Builder
@Getter
@AllArgsConstructor
public class LoopAgent implements Agent {
private final Agent worker;
private final Agent evaluator;
private final LoopStatus stopCondition;
@Builder.Default
private final String name = "LoopAgent";
@Builder.Default
private final String description = "Agent that executes work in a loop until condition is met";
@Builder.Default
private final int maxIterations = 10;
@Override
public String execute(String input) {
return runLoop(input, null);
}
@Override
public String execute(String text, byte[] imageData) {
return worker.execute(text, imageData);
}
@Override
public String execute(String text, List<byte[]> imageDataList) {
return worker.execute(text, imageDataList);
}
@Override
public String execute(String input, Map<String, Object> variables) {
return runLoop(input, variables);
}
/**
* Execute the loop with worker and evaluator agents.
*/
private String runLoop(String input, Map<String, Object> variables) {
String currentResult = input;
int iteration = 0;
while (iteration < maxIterations) {
iteration++;
log.debug("LoopAgent '{}' - iteration {}/{}", getName(), iteration, maxIterations);
try {
// Execute worker agent
String workerResult;
if (variables != null) {
workerResult = worker.execute(currentResult, variables);
} else {
workerResult = worker.execute(currentResult);
}
// Evaluate the result
EvaluationResult evaluationResult;
// If evaluator is an LLMAgent, use structured output
if (evaluator instanceof LLMAgent) {
LLMAgent llmEvaluator = (LLMAgent) evaluator;
String evaluationInput = buildStructuredEvaluationInput(currentResult, workerResult);
AgentResponse<EvaluationResult> response = llmEvaluator.executeStructured(
evaluationInput,
EvaluationResult.class
);
evaluationResult = response.getStructuredData();
} else {
// Fallback to traditional JSON parsing approach
String evaluationInput = buildEvaluationInput(currentResult, workerResult);
String evaluationResponse;
if (variables != null) {
evaluationResponse = evaluator.execute(evaluationInput, variables);
} else {
evaluationResponse = evaluator.execute(evaluationInput);
}
evaluationResult = parseEvaluationResult(evaluationResponse);
}
log.debug("LoopAgent '{}' - evaluation status: {}", getName(), evaluationResult.getStatus());
// Check stop condition
if (evaluationResult.getStatus() == stopCondition) {
log.debug("LoopAgent '{}' - stop condition met after {} iterations", getName(), iteration);
return workerResult;
}
// Handle different statuses
switch (evaluationResult.getStatus()) {
case REVISE:
currentResult = buildRevisionInput(workerResult, evaluationResult.getFeedback());
break;
case RETRY:
// Keep the same input for retry
break;
case FAILED:
throw new RuntimeException("Evaluator indicated failure: " + evaluationResult.getReason());
case CONTINUE:
default:
currentResult = workerResult;
break;
}
} catch (Exception e) {
log.error("Error in LoopAgent '{}' iteration {}", getName(), iteration, e);
throw new RuntimeException("LoopAgent execution failed at iteration " + iteration, e);
}
}
log.warn("LoopAgent '{}' reached maximum iterations ({})", getName(), maxIterations);
return currentResult;
}
/**
* Build input for the evaluator agent with structured output.
* The evaluator agent should have its own system message that defines evaluation criteria.
*/
private String buildStructuredEvaluationInput(String originalInput, String workerResult) {
// Simply pass the original request and result as structured data
// The evaluator agent's system message should define how to evaluate
return String.format(
"Original request: %s\n\n" +
"Generated result: %s",
originalInput, workerResult
);
}
/**
* Build input for the evaluator agent (legacy JSON format).
* The evaluator agent should have its own system message that defines expected JSON format.
*/
private String buildEvaluationInput(String originalInput, String workerResult) {
// Simply pass the data - evaluator agent's system message should define the expected JSON format
return String.format("Original request: %s\n\nGenerated result: %s",
originalInput, workerResult);
}
/**
* Build input for revision based on evaluator feedback.
*/
private String buildRevisionInput(String workerResult, String feedback) {
if (StringUtils.isNotBlank(feedback)) {
return String.format("Previous result: %s\n\nFeedback for improvement: %s", workerResult, feedback);
} else {
return String.format("Previous result needs revision: %s", workerResult);
}
}
/**
* Parse the evaluation response as JSON to get typed result.
*/
private EvaluationResult parseEvaluationResult(String evaluationResponse) {
try {
// Try to parse as JSON first
EvaluationResult result = JsonUtils.safeParse(evaluationResponse, EvaluationResult.class);
if (result != null && result.getStatus() != null) {
return result;
}
} catch (Exception e) {
log.debug("Failed to parse evaluation response as JSON, falling back to enum analysis", e);
}
// Fallback to enum name analysis if JSON parsing fails
return fallbackEnumAnalysis(evaluationResponse);
}
/**
* Fallback method to analyze text response using enum names if JSON parsing fails.
*/
private EvaluationResult fallbackEnumAnalysis(String response) {
if (StringUtils.isBlank(response)) {
return EvaluationResult.builder()
.status(LoopStatus.CONTINUE)
.build();
}
String upperResponse = response.toUpperCase();
// Check for each enum value by name
for (LoopStatus status : LoopStatus.values()) {
if (upperResponse.contains(status.name())) {
return EvaluationResult.builder()
.status(status)
.feedback(response)
.build();
}
}
// Default to CONTINUE if no enum match found
return EvaluationResult.builder()
.status(LoopStatus.CONTINUE)
.feedback(response)
.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/LoopStatus.java
|
package ai.driftkit.workflows.core.agent;
/**
* Status enumeration for loop execution results.
*/
public enum LoopStatus {
/**
* Loop should continue with the current result.
*/
CONTINUE,
/**
* Loop should stop - condition has been met.
*/
COMPLETE,
/**
* Loop should continue but with revision based on feedback.
*/
REVISE,
/**
* Loop should retry the current iteration.
*/
RETRY,
/**
* Loop failed due to an error.
*/
FAILED
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/RequestTracingProvider.java
|
package ai.driftkit.workflows.core.agent;
import ai.driftkit.common.domain.client.ModelTextRequest;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.common.domain.client.ModelImageRequest;
import ai.driftkit.common.domain.client.ModelImageResponse;
import lombok.Builder;
import lombok.Data;
import java.util.Map;
/**
* Interface for providing request tracing capabilities to LLMAgent.
* Implementations can provide tracing via Spring (ModelRequestService),
* REST API, or other mechanisms.
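 *
 * <p>Hypothetical sketch of building a trace context ({@code provider}, {@code request}
 * and {@code response} are placeholders):
 * <pre>{@code
 * RequestContext ctx = RequestContext.builder()
 *     .contextId("chat-123")
 *     .contextType("chat")
 *     .workflowId("wf-42")
 *     .workflowStep("generateAnswer")
 *     .build();
 * provider.traceTextRequest(request, response, ctx);
 * }</pre>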
*/
public interface RequestTracingProvider {
/**
* Trace a text-to-text request and response
*/
void traceTextRequest(ModelTextRequest request, ModelTextResponse response, RequestContext context);
/**
* Trace a text-to-image request and response
*/
void traceImageRequest(ModelImageRequest request, ModelImageResponse response, RequestContext context);
/**
* Trace an image-to-text request and response
*/
void traceImageToTextRequest(ModelTextRequest request, ModelTextResponse response, RequestContext context);
/**
* Context information for request tracing
*/
@Data
@Builder
class RequestContext {
private final String contextId;
private final String contextType;
private final String promptId;
private final Map<String, Object> variables;
private final String workflowId;
private final String workflowType;
private final String workflowStep;
private final String chatId;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/RequestTracingRegistry.java
|
package ai.driftkit.workflows.core.agent;
import lombok.extern.slf4j.Slf4j;
/**
* Registry for RequestTracingProvider instances.
* Allows Spring-based implementations to register themselves automatically.
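 *
 * <p>Hypothetical usage sketch ({@code myProvider} is a placeholder implementation):
 * <pre>{@code
 * RequestTracingRegistry.register(myProvider);
 * if (RequestTracingRegistry.isAvailable()) {
 *     RequestTracingProvider provider = RequestTracingRegistry.getInstance();
 *     // hand the provider to components that accept a RequestTracingProvider
 * }
 * }</pre>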
*/
@Slf4j
public class RequestTracingRegistry {
private static volatile RequestTracingProvider instance;
/**
* Register a tracing provider (typically called by Spring components)
*/
public static void register(RequestTracingProvider provider) {
instance = provider;
log.info("Registered RequestTracingProvider: {}", provider.getClass().getSimpleName());
}
/**
* Get the current tracing provider instance
*/
public static RequestTracingProvider getInstance() {
return instance;
}
/**
* Check if a tracing provider is available
*/
public static boolean isAvailable() {
return instance != null;
}
/**
* Unregister the current provider (useful for testing)
*/
public static void unregister() {
RequestTracingProvider old = instance;
instance = null;
if (old != null) {
log.info("Unregistered RequestTracingProvider: {}", old.getClass().getSimpleName());
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/SequentialAgent.java
|
package ai.driftkit.workflows.core.agent;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.Singular;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.Map;
/**
* Agent that executes a sequence of agents one after another.
* The output of each agent becomes the input for the next agent.
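 *
 * <p>Hypothetical usage sketch ({@code extractAgent} and {@code summarizeAgent} are placeholders).
 * The {@code @Singular} list gives the builder a singular {@code agent(...)} method:
 * <pre>{@code
 * SequentialAgent pipeline = SequentialAgent.builder()
 *     .agent(extractAgent)
 *     .agent(summarizeAgent)
 *     .build();
 * String summary = pipeline.execute("Raw meeting transcript ...");
 * }</pre>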
*/
@Slf4j
@Builder
@Getter
@AllArgsConstructor
public class SequentialAgent implements Agent {
@Singular
private final List<Agent> agents;
@Builder.Default
private final String name = "SequentialAgent";
@Builder.Default
private final String description = "Agent that executes multiple agents in sequence";
@Override
public String execute(String input) {
return runSequence(input, null);
}
@Override
public String execute(String text, byte[] imageData) {
if (agents.isEmpty()) {
return text;
}
// For multimodal input, only the first agent can handle images
// Subsequent agents work with text output
String result = agents.get(0).execute(text, imageData);
for (int i = 1; i < agents.size(); i++) {
Agent agent = agents.get(i);
log.debug("SequentialAgent '{}' - executing step {}/{}: {}",
getName(), i + 1, agents.size(), agent.getName());
result = agent.execute(result);
}
return result;
}
@Override
public String execute(String text, List<byte[]> imageDataList) {
if (agents.isEmpty()) {
return text;
}
// For multimodal input, only the first agent can handle images
// Subsequent agents work with text output
String result = agents.get(0).execute(text, imageDataList);
for (int i = 1; i < agents.size(); i++) {
Agent agent = agents.get(i);
log.debug("SequentialAgent '{}' - executing step {}/{}: {}",
getName(), i + 1, agents.size(), agent.getName());
result = agent.execute(result);
}
return result;
}
@Override
public String execute(String input, Map<String, Object> variables) {
return runSequence(input, variables);
}
/**
* Execute the sequence of agents.
*/
private String runSequence(String input, Map<String, Object> variables) {
if (agents.isEmpty()) {
log.warn("SequentialAgent '{}' has no agents to execute", getName());
return input;
}
String result = input;
for (int i = 0; i < agents.size(); i++) {
Agent agent = agents.get(i);
log.debug("SequentialAgent '{}' - executing step {}/{}: {}",
getName(), i + 1, agents.size(), agent.getName());
try {
if (variables != null) {
result = agent.execute(result, variables);
} else {
result = agent.execute(result);
}
log.debug("SequentialAgent '{}' - step {} completed", getName(), i + 1);
} catch (Exception e) {
log.error("SequentialAgent '{}' - step {} failed: {}",
getName(), i + 1, agent.getName(), e);
throw new RuntimeException(
String.format("SequentialAgent step %d failed: %s", i + 1, agent.getName()), e);
}
}
log.debug("SequentialAgent '{}' completed all {} steps", getName(), agents.size());
return result;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/ToolExecutionResult.java
|
package ai.driftkit.workflows.core.agent;
import lombok.Builder;
import lombok.Data;
/**
* Result of tool execution with typed data.
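 *
 * <p>Hypothetical usage sketch ({@code WeatherReport} is a placeholder type, not part of this module):
 * <pre>{@code
 * ToolExecutionResult ok = ToolExecutionResult.success("weather", new WeatherReport(21.5));
 * if (ok.isSuccess()) {
 *     WeatherReport report = ok.getTypedResult(WeatherReport.class);
 * }
 * ToolExecutionResult failed = ToolExecutionResult.failure("weather", "API timeout");
 * }</pre>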
*/
@Data
@Builder
public class ToolExecutionResult {
private final String toolName;
private final Object result;
private final Class<?> resultType;
private final boolean success;
private final String error;
// Convenience method to get typed result
public <T> T getTypedResult(Class<T> type) {
if (result == null) {
return null;
}
if (type.isInstance(result)) {
return type.cast(result);
}
throw new ClassCastException("Cannot cast result of type " + result.getClass().getName() + " to " + type.getName());
}
// Legacy method for backward compatibility
@SuppressWarnings("unchecked")
public <T> T getTypedResult() {
return (T) result;
}
// Factory methods
public static ToolExecutionResult success(String toolName, Object result) {
return ToolExecutionResult.builder()
.toolName(toolName)
.result(result)
.resultType(result != null ? result.getClass() : Void.class)
.success(true)
.build();
}
public static ToolExecutionResult failure(String toolName, String error) {
return ToolExecutionResult.builder()
.toolName(toolName)
.success(false)
.error(error)
.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/tool/AgentAsTool.java
|
package ai.driftkit.workflows.core.agent.tool;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.common.tools.ToolInfo;
import ai.driftkit.workflows.core.agent.Agent;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Wrapper that allows any Agent to be used as a Tool.
* This enables composition of agents where one agent can call another agent as a tool.
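 *
 * <p>Hypothetical usage sketch ({@code summaryAgent} and the tool name are placeholders):
 * <pre>{@code
 * ToolInfo summarizeTool = AgentAsTool.create(
 *     "summarize",
 *     "Summarizes the given text in a few sentences",
 *     summaryAgent);
 * // The resulting ToolInfo can then be registered with a tool registry or passed to an agent builder.
 * }</pre>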
*/
@Slf4j
@RequiredArgsConstructor
public class AgentAsTool implements Tool<SimpleToolArguments> {
private final Agent agent;
private final String name;
private final String description;
/**
* Create a new AgentAsTool wrapper as ToolInfo.
*
* @param name The name of the tool
* @param description The description of what the tool does
* @param agent The agent to wrap as a tool
* @return A new ToolInfo instance representing the agent as a tool
*/
public static ToolInfo create(String name, String description, Agent agent) {
AgentAsTool agentAsTool = new AgentAsTool(agent, name, description);
// Create ToolInfo from the AgentAsTool instance
return ToolInfo.builder()
.functionName(name)
.description(description)
.parameterNames(Arrays.asList("arguments"))
.parameterTypes(Arrays.asList(SimpleToolArguments.class))
.returnType(String.class)
.method(null) // No method for Tool<?> objects
.instance(agentAsTool) // Store the AgentAsTool instance
.isStatic(false)
.toolDefinition(createToolDefinition(agentAsTool))
.build();
}
/**
* Creates a ModelClient.Tool definition from an AgentAsTool instance
*/
private static ModelClient.Tool createToolDefinition(AgentAsTool agentAsTool) {
return ModelClient.Tool.builder()
.type(ModelClient.ResponseFormatType.function)
.function(ModelClient.ToolFunction.builder()
.name(agentAsTool.getName())
.description(agentAsTool.getDescription())
.parameters(convertToFunctionParameters(agentAsTool.getParametersSchema()))
.build())
.build();
}
/**
* Converts ToolParameterSchema to FunctionParameters
*/
private static ModelClient.ToolFunction.FunctionParameters convertToFunctionParameters(ToolParameterSchema schema) {
Map<String, ModelClient.Property> properties = new HashMap<>();
// Convert each property in the schema
if (schema.getProperties() != null) {
for (Map.Entry<String, ToolParameterSchema.PropertySchema> entry : schema.getProperties().entrySet()) {
ToolParameterSchema.PropertySchema propSchema = entry.getValue();
ModelClient.Property property = new ModelClient.Property();
property.setType(ModelClient.ResponseFormatType.fromType(propSchema.getType()));
property.setDescription(propSchema.getDescription());
properties.put(entry.getKey(), property);
}
}
ModelClient.ToolFunction.FunctionParameters params = new ModelClient.ToolFunction.FunctionParameters();
params.setType(ModelClient.ResponseFormatType.Object);
params.setProperties(properties);
params.setRequired(schema.getRequired());
return params;
}
@Override
public String getName() {
return name;
}
@Override
public String getDescription() {
return description;
}
@Override
public ToolParameterSchema getParametersSchema() {
// Create schema POJO
ToolParameterSchema.PropertySchema inputProperty = ToolParameterSchema.PropertySchema.builder()
.type("string")
.description("The input text to process")
.build();
return ToolParameterSchema.builder()
.type("object")
.properties(Map.of("input", inputProperty))
.required(List.of("input"))
.build();
}
@Override
public Class<SimpleToolArguments> getArgumentType() {
return SimpleToolArguments.class;
}
@Override
public String execute(SimpleToolArguments arguments) throws Exception {
// Execute the wrapped agent
String result = agent.execute(arguments.getInput());
log.debug("AgentAsTool '{}' executed successfully", name);
return result;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/tool/SimpleToolArguments.java
|
package ai.driftkit.workflows.core.agent.tool;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* Simple tool arguments with a single input field.
* Used for AgentAsTool and other simple tools.
*/
@Data
@EqualsAndHashCode(callSuper = true)
public class SimpleToolArguments extends ToolArguments {
private String input;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/tool/Tool.java
|
package ai.driftkit.workflows.core.agent.tool;
/**
* Interface for tools that can be used by agents.
* Tools provide functionality that agents can call to perform specific tasks.
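 *
 * <p>Hypothetical implementation sketch ({@code EchoTool} is a placeholder; imports omitted):
 * <pre>{@code
 * public class EchoTool implements Tool<SimpleToolArguments> {
 *     public String getName() { return "echo"; }
 *     public String getDescription() { return "Returns the input text unchanged"; }
 *     public ToolParameterSchema getParametersSchema() {
 *         return ToolParameterSchema.builder()
 *             .type("object")
 *             .properties(Map.of("input", ToolParameterSchema.PropertySchema.builder()
 *                 .type("string").description("Text to echo").build()))
 *             .required(List.of("input"))
 *             .build();
 *     }
 *     public Class<SimpleToolArguments> getArgumentType() { return SimpleToolArguments.class; }
 *     public String execute(SimpleToolArguments args) { return args.getInput(); }
 * }
 * }</pre>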
*/
public interface Tool<T extends ToolArguments> {
/**
* Get the name of the tool.
*
* @return The tool's name
*/
String getName();
/**
* Get the description of what the tool does.
*
* @return The tool's description
*/
String getDescription();
/**
* Get the parameter schema for the tool.
*
* @return The parameters schema as POJO
*/
ToolParameterSchema getParametersSchema();
/**
* Get the argument type class for this tool.
* Used by the framework to parse JSON arguments into typed objects.
*
* @return The argument type class
*/
Class<T> getArgumentType();
/**
* Execute the tool with the given arguments.
*
* @param arguments The parsed arguments as a typed object
* @return The result of the tool execution
* @throws Exception if tool execution fails
*/
String execute(T arguments) throws Exception;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/tool/ToolArguments.java
|
package ai.driftkit.workflows.core.agent.tool;
import lombok.Data;
import java.util.Map;
/**
* Base class for tool arguments.
* Specific tools should extend this class or use the generic version.
*/
@Data
public class ToolArguments {
/**
* Raw arguments as a map for flexible tools.
*/
private Map<String, Object> arguments;
/**
* Get a specific argument value.
*
* @param key The argument key
* @return The argument value
*/
public Object get(String key) {
return arguments != null ? arguments.get(key) : null;
}
/**
* Get a specific argument value as string.
*
* @param key The argument key
* @return The argument value as string
*/
public String getString(String key) {
Object value = get(key);
return value != null ? value.toString() : null;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/agent/tool/ToolParameterSchema.java
|
package ai.driftkit.workflows.core.agent.tool;
import lombok.Builder;
import lombok.Data;
import java.util.List;
import java.util.Map;
/**
* POJO for tool parameter schema definition.
* This class represents the JSON schema for tool parameters.
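 *
 * <p>Hypothetical sketch of a single-parameter schema (the field names are placeholders):
 * <pre>{@code
 * ToolParameterSchema schema = ToolParameterSchema.builder()
 *     .type("object")
 *     .properties(Map.of("city", PropertySchema.builder()
 *         .type("string")
 *         .description("City name to look up")
 *         .build()))
 *     .required(List.of("city"))
 *     .build();
 * }</pre>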
*/
@Data
@Builder
public class ToolParameterSchema {
private String type;
private Map<String, PropertySchema> properties;
private List<String> required;
@Data
@Builder
public static class PropertySchema {
private String type;
private String description;
private List<String> enumValues;
private PropertySchema items; // For array types
private Map<String, PropertySchema> properties; // For object types
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/ChatMemory.java
|
package ai.driftkit.workflows.core.chat;
import java.util.List;
/**
* ChatMemory defines the contract for a chat memory system.
*/
public interface ChatMemory {
Object id();
void add(Message message);
List<Message> messages();
void clear();
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/ChatMemoryStore.java
|
package ai.driftkit.workflows.core.chat;
import java.util.List;
/**
* ChatMemoryStore defines the interface for persisting chat messages.
*/
public interface ChatMemoryStore {
List<Message> getMessages(String id, int limit);
void updateMessages(String id, List<Message> messages);
void deleteMessages(String id);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/Grade.java
|
package ai.driftkit.workflows.core.chat;
public enum Grade {
EXCELLENT,
GOOD,
FAIR,
POOR,
UNACCEPTABLE
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/InMemoryChatMemoryStore.java
|
package ai.driftkit.workflows.core.chat;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* InMemoryChatMemoryStore is a simple in-memory implementation of ChatMemoryStore.
*/
public class InMemoryChatMemoryStore implements ChatMemoryStore {
private final Map<String, List<Message>> store = new HashMap<>();
@Override
public List<Message> getMessages(String id, int limit) {
// Note: this simple implementation ignores the limit parameter and returns all stored messages.
return store.getOrDefault(id, new LinkedList<>());
}
@Override
public void updateMessages(String id, List<Message> messages) {
store.put(id, new LinkedList<>(messages));
}
@Override
public void deleteMessages(String id) {
store.remove(id);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/Message.java
|
package ai.driftkit.workflows.core.chat;
import ai.driftkit.common.domain.ChatMessageType;
import ai.driftkit.common.domain.MessageType;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import jakarta.validation.constraints.NotNull;
@Data
@NoArgsConstructor
@JsonInclude(Include.NON_NULL)
public class Message {
@Builder
public Message(
String messageId,
String message,
ChatMessageType type,
MessageType messageType,
String imageTaskId,
Grade grade,
String gradeComment,
String workflow,
String context,
long createdTime,
long requestInitTime,
Long responseTime) {
this.messageId = messageId;
this.message = message;
this.type = type;
this.messageType = messageType;
this.imageTaskId = imageTaskId;
this.grade = grade;
this.gradeComment = gradeComment;
this.workflow = workflow;
this.context = context;
this.createdTime = createdTime;
this.requestInitTime = requestInitTime;
this.responseTime = responseTime;
}
@NotNull
String messageId;
@NotNull
String message;
@NotNull
ChatMessageType type;
@NotNull
MessageType messageType = MessageType.TEXT;
String imageTaskId;
Grade grade;
String gradeComment;
String workflow;
String context;
@NotNull
long createdTime;
@NotNull
long requestInitTime;
Long responseTime;
public ChatMessageType type() {
return type;
}
public String text() {
return message;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/SimpleTokenizer.java
|
package ai.driftkit.workflows.core.chat;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
/**
* A simple Tokenizer implementation for demonstration purposes.
*/
public class SimpleTokenizer implements Tokenizer {
public static final double DEFAULT_TOKEN_COST = 0.7;
@Override
public int estimateTokenCountInMessages(List<Message> messages) {
return messages.stream()
.mapToInt(this::estimateTokenCountInMessage)
.sum();
}
@Override
public int estimateTokenCountInMessage(Message message) {
if (StringUtils.isBlank(message.text())) {
return 0;
}
return (int) (message.text().length() * DEFAULT_TOKEN_COST);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/TokenWindowChatMemory.java
|
package ai.driftkit.workflows.core.chat;
import ai.driftkit.common.domain.ChatMessageType;
import ai.driftkit.common.utils.ValidationUtils;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
/**
* TokenWindowChatMemory is a custom chat memory implementation that retains the most recent messages
* within a fixed token window.
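 *
 * <p>Hypothetical usage sketch ({@code userMessage} is a placeholder {@link Message}):
 * <pre>{@code
 * ChatMemory memory = TokenWindowChatMemory.withMaxTokens(1000, new SimpleTokenizer());
 * memory.add(userMessage);
 * // Oldest non-system messages are evicted once the estimated token count exceeds 1000.
 * List<Message> window = memory.messages();
 * }</pre>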
*/
@Slf4j
public class TokenWindowChatMemory implements ChatMemory {
public static final int MESSAGES_LIMIT = 200;
private final String id;
private final Integer maxTokens;
private final Tokenizer tokenizer;
private final ChatMemoryStore store;
private TokenWindowChatMemory(String id, int maxTokens, Tokenizer tokenizer, ChatMemoryStore memoryStore) {
this.id = ValidationUtils.ensureNotNull(id, "id");
this.maxTokens = ValidationUtils.ensureGreaterThanZero(maxTokens, "maxTokens");
this.tokenizer = ValidationUtils.ensureNotNull(tokenizer, "tokenizer");
this.store = ValidationUtils.ensureNotNull(memoryStore, "store");
}
@Override
public String id() {
return id;
}
@Override
public void add(Message message) {
List<Message> messages = messages();
if (message.type() == ChatMessageType.SYSTEM) {
Optional<Message> maybeSystemMessage = findSystemMessage(messages);
if (maybeSystemMessage.isPresent()) {
if (maybeSystemMessage.get().equals(message)) {
return; // Do not add the same system message twice.
} else {
messages.remove(maybeSystemMessage.get());
}
}
}
messages.add(message);
ensureCapacity(messages, maxTokens, tokenizer);
store.updateMessages(id, messages);
}
private static Optional<Message> findSystemMessage(List<Message> messages) {
return messages.stream()
.filter(msg -> msg.type() == ChatMessageType.SYSTEM)
.findAny();
}
@Override
//TODO: remove constant
public List<Message> messages() {
List<Message> messages = new LinkedList<>(store.getMessages(id, MESSAGES_LIMIT));
ensureCapacity(messages, maxTokens, tokenizer);
return messages;
}
private static void ensureCapacity(List<Message> messages, int maxTokens, Tokenizer tokenizer) {
int currentTokenCount = tokenizer.estimateTokenCountInMessages(messages);
while (currentTokenCount > maxTokens) {
int messageToEvictIndex = 0;
if (messages.get(0).type() == ChatMessageType.SYSTEM) {
messageToEvictIndex = 1;
}
Message evictedMessage = messages.remove(messageToEvictIndex);
int tokenCountOfEvictedMessage = tokenizer.estimateTokenCountInMessage(evictedMessage);
log.trace("Evicting message ({} tokens) to meet capacity: {}", tokenCountOfEvictedMessage, evictedMessage);
currentTokenCount -= tokenCountOfEvictedMessage;
//TODO: tools support
// if (evictedMessage.getType() == ChatMessageType.AI && evictedMessage.hasToolExecutionRequests()) {
// while (messages.size() > messageToEvictIndex
// && messages.get(messageToEvictIndex) instanceof ToolExecutionResultMessage) {
// ChatMessage orphanToolExecutionResultMessage = messages.remove(messageToEvictIndex);
// log.trace("Evicting orphan message: {}", orphanToolExecutionResultMessage);
// currentTokenCount -= tokenizer.estimateTokenCountInMessage(orphanToolExecutionResultMessage);
// }
// }
}
}
@Override
public void clear() {
store.deleteMessages(id);
}
public static TokenWindowChatMemory withMaxTokens(int maxTokens, Tokenizer tokenizer) {
return new TokenWindowChatMemory(
UUID.randomUUID().toString(),
maxTokens,
tokenizer,
new InMemoryChatMemoryStore()
);
}
public static TokenWindowChatMemory withMaxTokens(String chatId, int maxTokens, Tokenizer tokenizer, ChatMemoryStore memoryStore) {
return new TokenWindowChatMemory(
chatId,
maxTokens,
tokenizer,
memoryStore
);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/chat/Tokenizer.java
|
package ai.driftkit.workflows.core.chat;
import java.util.List;
/**
* Tokenizer is responsible for estimating the number of tokens in chat messages.
*/
public interface Tokenizer {
int estimateTokenCountInMessages(List<Message> messages);
int estimateTokenCountInMessage(Message message);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/CombinedEvent.java
|
package ai.driftkit.workflows.core.domain;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
* Class representing a combined event that includes both the result of the data processing
* and the result of the condition evaluation.
*/
@Data
@AllArgsConstructor
public class CombinedEvent implements WorkflowEvent {
private Object dataResult; // Result from data processing
private boolean conditionResult; // Result of condition evaluation
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/DataEvent.java
|
package ai.driftkit.workflows.core.domain;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class DataEvent<T> implements WorkflowEvent {
private T result;
private String nextStepName;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/ExecutableWorkflow.java
|
package ai.driftkit.workflows.core.domain;
import ai.driftkit.workflows.core.service.ExecutableWorkflowGraph;
import ai.driftkit.workflows.core.service.WorkflowAnalyzer;
import com.fasterxml.jackson.databind.JsonNode;
public class ExecutableWorkflow<I extends StartEvent, O> {
ExecutableWorkflowGraph graph;
public ExecutableWorkflow() {
this.graph = WorkflowAnalyzer.buildExecutableWorkflowGraph(this);
}
public Class<I> getInputType() {
return (Class<I>) StartEvent.class;
}
public Class<O> getOutputType() {
return (Class<O>) JsonNode.class;
}
public StopEvent<O> execute(StartEvent startEvent, WorkflowContext workflowContext) throws Exception {
return graph.execute(this, startEvent, workflowContext);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/ExternalEvent.java
|
package ai.driftkit.workflows.core.domain;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ExternalEvent<T extends StartEvent> implements WorkflowEvent {
private Class<?> workflowCls;
private T startEvent;
private String nextStepName;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/FinalStep.java
|
package ai.driftkit.workflows.core.domain;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation for final workflow steps.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface FinalStep {
String expression() default ""; // The logic to execute
int invocationLimit() default 1;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/InlineStep.java
|
package ai.driftkit.workflows.core.domain;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation for inline steps with expressions.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface InlineStep {
String expression() default ""; // The logic to execute
String nextStep() default ""; // The default next step to execute
String condition() default ""; // Condition to determine the next step
String trueStep() default ""; // Next step if condition is true
String falseStep() default ""; // Next step if condition is false
int invocationLimit() default 5;
OnInvocationsLimit onInvocationsLimit() default OnInvocationsLimit.STOP;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/LLMRequest.java
|
package ai.driftkit.workflows.core.domain;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation for methods representing LLM requests.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface LLMRequest {
String prompt(); // The prompt to send to the LLM
String modelName(); // The model to use for the LLM request
String nextStep() default ""; // The default next step to execute after this LLM request
String condition() default ""; // Condition to determine the next step
String trueStep() default ""; // Next step if condition is true
String falseStep() default ""; // Next step if condition is false
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/LLMRequestEvent.java
|
package ai.driftkit.workflows.core.domain;
import ai.driftkit.common.domain.MessageTask;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class LLMRequestEvent extends StartEvent {
private MessageTask task;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/MethodInfo.java
|
package ai.driftkit.workflows.core.domain;
import ai.driftkit.workflows.core.domain.OnInvocationsLimit;
import ai.driftkit.workflows.core.domain.RetryPolicy;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.lang.reflect.Type;
import java.util.List;
/**
* Class to store information about methods.
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class MethodInfo {
private String methodName; // Method name
private List<Type> inputEvents; // Input events (excluding WorkflowContext)
private List<Type> outputEvents; // Output events (return types)
private RetryPolicy retryPolicy; // Retry policy for the method
private String description; // Description from @StepInfo
private String category; // Category from @StepInfo
private String conditionExpression; // Condition expression
private String trueStep; // Next step if condition is true
private String falseStep; // Next step if condition is false
// Fields for @LLMRequest
private String prompt; // Prompt template
private String modelName; // Model name
private String nextStep; // Default next step
// Fields for @InlineStep and @FinalStep
private String expression; // Expression to evaluate
private List<String> parameterNames; // Parameter names
private List<Type> allParamTypes; // All parameter types (including WorkflowContext)
private boolean isAbstract; // Indicates if the method is abstract
private boolean isFinal; // Indicates if the method is final
private int invocationsLimit;
private OnInvocationsLimit stepOnInvocationsLimit;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/OnInvocationsLimit.java
|
package ai.driftkit.workflows.core.domain;
public enum OnInvocationsLimit {
ERROR,
STOP,
CONTINUE;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/RetryPolicy.java
|
package ai.driftkit.workflows.core.domain;
public @interface RetryPolicy {
int delay() default 5;
int maximumAttempts() default 10;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/StartEvent.java
|
package ai.driftkit.workflows.core.domain;
import lombok.Data;
@Data
public class StartEvent implements WorkflowEvent {
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/StartQueryEvent.java
|
package ai.driftkit.workflows.core.domain;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class StartQueryEvent extends StartEvent {
private String query;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/Step.java
|
package ai.driftkit.workflows.core.domain;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation for workflow steps.
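 *
 * <p>Hypothetical sketch: a step method annotated with
 * {@code @Step(name = "classify", invocationLimit = 3, onInvocationsLimit = OnInvocationsLimit.STOP)}
 * (the method, event types and the "respond" step name are placeholders):
 * <pre>{@code
 * public DataEvent<String> classify(StartQueryEvent event, WorkflowContext context) {
 *     return new DataEvent<>("classified:" + event.getQuery(), "respond");
 * }
 * }</pre>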
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Step {
String name() default "";
RetryPolicy retryPolicy() default @RetryPolicy;
int invocationLimit() default 5;
OnInvocationsLimit onInvocationsLimit() default OnInvocationsLimit.STOP;
String nextStep() default ""; // The default next step to execute
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/StepInfo.java
|
package ai.driftkit.workflows.core.domain;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation for additional step information.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface StepInfo {
String description() default "";
String category() default "";
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/StopEvent.java
|
package ai.driftkit.workflows.core.domain;
import ai.driftkit.common.utils.JsonUtils;
import ai.driftkit.workflows.core.service.WorkflowAnalyzer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class StopEvent<T> implements WorkflowEvent {
private String result;
@JsonIgnore
private transient Class<T> cls;
public static <T> StopEvent<T> ofObject(T obj) throws JsonProcessingException {
return (StopEvent<T>) new StopEvent<>(JsonUtils.toJson(obj), obj.getClass());
}
public static StopEvent ofJson(String obj) throws JsonProcessingException {
return new StopEvent<>(obj, String.class);
}
public static StopEvent ofString(String obj) throws JsonProcessingException {
return new StopEvent<>(obj, String.class);
}
public T get() throws JsonProcessingException {
return get(cls);
}
public <T> T get(Class<T> cls) throws JsonProcessingException {
return WorkflowAnalyzer.objectMapper.readValue(result, cls);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/WorkflowContext.java
|
package ai.driftkit.workflows.core.domain;
import ai.driftkit.common.domain.MessageTask;
import ai.driftkit.common.utils.Counter;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
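/**
 * Mutable per-execution state shared between workflow steps: a key/value context,
 * per-step invocation counters, and the name of the current step.
 *
 * <p>Hypothetical usage sketch:
 * <pre>{@code
 * WorkflowContext ctx = new WorkflowContext();
 * ctx.add("answers", "first draft");
 * String latest = ctx.getLastContext("answers"); // "first draft"
 * ctx.addCounter("retries", 1);
 * int retries = ctx.getCounter("retries");       // 1
 * }</pre>
 */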
@Data
@AllArgsConstructor
public class WorkflowContext {
private MessageTask task;
private Map<String, Object> context;
private Counter<String> counter;
private String workflowId;
private String currentStep;
public WorkflowContext() {
this(null);
}
public WorkflowContext(MessageTask task) {
this(task, task != null ? task.getMessageId() : UUID.randomUUID().toString());
}
public WorkflowContext(MessageTask task, String workflowId) {
this.task = task;
this.context = new ConcurrentHashMap<>();
this.counter = new Counter<>();
this.workflowId = workflowId;
}
public void addCounter(String name, int amount) {
onCounterChange(name, amount);
this.counter.add(name, amount);
}
public int getCounter(String name) {
return this.counter.get(name);
}
public void onCounterChange(String name, int amount) {
}
public void onContextChange(String name, Object value) {
}
public void onStepInvocation(String method, WorkflowEvent event) {
addCounter(getMethodInvocationsCounterName(method), 1);
addCounter(event.getClass().getSimpleName() + "Event", 1);
this.currentStep = method;
add(method, event);
}
public int getStepInvocationCount(String method) {
return counter.get(getMethodInvocationsCounterName(method));
}
public static String getMethodInvocationsCounterName(String method) {
return method + "Invocation";
}
public void put(String name, Object value) {
onContextChange(name, value);
this.context.put(name, value);
}
public <T> T getLastContext(String name) {
List<T> list = (List<T>) this.context.computeIfAbsent(name, e -> new CopyOnWriteArrayList<>());
if (list.isEmpty()) {
return null;
}
return list.getLast();
}
public <T> void add(String name, T result) {
onContextChange(name, result);
List<T> list = (List<T>) this.context.computeIfAbsent(name, e -> new CopyOnWriteArrayList<>());
list.add(result);
}
public <T> T get(String name) {
return (T) this.context.get(name);
}
public <T> T getOrDefault(String name, T def) {
Object value = get(name);
if (value == null) {
return def;
}
return (T) value;
}
public String getAsString(String promptId) {
return (String) this.context.get(promptId);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/WorkflowEvent.java
|
package ai.driftkit.workflows.core.domain;
public interface WorkflowEvent {
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/enhanced/EnhancedReasoningPlan.java
|
package ai.driftkit.workflows.core.domain.enhanced;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* Represents a plan for reasoning and execution, together with a checklist for validating the result
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public class EnhancedReasoningPlan {
/**
* The original user query
*/
private String query;
/**
* The result of processing the query
*/
@JsonProperty("result")
private JsonNode result;
/**
* List of checklist items to validate the result
*/
@JsonProperty("checklist")
private List<ChecklistItem> checklist;
/**
* Represents a single checklist item for validation
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class ChecklistItem {
/**
* Description of the validation criterion
*/
@JsonProperty("description")
private String description;
/**
* The importance level of this criterion
* (critical, high, medium, low)
*/
@JsonProperty("severity")
private ChecklistSeverity severity;
}
/**
* Enum representing severity levels for checklist items
*/
public enum ChecklistSeverity {
CRITICAL,
HIGH,
MEDIUM,
LOW;
public boolean isHigherThan(ChecklistSeverity other) {
    // Lower ordinal means higher severity: CRITICAL is declared first.
    return this.ordinal() < other.ordinal();
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/enhanced/EnhancedReasoningResult.java
|
package ai.driftkit.workflows.core.domain.enhanced;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Standardized result format for EnhancedReasoningWorkflow
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public class EnhancedReasoningResult {
private static final ObjectMapper mapper = new ObjectMapper();
/**
* The original user query
*/
@JsonProperty("query")
private String query;
/**
* The final result content
*/
@JsonProperty("result")
private JsonNode result;
/**
* Checklist items used for validation
*/
@JsonProperty("checklist")
private List<EnhancedReasoningPlan.ChecklistItem> checklist;
/**
* Validation results for each checklist item
*/
@JsonProperty("validation")
private List<EnhancedReasoningValidation.ValidationItem> validationItems;
/**
* Overall confidence score (0.0 to 1.0)
*/
@JsonProperty("confidence")
private double confidence;
/**
* Whether the result is satisfactory based on validation
*/
@JsonProperty("is_satisfactory")
private boolean isSatisfactory;
/**
* Whether the fallback workflow was used
*/
@JsonProperty("used_fallback")
private boolean usedFallback;
/**
* History of all attempts (not serialized in JSON response)
*/
@JsonIgnore
private List<AttemptRecord> attemptHistory;
/**
* Create result object from normal execution path
*/
@SneakyThrows
public static EnhancedReasoningResult fromEnhancedReasoning(
String query,
JsonNode result,
List<EnhancedReasoningPlan.ChecklistItem> checklist,
EnhancedReasoningValidation validation) {
return EnhancedReasoningResult.builder()
.query(query)
.result(result)
.checklist(checklist)
.validationItems(validation != null ? validation.getItems() : Collections.emptyList())
.confidence(validation != null ? validation.getConfidence() : 0.0)
.isSatisfactory(validation != null ? validation.isSatisfactory() : false)
.usedFallback(false)
.attemptHistory(new ArrayList<>())
.build();
}
/**
* Create result object from fallback execution path
*/
@SneakyThrows
public static EnhancedReasoningResult fromFallback(String query, String result) {
return EnhancedReasoningResult.builder()
.query(query)
.result(mapper.readTree(result))
.checklist(Collections.emptyList())
.validationItems(Collections.emptyList())
.confidence(1.0) // We assume the fallback workflow produced correct results
.isSatisfactory(true)
.usedFallback(true)
.attemptHistory(new ArrayList<>())
.build();
}
/**
* Class to store information about each attempt
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class AttemptRecord {
private int attemptNumber;
private EnhancedReasoningPlan plan;
private EnhancedReasoningValidation validation;
private double confidence;
private boolean satisfactory;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/domain/enhanced/EnhancedReasoningValidation.java
|
package ai.driftkit.workflows.core.domain.enhanced;
import ai.driftkit.workflows.core.domain.enhanced.EnhancedReasoningPlan.ChecklistSeverity;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* Represents the result of validating a reasoning result against a checklist
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public class EnhancedReasoningValidation {
/**
* Overall satisfaction level (0.0 to 1.0)
*/
@JsonProperty("confidence")
private double confidence;
/**
* Whether the result is satisfactory based on the checklist
*/
@JsonProperty("is_satisfactory")
private boolean isSatisfactory;
/**
* Improved result if the original result needed fixing
*/
@JsonProperty("result")
private JsonNode result;
/**
* Validation results for each checklist item
*/
@JsonProperty("items")
private List<ValidationItem> items;
/**
* Feedback for the result that failed validation
*/
@JsonProperty("feedback")
private String feedback;
/**
* Represents validation results for a single checklist item
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class ValidationItem {
/**
* Reference to the original checklist item description
*/
@JsonProperty("description")
private String description;
/**
* The severity level from the checklist item
*/
@JsonProperty("severity")
private EnhancedReasoningPlan.ChecklistSeverity severity;
/**
* Score for this validation item (0.0 to 1.0)
*/
@JsonProperty("rating")
private double rating;
/**
* Whether this item passed validation
*/
@JsonProperty("passed")
private boolean passed;
/**
* Specific feedback for this item
*/
@JsonProperty("feedback")
private String feedback;
}
/**
* Calculate overall satisfaction rating based on severity weighted scores
*
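 * <p>Worked example (weights as in {@code getWeightForSeverity}): one HIGH item that passed with
 * rating 0.9 (weight 2.0) and one MEDIUM item with rating 0.6 (weight 1.0) give a weighted
 * confidence of (2.0 * 0.9 + 1.0 * 0.6) / 3.0 = 0.8, which meets any threshold up to 0.8 as long
 * as no CRITICAL item has failed.
 *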
* @return true if the result meets the satisfaction threshold
*/
public boolean calculateOverallSatisfaction(int iterations, double threshold) {
if (items == null || items.isEmpty()) {
return confidence >= threshold;
}
// Check whether any critical items failed; on the first iteration, failed HIGH items are also treated as blocking
boolean anyCriticalFailed = items.stream()
.anyMatch(item -> {
if (item.severity == EnhancedReasoningPlan.ChecklistSeverity.CRITICAL && !item.passed) {
return true;
}
if (iterations <= 1) {
return item.severity == ChecklistSeverity.HIGH && !item.passed;
}
return false;
});
if (anyCriticalFailed) {
return false;
}
// Calculate weighted score
double totalWeight = 0;
double weightedSum = 0;
for (ValidationItem item : items) {
double weight = getWeightForSeverity(item.severity);
totalWeight += weight;
weightedSum += weight * item.rating;
}
confidence = totalWeight > 0 ? weightedSum / totalWeight : 0.5;
return confidence >= threshold;
}
/**
* Get weight value based on severity level
*/
private double getWeightForSeverity(EnhancedReasoningPlan.ChecklistSeverity severity) {
switch (severity) {
case CRITICAL:
return 4.0;
case HIGH:
return 2.0;
case MEDIUM:
return 1.0;
case LOW:
return 0.5;
default:
return 1.0;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/service/ExecutableWorkflowGraph.java
|
package ai.driftkit.workflows.core.service;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.common.domain.client.ResponseFormat;
import ai.driftkit.common.utils.ModelUtils;
import ai.driftkit.workflows.core.domain.*;
import ai.driftkit.common.utils.JsonUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.jexl2.Expression;
import org.apache.commons.jexl2.JexlContext;
import org.apache.commons.jexl2.JexlEngine;
import org.apache.commons.jexl2.MapContext;
import org.apache.commons.lang3.StringUtils;
import java.lang.reflect.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* Class extending WorkflowGraph to execute the workflow.
*/
@Slf4j
@Data
@NoArgsConstructor
public class ExecutableWorkflowGraph extends WorkflowGraph {
private static final int MAX_DEPTH = 1000;
private static Map<Class<?>, WorkflowGraphInstance> workflowGraphMap;
static {
workflowGraphMap = new ConcurrentHashMap<>();
}
static void register(Class<?> workflowCls, Object instance, ExecutableWorkflowGraph graph) {
workflowGraphMap.put(workflowCls, new WorkflowGraphInstance(graph, instance));
}
public <T> StopEvent<T> execute(Object workflowInstance, StartEvent startEvent, WorkflowContext workflowContext) throws Exception {
return execute(workflowInstance, startEvent, workflowContext, null);
}
/**
* Executes the workflow starting from StartEvent.
*/
public <T> StopEvent<T> execute(Object workflowInstance, StartEvent startEvent,
WorkflowContext workflowContext,
ModelClient modelClient) throws Exception {
List<MethodInfo> startingMethods = getStartMethodsInfo();
if (startingMethods.isEmpty()) {
throw new IllegalStateException(
"No starting step found (methods accepting StartEvent)");
}
MethodInfo startingMethodInfo = startingMethods.get(0);
return executeStep(workflowInstance, startingMethodInfo, startEvent,
workflowContext, 0, modelClient);
}
private <T> StopEvent<T> executeStep(Object workflowInstance, MethodInfo methodInfo,
WorkflowEvent currentEvent,
WorkflowContext workflowContext,
int depth, ModelClient modelClient) throws Exception {
if (depth > MAX_DEPTH) {
throw new StackOverflowError("Maximum execution depth exceeded");
}
String methodName = methodInfo.getMethodName();
int counter = workflowContext.getCounter(WorkflowContext.getMethodInvocationsCounterName(methodName));
if (methodInfo.getInvocationsLimit() <= counter) {
switch (methodInfo.getStepOnInvocationsLimit()) {
case ERROR -> {
throw new StackOverflowError("Maximum invocations limit exceeded [%s] for method [%s]".formatted(counter, methodName));
}
case STOP -> {
String finalResult = ((DataEvent) currentEvent).getResult().toString();
return StopEvent.ofString(finalResult);
}
case CONTINUE -> {
String nextStepName = methodInfo.getNextStep();
MethodInfo nextMethodInfo = getMethodInfo(nextStepName);
if (nextMethodInfo == null) {
throw new IllegalStateException("No method found for step name: " + nextStepName);
}
return executeStep(workflowInstance, nextMethodInfo, currentEvent,
workflowContext, depth + 1, modelClient);
}
}
}
workflowContext.onStepInvocation(methodName, currentEvent);
// Build arguments for the method
Object[] args = buildMethodArguments(methodInfo,
currentEvent, workflowContext);
try {
Object returnValue;
if (methodInfo.getPrompt() != null
&& !methodInfo.getPrompt().isEmpty()) {
// Handling methods annotated with @LLMRequest
if (modelClient == null) {
throw new IllegalArgumentException("ModelClient is required for methods annotated with @LLMRequest");
}
returnValue = invokeLLMRequest(methodInfo, args,
workflowContext, modelClient);
} else if (methodInfo.getExpression() != null
&& !methodInfo.getExpression().isEmpty()) {
// Handling methods annotated with @InlineStep and @FinalStep
returnValue = executeInlineStep(methodInfo, args, workflowContext);
} else {
// Invoke the method on the workflow instance
if (methodInfo.isAbstract()) {
if (methodInfo.isFinal()) {
return StopEvent.ofString(((DataEvent) currentEvent).getResult().toString());
} else {
throw new UnsupportedOperationException("Cannot invoke abstract method: " + methodInfo.getMethodName());
}
}
if (workflowInstance == null) {
throw new IllegalArgumentException("Workflow instance is required for invoking methods");
}
Method currentMethod = findMethod(workflowInstance.getClass(), methodInfo);
if (currentMethod == null) {
throw new NoSuchMethodException("Method not found: " + methodName);
}
currentMethod.setAccessible(true);
returnValue = invokeWithRetry(workflowInstance, currentMethod, args,
methodInfo.getRetryPolicy());
}
if (returnValue == null) {
throw new IllegalStateException("Method "
+ methodInfo.getMethodName() + " returned null");
}
if (returnValue instanceof StopEvent) {
return (StopEvent<T>) returnValue;
} else if (returnValue instanceof ExternalEvent externalEvent) {
WorkflowGraphInstance relatedWorkflow = workflowGraphMap.get(externalEvent.getWorkflowCls());
if (relatedWorkflow == null) {
throw new IllegalStateException("External workflow " + externalEvent.getWorkflowCls().getSimpleName() + " is unregistered, workflow step failed");
}
StopEvent<?> executed = relatedWorkflow.getGraph().execute(relatedWorkflow.getInstance(), externalEvent.getStartEvent(), workflowContext);
MethodInfo nextMethodInfo = getMethodInfo(externalEvent.getNextStepName());
DataEvent event = new DataEvent(executed.get(), externalEvent.getNextStepName());
// Proceed to the next step
return executeStep(workflowInstance, nextMethodInfo, event,
workflowContext, depth + 1, modelClient);
} else if (returnValue instanceof DataEvent) {
DataEvent<?> dataEvent = (DataEvent<?>) returnValue;
// Determine the next step based on condition
String nextStepName = determineNextStep(methodInfo, args, dataEvent, workflowContext);
if (nextStepName == null || nextStepName.isEmpty()) {
return StopEvent.ofString(dataEvent.getResult().toString());
}
MethodInfo nextMethodInfo = getMethodInfo(nextStepName);
if (nextMethodInfo == null) {
throw new IllegalStateException("No method found for step name: "
+ nextStepName);
}
// Prepare the event for the next step
WorkflowEvent nextEvent;
if (methodInfo.getConditionExpression() != null && !methodInfo.getConditionExpression().isEmpty()) {
// If there was a condition, create a CombinedEvent
nextEvent = new CombinedEvent(dataEvent.getResult(), lastConditionResult);
} else {
// No condition; pass the data result directly
nextEvent = dataEvent;
}
// Proceed to the next step
return executeStep(workflowInstance, nextMethodInfo, nextEvent,
workflowContext, depth + 1, modelClient);
} else {
throw new IllegalStateException(
"Unexpected return type from method: "
+ methodInfo.getMethodName());
}
} catch (Exception e) {
log.error("Error executing method {}: {}",
methodInfo.getMethodName(), e.getMessage());
throw e;
}
}
private boolean lastConditionResult = false; // Store the last condition result
private String determineNextStep(MethodInfo methodInfo, Object[] args, Object dataResult, WorkflowContext context) throws Exception {
if (methodInfo.getConditionExpression() != null && !methodInfo.getConditionExpression().isEmpty()) {
// Prepare variables for condition evaluation
Map<String, Object> variables = prepareVariables(methodInfo, args, dataResult, context);
// Evaluate condition
boolean conditionResult = evaluateConditionExpression(methodInfo.getConditionExpression(), variables);
lastConditionResult = conditionResult;
if (conditionResult) {
return methodInfo.getTrueStep();
} else {
return methodInfo.getFalseStep();
}
} else if (dataResult instanceof DataEvent && StringUtils.isNotBlank(((DataEvent<?>) dataResult).getNextStepName())) {
String nextStepName = ((DataEvent<?>) dataResult).getNextStepName();
if (nextStepName.equals(methodInfo.getMethodName()) && methodInfo.isFinal()) {
return null;
}
return nextStepName;
} else if (methodInfo.getNextStep() != null && !methodInfo.getNextStep().isEmpty()) {
// Default next step specified
return methodInfo.getNextStep();
} else {
// Attempt to find next step based on the workflow graph
List<String> nextSteps = getAdjacencyList().get(methodInfo.getMethodName());
if (nextSteps != null && !nextSteps.isEmpty()) {
return nextSteps.get(0); // Proceed to the next connected step
} else {
return null;
}
}
}
private Map<String, Object> prepareVariables(MethodInfo methodInfo, Object[] args, Object dataResult, WorkflowContext context) throws JsonProcessingException {
Map<String, Object> variables = new HashMap<>();
// Add variables from method arguments
List<String> parameterNames = methodInfo.getParameterNames();
for (int i = 0; i < parameterNames.size(); i++) {
String paramName = parameterNames.get(i);
Object argValue = args[i];
if (argValue instanceof WorkflowContext) {
continue;
}
variables.put(paramName, argValue);
}
// Add dataResult variable
variables.put("dataResult", dataResult);
// If dataResult is a DataEvent, get its result
Object result;
if (dataResult instanceof DataEvent) {
result = ((DataEvent<?>) dataResult).getResult();
} else {
result = dataResult;
}
variables.put("result", result);
// If result is JsonNode, make it accessible in expression
if (result instanceof JsonNode) {
Map<String, Object> response = ModelUtils.OBJECT_MAPPER.convertValue(result, Map.class);
variables.put("response", response);
}
// Add variables from context if needed
// variables.putAll(context.getVariables());
return variables;
}
private boolean evaluateConditionExpression(String expression, Map<String, Object> variables) throws Exception {
JexlEngine jexl = new JexlEngine();
Expression jexlExpression = jexl.createExpression(expression);
JexlContext jexlContext = new MapContext(variables);
Object result = jexlExpression.evaluate(jexlContext);
if (result instanceof Boolean) {
return (Boolean) result;
} else {
throw new IllegalArgumentException("Condition expression did not return a boolean value: " + expression);
}
}
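// Illustrative sketch (not part of the original source): how the JEXL-based condition
// evaluation above behaves. The variable names ("response", "score") are hypothetical;
// in a real run they come from prepareVariables().
//
//     Map<String, Object> vars = new HashMap<>();
//     vars.put("response", Map.of("score", 0.9));
//     JexlEngine jexl = new JexlEngine();
//     Object outcome = jexl.createExpression("response.score > 0.5").evaluate(new MapContext(vars));
//     // outcome is Boolean.TRUE, so determineNextStep(...) would follow the trueStep branch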
private Object invokeLLMRequest(MethodInfo methodInfo,
Object[] args,
WorkflowContext workflowContext,
ModelClient modelClient)
throws Exception {
// Prepare the prompt with placeholder substitution
String prompt = methodInfo.getPrompt();
prompt = substituteVariables(prompt, methodInfo, args, workflowContext);
String modelName = methodInfo.getModelName();
// Extract configuration from arguments if present
Map<String, Object> config = extractConfigFromArgs(args);
// Send request to the model using the modelClient
ModelTextResponse modelTextResponse = sendLLMRequest(prompt, modelName, config, modelClient);
Object response;
if (JsonUtils.isJSON(modelTextResponse.getResponse())) {
response = modelTextResponse.getResponseJson();
} else {
response = modelTextResponse.getResponse();
}
return new DataEvent<>(response, methodInfo.getNextStep());
}
private Object executeInlineStep(MethodInfo methodInfo,
Object[] args,
WorkflowContext workflowContext)
throws Exception {
// Evaluate the expression
String expression = methodInfo.getExpression();
if ("{event}".equals(expression)) {
return args[0];
}
Object result = evaluateExpression(expression, methodInfo, args, workflowContext);
return new DataEvent<>(result, methodInfo.getNextStep());
}
private Object evaluateExpression(String expression,
MethodInfo methodInfo,
Object[] args,
WorkflowContext context)
throws Exception {
// Substitute variables in the expression
String substitutedExpression = substituteVariables(expression, methodInfo, args, context);
// For simplicity, return the substituted expression
return substitutedExpression;
}
private ModelTextResponse sendLLMRequest(String prompt, String modelName, Map<String, Object> config, ModelClient modelClient) {
// Use the model client to send the request
List<ModelImageResponse.ModelContentMessage> messages = new ArrayList<>();
messages.add(ModelImageResponse.ModelContentMessage.create(Role.user, prompt));
ModelTextRequest request = ModelTextRequest.builder()
.temperature((Double) config.getOrDefault("temperature", 0.7))
.model(modelName)
.messages(messages)
.responseFormat(prompt.toLowerCase().contains("json") ? ResponseFormat.jsonObject() : null)
.build();
return modelClient.textToText(request);
}
private String substituteVariables(String text, MethodInfo methodInfo,
Object[] args,
WorkflowContext context)
throws Exception {
// Map parameter names to their values
List<String> parameterNames = methodInfo.getParameterNames();
Map<String, Object> variables = new HashMap<>();
for (int i = 0; i < parameterNames.size(); i++) {
String paramName = parameterNames.get(i);
Object argValue = args[i];
// Exclude WorkflowContext from variables
if (argValue instanceof WorkflowContext) {
continue;
}
variables.put(paramName, argValue);
}
// Substitute placeholders in the text
Pattern pattern = Pattern.compile("\\{(\\w+(?:\\.\\w+)*)\\}");
Matcher matcher = pattern.matcher(text);
StringBuffer result = new StringBuffer();
while (matcher.find()) {
String placeholder = matcher.group(1);
Object value = resolvePlaceholderValue(placeholder, variables,
context);
matcher.appendReplacement(result,
Matcher.quoteReplacement(
value != null ? value.toString() : ""));
}
matcher.appendTail(result);
return result.toString();
}
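// Illustrative sketch (not part of the original source): placeholders follow the
// {name} / {name.property} pattern above and resolve against method parameters first,
// then the WorkflowContext. Assuming a hypothetical parameter "doc" whose getTitle()
// returns "Q3 report", the template
//     "Summarize {doc.title} in {language}"
// becomes
//     "Summarize Q3 report in "
// because an unresolved placeholder ("language" here) is replaced with an empty string.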
private Object resolvePlaceholderValue(String placeholder,
Map<String, Object> variables,
WorkflowContext context)
throws Exception {
String[] parts = placeholder.split("\\.");
Object value = null;
if (variables.containsKey(parts[0])) {
value = variables.get(parts[0]);
} else if (context.get(parts[0]) != null) {
value = context.get(parts[0]);
}
for (int i = 1; i < parts.length; i++) {
if (value == null) {
return null;
}
String propertyName = parts[i];
value = getPropertyValue(value, propertyName);
}
return value;
}
private Object getPropertyValue(Object obj, String propertyName)
throws Exception {
if (obj instanceof Map) {
return ((Map<?, ?>) obj).get(propertyName);
} else if (obj instanceof JsonNode) {
return ((JsonNode) obj).get(propertyName);
} else {
Class<?> clazz = obj.getClass();
try {
Field field = clazz.getDeclaredField(propertyName);
field.setAccessible(true);
return field.get(obj);
} catch (NoSuchFieldException e) {
// Try to get the value via getter
String methodName = "get"
+ Character.toUpperCase(propertyName.charAt(0))
+ propertyName.substring(1);
Method method = clazz.getMethod(methodName);
return method.invoke(obj);
}
}
}
private Map<String, Object> extractConfigFromArgs(Object[] args) {
for (Object arg : args) {
if (arg instanceof Map) {
return (Map<String, Object>) arg;
}
}
return new HashMap<>();
}
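// Illustrative sketch (not part of the original source): any Map parameter of an
// @LLMRequest step is picked up by extractConfigFromArgs() as model configuration, so a
// hypothetical step signature such as
//     public DataEvent<String> summarize(DataEvent<String> input, Map<String, Object> config, WorkflowContext ctx)
// called with a config containing "temperature" -> 0.2 would override the 0.7 default
// used in sendLLMRequest() above.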
private Object[] buildMethodArguments(MethodInfo methodInfo,
Object event,
WorkflowContext context)
throws Exception {
List<Type> paramTypes = methodInfo.getAllParamTypes();
List<String> parameterNames = methodInfo.getParameterNames();
Object[] args = new Object[paramTypes.size()];
int index = 0;
for (Type paramType : paramTypes) {
Class<?> paramClass = getClassFromType(paramType);
if (paramClass.isAssignableFrom(event.getClass())) {
args[index++] = event;
} else if (paramClass.isAssignableFrom(WorkflowContext.class)) {
args[index++] = context;
} else if (paramClass.isAssignableFrom(Map.class)) {
args[index++] = new HashMap<String, Object>();
} else if (paramClass == String.class && event instanceof DataEvent) {
args[index++] = ((DataEvent<?>) event).getResult();
} else {
throw new IllegalArgumentException("Cannot match parameter of type %s".formatted(paramClass.getName()));
}
}
return args;
}
private Class<?> getClassFromType(Type type)
throws ClassNotFoundException {
if (type instanceof Class<?>) {
return (Class<?>) type;
} else if (type instanceof ParameterizedType) {
ParameterizedType pt = (ParameterizedType) type;
Type rawType = pt.getRawType();
if (rawType instanceof Class<?>) {
return (Class<?>) rawType;
}
}
throw new ClassNotFoundException("Cannot determine class from type: %s".formatted(type.getTypeName()));
}
private Method findMethod(Class<?> clazz, MethodInfo methodInfo) throws ClassNotFoundException {
if (methodInfo.isAbstract()) {
// No need to find abstract methods in the instance; they are handled internally
return null;
}
Class<?>[] methodParamClasses = getParameterClasses(methodInfo.getAllParamTypes());
for (Method method : clazz.getMethods()) {
if (method.getName().equals(methodInfo.getMethodName())
&& Arrays.equals(method.getParameterTypes(), methodParamClasses)) {
return method;
}
}
return null;
}
private Class<?>[] getParameterClasses(List<Type> types) throws ClassNotFoundException {
List<Class<?>> classes = new ArrayList<>();
for (Type type : types) {
classes.add(getClassFromType(type));
}
return classes.toArray(new Class<?>[0]);
}
private Object invokeWithRetry(Object workflowInstance, Method method,
Object[] args,
RetryPolicy retryPolicy)
throws Exception {
int attempts = 0;
int maxAttempts = retryPolicy.maximumAttempts();
int delay = retryPolicy.delay();
while (true) {
try {
return method.invoke(workflowInstance, args);
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
attempts++;
if (attempts >= maxAttempts) {
throw (cause != null) ? new Exception(cause) : e;
}
log.error(
"Method {} failed with exception: {}. Retrying {}/{}",
method.getName(), cause != null ? cause.getMessage() : e.getMessage(), attempts,
maxAttempts, e);
Thread.sleep(delay * 1000L);
}
}
}
private List<MethodInfo> getStartMethodsInfo() {
return getMethods().values().stream()
.filter(mi -> mi.getInputEvents().stream()
.anyMatch(e -> isAssignableFrom(e,
StartEvent.class)))
.collect(Collectors.toList());
}
private boolean isAssignableFrom(Type type, Class<?> clazz) {
if (type instanceof Class<?>) {
return clazz.isAssignableFrom((Class<?>) type);
} else if (type instanceof ParameterizedType) {
Type rawType = ((ParameterizedType) type).getRawType();
if (rawType instanceof Class<?>) {
return clazz.isAssignableFrom((Class<?>) rawType);
}
}
return false;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class WorkflowGraphInstance {
ExecutableWorkflowGraph graph;
Object instance;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/service/WorkflowAnalyzer.java
|
package ai.driftkit.workflows.core.service;
import ai.driftkit.workflows.core.domain.*;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import java.lang.annotation.*;
import java.lang.reflect.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* WorkflowAnalyzer is responsible for building and executing workflows
* based on annotations and method signatures.
*/
@Slf4j
public class WorkflowAnalyzer {
public static final ObjectMapper objectMapper = new ObjectMapper();
/**
* Builds the executable workflow graph by analyzing the specified Workflow class.
*/
public static ExecutableWorkflowGraph buildExecutableWorkflowGraph(Object instance) {
Class<?> workflowClass = instance.getClass();
ExecutableWorkflowGraph graph = new ExecutableWorkflowGraph();
graph.setWorkflowClass(workflowClass); // Store the workflow class
Method[] methods = workflowClass.getMethods();
List<MethodInfo> stepMethods = new ArrayList<>();
// Collect information about methods to include in the workflow
for (Method method : methods) {
boolean isWorkflowMethod = false;
Type returnType = method.getGenericReturnType();
boolean isStopEvent = isAssignableFrom(returnType, StopEvent.class);
// Check if method is annotated with LLMRequest, InlineStep, or Step
if (method.isAnnotationPresent(LLMRequest.class)
|| method.isAnnotationPresent(InlineStep.class)
|| method.isAnnotationPresent(FinalStep.class)
|| method.isAnnotationPresent(Step.class)) {
isWorkflowMethod = true;
} else {
if (isAssignableFrom(returnType, DataEvent.class)
|| isStopEvent) {
isWorkflowMethod = true;
}
}
if (isWorkflowMethod) {
// Proceed to collect method information
// Get annotations
LLMRequest llmRequestAnnotation = method.getAnnotation(LLMRequest.class);
InlineStep inlineStepAnnotation = method.getAnnotation(InlineStep.class);
FinalStep finalStepAnnotation = method.getAnnotation(FinalStep.class);
Step stepAnnotation = method.getAnnotation(Step.class);
// Get RetryPolicy
RetryPolicy retryPolicy = (stepAnnotation != null)
? stepAnnotation.retryPolicy()
: new RetryPolicy() {
public int delay() {
return 5;
}
public int maximumAttempts() {
return 10;
}
public Class<? extends Annotation> annotationType() {
return RetryPolicy.class;
}
};
// Collect all parameter types (including WorkflowContext)
Type[] paramTypes = method.getGenericParameterTypes();
List<Type> allParamTypes = Arrays.asList(paramTypes);
// Collect input events (excluding WorkflowContext)
List<Type> inputEvents = Arrays.stream(paramTypes)
.filter(type -> !isWorkflowContext(type))
.collect(Collectors.toList());
// Get parameter names
Parameter[] parameters = method.getParameters();
List<String> parameterNames = new ArrayList<>();
for (Parameter parameter : parameters) {
parameterNames.add(parameter.getName());
}
// Get output events
List<Type> outputEvents = new ArrayList<>();
outputEvents.add(returnType);
// Determine the step name
String stepName = method.getName();
// Process additional annotations
String description = "";
String category = "";
String conditionExpression = "";
String prompt = "";
String modelName = "";
String nextStep = "";
String trueStep = "";
String falseStep = "";
String expression = "";
int invocationsLimit = 0;
OnInvocationsLimit onInvocationsLimit = OnInvocationsLimit.STOP;
if (method.isAnnotationPresent(StepInfo.class)) {
StepInfo stepInfo = method.getAnnotation(StepInfo.class);
description = stepInfo.description();
category = stepInfo.category();
}
if (llmRequestAnnotation != null) {
prompt = llmRequestAnnotation.prompt();
modelName = llmRequestAnnotation.modelName();
nextStep = llmRequestAnnotation.nextStep();
conditionExpression = llmRequestAnnotation.condition();
trueStep = llmRequestAnnotation.trueStep();
falseStep = llmRequestAnnotation.falseStep();
}
if (inlineStepAnnotation != null) {
expression = inlineStepAnnotation.expression();
nextStep = inlineStepAnnotation.nextStep();
conditionExpression = inlineStepAnnotation.condition();
trueStep = inlineStepAnnotation.trueStep();
falseStep = inlineStepAnnotation.falseStep();
invocationsLimit = inlineStepAnnotation.invocationLimit();
onInvocationsLimit = inlineStepAnnotation.onInvocationsLimit();
}
if (finalStepAnnotation != null) {
expression = finalStepAnnotation.expression();
invocationsLimit = finalStepAnnotation.invocationLimit();
}
if (stepAnnotation != null && stepAnnotation.name() != null && !stepAnnotation.name().isEmpty()) {
stepName = stepAnnotation.name();
invocationsLimit = stepAnnotation.invocationLimit();
onInvocationsLimit = stepAnnotation.onInvocationsLimit();
nextStep = stepAnnotation.nextStep();
}
// Determine if the method is abstract
boolean isAbstractMethod = Modifier.isAbstract(method.getModifiers());
// Create MethodInfo object
MethodInfo methodInfo = new MethodInfo(
stepName,
inputEvents,
outputEvents,
retryPolicy,
description,
category,
conditionExpression,
trueStep,
falseStep,
prompt,
modelName,
nextStep,
expression,
parameterNames,
allParamTypes,
isAbstractMethod, // Include abstract flag
finalStepAnnotation != null || isStopEvent,
invocationsLimit == 0 ? 5 : invocationsLimit,
onInvocationsLimit
);
stepMethods.add(methodInfo);
graph.addMethod(methodInfo);
}
}
// Establish connections between methods based on specified next steps
for (MethodInfo method : stepMethods) {
// If the method specifies next steps, add edges accordingly
if (method.getTrueStep() != null && !method.getTrueStep().isEmpty()) {
graph.addEdge(method.getMethodName(), method.getTrueStep());
}
if (method.getFalseStep() != null && !method.getFalseStep().isEmpty()) {
graph.addEdge(method.getMethodName(), method.getFalseStep());
}
if (method.getNextStep() != null && !method.getNextStep().isEmpty()) {
graph.addEdge(method.getMethodName(), method.getNextStep());
}
}
ExecutableWorkflowGraph.register(workflowClass, instance, graph);
return graph;
}
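// Illustrative sketch (not part of the original source): a minimal annotated workflow
// class this analyzer could turn into an executable graph. Class, method and step names
// are hypothetical, and no-arg StartEvent/WorkflowContext constructors are assumed.
//
//     public class GreetingWorkflow {
//         @Step(name = "start", nextStep = "finish")
//         public DataEvent<String> start(StartEvent event, WorkflowContext ctx) {
//             return new DataEvent<>("hello", "finish");
//         }
//         @Step(name = "finish")
//         public StopEvent<String> finish(DataEvent<String> data, WorkflowContext ctx) {
//             return StopEvent.ofString(data.getResult().toString());
//         }
//     }
//
//     GreetingWorkflow wf = new GreetingWorkflow();
//     ExecutableWorkflowGraph graph = WorkflowAnalyzer.buildExecutableWorkflowGraph(wf);
//     StopEvent<String> result = graph.execute(wf, new StartEvent(), new WorkflowContext());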
/**
* Checks if the type is WorkflowContext.
*/
private static boolean isWorkflowContext(Type type) {
if (type instanceof Class<?>) {
return WorkflowContext.class
.isAssignableFrom((Class<?>) type);
} else if (type instanceof ParameterizedType) {
Type rawType = ((ParameterizedType) type).getRawType();
if (rawType instanceof Class<?>) {
return WorkflowContext.class
.isAssignableFrom((Class<?>) rawType);
}
}
return false;
}
private static boolean isAssignableFrom(Type type, Class<?> clazz) {
if (type instanceof Class<?>) {
return clazz.isAssignableFrom((Class<?>) type);
} else if (type instanceof ParameterizedType) {
Type rawType = ((ParameterizedType) type).getRawType();
if (rawType instanceof Class<?>) {
return clazz.isAssignableFrom((Class<?>) rawType);
}
}
return false;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/service/WorkflowGraph.java
|
package ai.driftkit.workflows.core.service;
import ai.driftkit.workflows.core.domain.MethodInfo;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Class representing the workflow graph of methods.
*/
@Data
@NoArgsConstructor
public class WorkflowGraph {
private Class<?> workflowClass; // The workflow class or interface
// Graph nodes: method name -> MethodInfo
private Map<String, MethodInfo> methods = new HashMap<>();
// Graph edges: from method -> list of methods it connects to
private Map<String, List<String>> adjacencyList = new HashMap<>();
/**
* Adds a method to the graph.
*/
public void addMethod(MethodInfo methodInfo) {
methods.put(methodInfo.getMethodName(), methodInfo);
}
/**
* Adds an edge between two methods in the graph.
*/
public void addEdge(String fromMethod, String toMethod) {
adjacencyList.computeIfAbsent(fromMethod, k -> new ArrayList<>())
.add(toMethod);
}
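// Illustrative sketch (not part of the original source): the adjacency list is a plain
// step-name-to-successors map, so a linear two-step graph can be wired up directly
// (MethodInfo construction omitted):
//
//     WorkflowGraph graph = new WorkflowGraph();
//     graph.addEdge("start", "finish");
//     graph.getAdjacencyList().get("start");   // -> ["finish"]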
/**
* Retrieves the MethodInfo for a given method name.
*/
public MethodInfo getMethodInfo(String methodName) {
return methods.get(methodName);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core
|
java-sources/ai/driftkit/driftkit-workflows-core/0.8.1/ai/driftkit/workflows/core/service/WorkflowRegistry.java
|
package ai.driftkit.workflows.core.service;
import ai.driftkit.workflows.core.domain.ExecutableWorkflow;
import ai.driftkit.workflows.core.domain.StartEvent;
import ai.driftkit.workflows.core.domain.StopEvent;
import ai.driftkit.workflows.core.domain.WorkflowContext;
import lombok.Data;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A registry for all available workflows in the system.
* This provides a centralized place to register and retrieve workflows.
*/
public class WorkflowRegistry {
private static final Map<String, RegisteredWorkflow> registeredWorkflows = new HashMap<>();
/**
* Register a new workflow.
*
* @param id The unique identifier for the workflow
* @param name A human-readable name for the workflow
* @param description A brief description of what the workflow does
* @param workflow The executable workflow instance
*/
public static void registerWorkflow(String id, String name, String description, Object workflow) {
if (!(workflow instanceof ExecutableWorkflow)) {
throw new IllegalArgumentException("Workflow must be an instance of ExecutableWorkflow");
}
RegisteredWorkflow registeredWorkflow = new RegisteredWorkflow();
registeredWorkflow.setId(id);
registeredWorkflow.setName(name);
registeredWorkflow.setDescription(description);
registeredWorkflow.setWorkflow((ExecutableWorkflow<?,?>) workflow);
registeredWorkflows.put(id, registeredWorkflow);
}
/**
* Get a workflow by its ID.
*
* @param id The workflow ID
* @return The registered workflow or null if not found
*/
public static RegisteredWorkflow getWorkflow(String id) {
return registeredWorkflows.get(id);
}
/**
* Get all registered workflows.
*
* @return A list of all registered workflows
*/
public static List<RegisteredWorkflow> getAllWorkflows() {
return new ArrayList<>(registeredWorkflows.values());
}
/**
* Check if a workflow is registered with the given ID.
*
* @param id The workflow ID to check
* @return true if the workflow is registered, false otherwise
*/
public static boolean hasWorkflow(String id) {
return registeredWorkflows.containsKey(id);
}
/**
* Execute a workflow by its ID.
*
* @param workflowId The ID of the workflow to execute
* @param startEvent The start event to pass to the workflow
* @param workflowContext The workflow context
* @return The result of the workflow execution
* @throws Exception If the workflow execution fails
*/
@SuppressWarnings("unchecked")
public static <T, E extends StartEvent> StopEvent<T> executeWorkflow(String workflowId, E startEvent, WorkflowContext workflowContext) throws Exception {
RegisteredWorkflow registeredWorkflow = getWorkflow(workflowId);
if (registeredWorkflow == null) {
throw new IllegalArgumentException("No workflow registered with ID: " + workflowId);
}
ExecutableWorkflow<E, T> workflow = (ExecutableWorkflow<E, T>) registeredWorkflow.getWorkflow();
return workflow.execute(startEvent, workflowContext);
}
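// Illustrative sketch (not part of the original source): registering and executing a
// workflow by id. The id, name and the ragSearchWorkflow/startEvent/workflowContext
// variables are hypothetical; any ExecutableWorkflow instance works the same way.
//
//     WorkflowRegistry.registerWorkflow("ragSearch", "RAG search",
//             "Vector search with LLM rerank", ragSearchWorkflow);
//     if (WorkflowRegistry.hasWorkflow("ragSearch")) {
//         StopEvent<?> result =
//                 WorkflowRegistry.executeWorkflow("ragSearch", startEvent, workflowContext);
//     }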
/**
* Represents a registered workflow in the system.
*/
@Data
public static class RegisteredWorkflow {
private String id;
private String name;
private String description;
private ExecutableWorkflow<?, ?> workflow;
// Accessor for the workflow instance; toMap() below omits it for serialization
public ExecutableWorkflow<?, ?> getWorkflow() {
return workflow;
}
// For JSON serialization
public Map<String, String> toMap() {
Map<String, String> map = new HashMap<>();
map.put("id", id);
map.put("name", name);
map.put("description", description);
return map;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples/agent/SimplifiedAgentExamples.java
|
package ai.driftkit.workflows.examples.agent;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.common.tools.ToolInfo;
import ai.driftkit.workflows.core.agent.*;
import ai.driftkit.workflows.core.agent.tool.AgentAsTool;
import lombok.extern.slf4j.Slf4j;
/**
* Examples demonstrating the simplified agent API usage.
* These examples show how to use the new simplified interfaces compared to complex workflows.
*/
@Slf4j
public class SimplifiedAgentExamples {
private final ModelClient llm;
public SimplifiedAgentExamples(ModelClient llm) {
this.llm = llm;
}
/**
* Example 1: Simple Loop Agent for Travel Planning
* Shows a basic worker/evaluator loop.
*/
public String travelPlanningLoop() {
// Agent that does the main work
Agent workerAgent = LLMAgent.builder()
.modelClient(llm)
.systemMessage("Generate a travel plan for a 3-day trip to Paris.")
.build();
Agent evaluatorAgent = LLMAgent.builder()
.modelClient(llm)
.systemMessage(
"Analyze the provided travel plan. Check if it includes all required elements: " +
"1. Visit to the Louvre Museum, " +
"2. Visit to the Eiffel Tower, " +
"3. A boat trip on the Seine. " +
"Determine whether the plan is COMPLETE or needs REVISE status.")
.build();
// LoopAgent that drives the loop
LoopAgent planningLoop = LoopAgent.builder()
.worker(workerAgent)
.evaluator(evaluatorAgent)
.stopCondition(LoopStatus.COMPLETE) // Condition for exiting the loop
.build();
// Run the loop
return planningLoop.execute("Create a plan for me.");
}
/**
* Example 2: Sequential Agent for Research and Writing
* Shows two agents chained sequentially.
*/
public String researchAndWriteWorkflow() {
// Researcher agent: gathers information
Agent researcherAgent = LLMAgent.builder()
.modelClient(llm)
.systemMessage("You are a researcher. Find detailed information on the given topic.")
.build();
// Writer agent: produces a short summary from the gathered information
Agent writerAgent = LLMAgent.builder()
.modelClient(llm)
.systemMessage("You are a writer. Summarize the provided text into a concise paragraph.")
.build();
// Create a SequentialAgent that manages the workflow
SequentialAgent researchAndWriteWorkflow = SequentialAgent.builder()
.agent(researcherAgent) // Step 1
.agent(writerAgent) // Step 2
.build();
// Run the whole workflow with the initial request
String topic = "The history of the Eiffel Tower";
return researchAndWriteWorkflow.execute(topic);
}
/**
* Example 3: Agent Composition with Tools
* Shows how agents can be used as tools for other agents.
*/
public String travelOrchestratorExample() {
// Create an agent for each step
Agent flightAgent = LLMAgent.builder()
.modelClient(llm)
.systemMessage("Search for flights. Return flight options with prices and times.")
.name("FlightSearchAgent")
.build();
Agent hotelAgent = LLMAgent.builder()
.modelClient(llm)
.systemMessage("Book hotels for given dates. Return hotel options with availability.")
.name("HotelBookingAgent")
.build();
// Wrap the agents as tools
ToolInfo flightTool = AgentAsTool.create("flightSearch", "Searches for flights.", flightAgent);
ToolInfo hotelTool = AgentAsTool.create("hotelBooking", "Books hotels for given dates.", hotelAgent);
// Create the main orchestrator agent
Agent travelOrchestrator = LLMAgent.builder()
.modelClient(llm)
.systemMessage(
"You are a travel planner. First, find flights. Then, book a hotel. " +
"If hotel booking fails, try to find flights for different dates and repeat the process.")
.addTool(flightTool) // Register the agents as tools
.addTool(hotelTool)
.build();
// Run the orchestrator
return travelOrchestrator.execute("Plan a trip to Rome for next week.");
}
/**
* Example 4: Complex Multi-Agent System
* Combines sequential processing with loop validation.
*/
public String complexAgentSystem() {
// Requirements analyzer
Agent requirementAnalyzer = LLMAgent.builder()
.modelClient(llm)
.systemMessage("Analyze user requirements and extract key components.")
.build();
// Solution generator
Agent solutionGenerator = LLMAgent.builder()
.modelClient(llm)
.systemMessage("Generate a solution based on analyzed requirements.")
.build();
// Quality validator
Agent qualityValidator = LLMAgent.builder()
.modelClient(llm)
.systemMessage(
"Validate the solution quality. Return JSON with status: " +
"{\"status\": \"COMPLETE\"} if good, {\"status\": \"REVISE\", \"feedback\": \"issues\"} if needs work.")
.build();
// Create a sequential workflow for analysis and generation
SequentialAgent analysisAndGeneration = SequentialAgent.builder()
.agent(requirementAnalyzer)
.agent(solutionGenerator)
.build();
// Create a loop for iterative improvement
LoopAgent qualityLoop = LoopAgent.builder()
.worker(analysisAndGeneration)
.evaluator(qualityValidator)
.stopCondition(LoopStatus.COMPLETE)
.maxIterations(5)
.build();
return qualityLoop.execute("I need a comprehensive project management solution for a remote team of 20 developers.");
}
/**
* Example 5: Multimodal Agent Usage
* Shows how to work with images and text.
*/
public String multimodalExample(byte[] imageData) {
Agent imageAnalyzer = LLMAgent.builder()
.modelClient(llm)
.systemMessage("Analyze the provided image and describe what you see in detail.")
.build();
Agent contentWriter = LLMAgent.builder()
.modelClient(llm)
.systemMessage("Write engaging content based on the image analysis.")
.build();
// First analyze the image
String imageAnalysis = imageAnalyzer.execute("Describe this image:", imageData);
// Then generate content based on analysis
return contentWriter.execute("Create marketing content based on this analysis: " + imageAnalysis);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples/domain/RoutedMessage.java
|
package ai.driftkit.workflows.examples.domain;
import ai.driftkit.workflows.examples.workflows.RouterWorkflow.Route;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* Represents a message with embedded routing information in JSON format.
* Used when a client wants to send a message with explicit routes in a single JSON.
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public class RoutedMessage {
private String message;
private List<Route> routes;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples/workflows/ChatWorkflow.java
|
package ai.driftkit.workflows.examples.workflows;
import ai.driftkit.common.domain.*;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.workflows.core.chat.ChatMemory;
import ai.driftkit.workflows.core.chat.Message;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.clients.core.ModelClientFactory;
import ai.driftkit.clients.openai.client.OpenAIModelClient;
import ai.driftkit.context.core.util.PromptUtils;
import ai.driftkit.vector.spring.domain.Index;
import ai.driftkit.vector.spring.service.IndexService;
import ai.driftkit.workflows.core.domain.*;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.workflows.examples.domain.RoutedMessage;
import ai.driftkit.workflows.spring.ModelWorkflow;
import ai.driftkit.workflows.spring.ModelRequestParams;
import ai.driftkit.workflows.examples.workflows.ChatWorkflow.ChatResult;
import ai.driftkit.workflows.examples.workflows.ChatWorkflow.ChatStartEvent;
import ai.driftkit.workflows.examples.workflows.RouterWorkflow.*;
import ai.driftkit.vector.core.domain.Document;
import ai.driftkit.vector.core.domain.DocumentsResult;
import ai.driftkit.workflows.spring.service.*;
import ai.driftkit.common.utils.JsonUtils;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import java.nio.charset.Charset;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class ChatWorkflow extends ModelWorkflow<ChatStartEvent, ChatResult> {
public static final String STARTING_PROMPT = "Please respond to the user request [{{query}}], think step by step. Your response MUST be in [{{language}}] language";
public static final String CURRENT_MESSAGE_NAME = "currentMessage";
private final VaultConfig modelConfig;
private final ImageModelService imageService;
private final ChatService chatService;
private final IndexService indexService;
private final LLMMemoryProvider memoryProvider;
public static final String RAG_CHAT_WITH_CONTEXT = "chat_with_context";
public ChatWorkflow(EtlConfig config, PromptService promptService, ModelRequestService modelRequestService, ChatService chatService, TasksService tasksService, ImageModelService imageService, IndexService indexService) throws Exception {
super(ModelClientFactory.fromConfig(config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow()),
modelRequestService,
promptService);
this.modelConfig = config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow();
this.chatService = chatService;
this.imageService = imageService;
this.indexService = indexService;
this.memoryProvider = new LLMMemoryProvider(chatService, tasksService);
promptService.createIfNotExists(
PromptService.DEFAULT_STARTING_PROMPT,
STARTING_PROMPT,
null,
false,
Language.GENERAL
);
promptService.createIfNotExists(
RAG_CHAT_WITH_CONTEXT,
IOUtils.resourceToString("/prompts/dictionary/rag/%s.prompt".formatted(RAG_CHAT_WITH_CONTEXT), Charset.defaultCharset()),
null,
true,
Language.GENERAL
);
}
@Override
public Class<ChatStartEvent> getInputType() {
return ChatStartEvent.class;
}
@Override
public Class<ChatResult> getOutputType() {
return ChatResult.class;
}
@Step(name = "start")
@StepInfo(description = "Starting step of the workflow")
public ExternalEvent<RouterStartEvent> start(ChatStartEvent startEvent, WorkflowContext workflowContext) throws Exception {
MessageTask task = startEvent.getTask();
Chat chat = getChatOrCreate(startEvent);
// Parse message to extract routes and actual message content
MessageWithRoutes parsedMessage = parseMessage(task.getMessage());
String query = getQuery(parsedMessage.getMessage(), task, chat.getLanguage());
List<Index> indexes = indexService.getIndexList().stream()
.filter(e -> !e.isDisabled())
.filter(e -> e.getLanguage() == Language.GENERAL || e.getLanguage() == chat.getLanguage())
.collect(Collectors.toList());
log.info("Executing start step with query: {}, chatId: {}", query, chat.getChatId());
ChatMemory chatMemory = memoryProvider.get(chat.getChatId());
task.setMessage(query);
Message message = new Message(
task.getMessageId(),
query,
ChatMessageType.USER,
MessageType.TEXT,
null,
null,
null,
null,
null,
task.getCreatedTime(),
task.getCreatedTime(),
null
);
chatMemory.add(message);
memoryProvider.update(chat.getChatId(), List.of(task));
workflowContext.put("query", query);
workflowContext.put("currentMessage", task);
workflowContext.put("context", chatMemory);
// Use routes extracted from message
return new ExternalEvent<>(
RouterWorkflow.class,
new RouterStartEvent(chatMemory.messages(), parsedMessage.getRoutes(), indexes),
"processResponse"
);
}
@Step(name = "processResponse")
@StepInfo(description = "Check router result")
public WorkflowEvent processResponse(DataEvent<RouterResult> routerResult, WorkflowContext workflowContext) throws Exception {
RouterResult result = routerResult.getResult();
String model = Optional.ofNullable(modelConfig.getModel()).orElse(OpenAIModelClient.GPT_DEFAULT);
String query = workflowContext.get("query");
MessageTask task = workflowContext.get(CURRENT_MESSAGE_NAME);
Set<RouterDefaultInputTypes> inputTypes = result.getInputTypes()
.stream()
.map(RouterDecision::getDecision)
.collect(Collectors.toSet());
if (inputTypes.contains(RouterDefaultInputTypes.CUSTOM)) {
return StopEvent.ofObject(ChatResult.create(result));
} else if (inputTypes.contains(RouterDefaultInputTypes.ESCALATION) || inputTypes.contains(RouterDefaultInputTypes.PRODUCT_ISSUE)) {
return StopEvent.ofObject(ChatResult.create(result));
} else if (inputTypes.contains(RouterDefaultInputTypes.IMAGE_GENERATION)) {
MessageTask messageTask = imageService.generateImage(task, query, 1);
return StopEvent.ofObject(ChatResult.createImage(result, messageTask.getImageTaskId()));
}
Map<String, Object> variables = task.getVariables();
ModelTextResponse response;
// Use related documents to improve answer if available
if (CollectionUtils.isNotEmpty(result.getRelatedDocs())) {
log.info("Using RAG with {} related document collections", result.getRelatedDocs().size());
// Extract and format context from related documents
StringBuilder contextBuilder = new StringBuilder();
for (DocumentsResult docsResult : result.getRelatedDocs()) {
for (Document doc : docsResult.documents()) {
contextBuilder.append("--- Source: ").append(doc.getId()).append(" ---\n");
contextBuilder.append(doc.getPageContent()).append("\n\n");
}
}
String context = contextBuilder.toString();
// Add context to variables for the prompt
Map<String, Object> ragVariables = new HashMap<>(variables);
ragVariables.put("context", context);
ragVariables.put("query", query);
// Use RAG-specific prompt with context
response = sendTextToText(
ModelRequestParams.create()
.setPromptId(RAG_CHAT_WITH_CONTEXT)
.setVariables(ragVariables)
.setModel(model),
workflowContext);
} else {
// Standard query without context
response = sendPromptText(
ModelRequestParams.create()
.setPromptText(query)
.setVariables(variables),
workflowContext);
}
return StopEvent.ofObject(ChatResult.create(result, response.getResponse()));
}
/**
* Parse the message to extract potential routing information
* @param messageContent Original message content
* @return MessageWithRoutes containing the actual message and any routes found
*/
private MessageWithRoutes parseMessage(String messageContent) {
if (!JsonUtils.isJSON(messageContent)) {
return MessageWithRoutes.of(messageContent);
}
try {
RoutedMessage routedMessage = JsonUtils.safeParse(messageContent, RoutedMessage.class);
// If valid RoutedMessage with content, extract message and routes
if (routedMessage != null && StringUtils.isNotBlank(routedMessage.getMessage())) {
return new MessageWithRoutes(
routedMessage.getMessage(),
CollectionUtils.isEmpty(routedMessage.getRoutes()) ? Collections.emptyList() : routedMessage.getRoutes()
);
}
} catch (Exception e) {
log.error("Failed to parse message as RoutedMessage: {}", e.getMessage());
}
// Default to original message with no routes
return MessageWithRoutes.of(messageContent);
}
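// Illustrative sketch (not part of the original source): a message payload that
// parseMessage() unpacks into plain text plus explicit routes. The exact shape of a
// Route is defined in RouterWorkflow and is not reproduced here.
//
//     {
//       "message": "Where is my order?",
//       "routes": [ ... ]
//     }
//
// A non-JSON message is passed through unchanged with an empty route list.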
private String getQuery(String message, MessageTask task, Language language) {
if (MapUtils.isNotEmpty(task.getVariables())) {
message = PromptUtils.applyVariables(message, task.getVariables());
}
Prompt check = promptService.getCurrentPromptOrThrow(PromptService.DEFAULT_STARTING_PROMPT, Language.GENERAL);
message = check.applyVariables(Map.of(
"query", message,
"language", language.name()
));
return message;
}
private Chat getChatOrCreate(ChatStartEvent startEvent) {
MessageTask task = startEvent.getTask();
if (StringUtils.isBlank(task.getChatId())) {
return chatService.createChat(
ChatRequest.builder()
.id(UUID.randomUUID().toString())
.memoryLength(startEvent.getMemoryLength())
.language(task.getLanguage())
.name(task.getMessage())
.build()
);
} else {
Optional<Chat> chat = chatService.getChat(task.getChatId());
if (chat.isEmpty()) {
return chatService.createChat(
ChatRequest.builder()
.id(task.getChatId())
.memoryLength(startEvent.getMemoryLength())
.language(task.getLanguage())
.name(task.getMessage())
.build()
);
}
return chat.orElseThrow();
}
}
/**
* Helper class to hold parsed message content and routes
*/
@Data
@AllArgsConstructor
private static class MessageWithRoutes {
private String message;
private List<Route> routes;
public static MessageWithRoutes of(String message) {
return new MessageWithRoutes(message, Collections.emptyList());
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class ChatResult {
RouterResult route;
String responce;
String imageId;
public static ChatResult create(RouterResult route) {
return new ChatResult(route, null, null);
}
public static ChatResult create(RouterResult route, String responce) {
return new ChatResult(route, responce, null);
}
public static ChatResult createImage(RouterResult route, String imageId) {
return new ChatResult(route, null, imageId);
}
}
@Data
public static class ChatStartEvent extends StartEvent {
private MessageTask task;
private int memoryLength;
@Builder
public ChatStartEvent(MessageTask task, int memoryLength) {
this.task = task;
this.memoryLength = memoryLength;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples/workflows/RAGModifyWorkflow.java
|
package ai.driftkit.workflows.examples.workflows;
import ai.driftkit.clients.core.ModelClientFactory;
import ai.driftkit.clients.openai.client.OpenAIModelClient;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.context.core.util.PromptUtils;
import ai.driftkit.embedding.core.service.EmbeddingFactory;
import ai.driftkit.vector.core.domain.EmbeddingVectorStore;
import ai.driftkit.vector.core.service.VectorStoreFactory;
import ai.driftkit.vector.spring.domain.ParsedContent;
import ai.driftkit.vector.spring.parser.UnifiedParser;
import ai.driftkit.vector.spring.parser.UnifiedParser.ByteArrayParserInput;
import ai.driftkit.vector.spring.parser.UnifiedParser.ParserInput;
import ai.driftkit.vector.spring.parser.UnifiedParser.StringParserInput;
import ai.driftkit.vector.spring.parser.UnifiedParser.YoutubeIdParserInput;
import ai.driftkit.workflows.core.domain.*;
import ai.driftkit.workflows.spring.ModelWorkflow;
import ai.driftkit.workflows.spring.ModelRequestParams;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.vector.core.domain.Document;
import ai.driftkit.common.utils.DocumentSplitter;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import ai.driftkit.embedding.core.domain.Embedding;
import ai.driftkit.embedding.core.domain.TextSegment;
import ai.driftkit.embedding.core.service.EmbeddingModel;
import ai.driftkit.embedding.core.domain.Response;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
@Slf4j
public class RAGModifyWorkflow extends ModelWorkflow<StartEvent, RAGModifyWorkflow.DocumentSaveResult> {
private ThreadPoolExecutor exec;
private UnifiedParser parser;
private EmbeddingModel embeddingModel;
private EmbeddingVectorStore vectorStore;
public RAGModifyWorkflow(EtlConfig config, PromptService promptService, ModelRequestService modelRequestService) throws Exception {
super(ModelClientFactory.fromConfig(config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow()),
modelRequestService,
promptService);
this.parser = new UnifiedParser(config);
this.embeddingModel = EmbeddingFactory.fromName(
config.getEmbedding().getName(),
config.getEmbedding().getConfig()
);
this.vectorStore = (EmbeddingVectorStore) VectorStoreFactory.fromConfig(config.getVectorStore());
Integer storingThreads = config.getVectorStore().getInt(EtlConfig.VECTOR_STORE_STORING_THREADS, 1);
this.exec = new ThreadPoolExecutor(
0,
storingThreads,
60L, TimeUnit.SECONDS,
new SynchronousQueue<>(),
new ThreadPoolExecutor.CallerRunsPolicy()
);
}
@Step
@StepInfo(description = "Parse input")
public DataEvent<ParsedContent> parseInput(DocumentsEvent startEvent, WorkflowContext workflowContext) throws Exception {
ParserInput document = startEvent.getInput();
ParsedContent parsed = parser.parse(document);
Map<String, Object> metadata = new HashMap<>();
metadata.put("contentType", document.getContentType());
metadata.put("parsedResultLength", parsed.getParsedContent().length());
if (document instanceof YoutubeIdParserInput youtubeInput) {
metadata.put("videoId", youtubeInput.getVideoId());
List<String> languages = new ArrayList<>();
languages.add(youtubeInput.getPrimaryLang());
if (CollectionUtils.isNotEmpty(youtubeInput.getInput())) {
languages.addAll(youtubeInput.getInput());
}
metadata.put("languages", languages);
} else if (document instanceof ByteArrayParserInput byteInput) {
metadata.put("fileSize", byteInput.getInput().length);
metadata.put("fileName", byteInput.getFileName());
} else if (document instanceof StringParserInput stringInput) {
metadata.put("stringHash", PromptUtils.hashString(stringInput.getInput()));
metadata.put("stringLength", stringInput.getInput().length());
}
workflowContext.put("metadata", metadata);
workflowContext.put("index", startEvent.getIndex());
return new DataEvent<>(parsed, "ingestDocument");
}
@Step
@StepInfo(description = "Ingest documents and store them into the vector store")
public StopEvent<DocumentSaveResult> ingestDocument(DataEvent<ParsedContent> documentEvent, WorkflowContext workflowContext) throws Exception {
String index = workflowContext.get("index");
Map<String, Object> metadata = workflowContext.get("metadata");
ParsedContent parsed = documentEvent.getResult();
int chunkSize = 512;
int overlap = 128;
List<String> chunks = DocumentSplitter.splitDocumentIntoShingles(parsed.getParsedContent(), chunkSize, overlap);
String id = parsed.getId();
log.info("Ingesting documents: {}", id);
metadata.put("totalChunks", chunks.size());
AtomicInteger counter = new AtomicInteger();
List<Callable<Document>> tasks = chunks.stream()
.map(chunk -> (Callable<Document>) () -> {
Response<Embedding> embeddingResp = embeddingModel.embed(TextSegment.from(chunk));
float[] vector = embeddingResp.content().vector();
Map<String, Object> newMeta = new HashMap<>(metadata);
int idx = counter.incrementAndGet();
newMeta.put("docIndex", idx);
Map<String, Object> currentStatus = new HashMap<>(metadata);
currentStatus.put("totalChunksProcessed", idx);
workflowContext.put("metadata", currentStatus);
return new Document(id + "-" + idx, vector, chunk, newMeta);
})
.collect(Collectors.toList());
List<Future<Document>> futures = exec.invokeAll(tasks);
List<Document> docsToAdd = new ArrayList<>();
for (Future<Document> future : futures) {
docsToAdd.add(future.get());
}
workflowContext.put("metadata", metadata);
log.info("Ingested documents: {}", docsToAdd.size());
vectorStore.addDocuments(index, docsToAdd);
return StopEvent.ofObject(new DocumentSaveResult(
id,
parsed
));
}
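// Illustrative sketch (not part of the original source): with chunkSize = 512 and
// overlap = 128 consecutive shingles share their boundary text (the exact splitting
// lives in DocumentSplitter). For a hypothetical 1000-character document the chunks
// would cover roughly [0..512), [384..896) and [768..1000), and each one is embedded
// and stored under the id "<parsedId>-<docIndex>".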
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class DocumentSaveResult {
String id;
ParsedContent content;
}
@Override
public Class<DocumentSaveResult> getOutputType() {
return DocumentSaveResult.class;
}
@Data
public static class DocumentsEvent<T extends ParserInput<?>> extends StartEvent {
private T input;
private String index;
@Builder
public DocumentsEvent(T input, String index) {
this.input = input;
this.index = index;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples/workflows/RAGSearchWorkflow.java
|
package ai.driftkit.workflows.examples.workflows;
import ai.driftkit.clients.openai.client.OpenAIModelClient;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.clients.core.ModelClientFactory;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.embedding.core.domain.Embedding;
import ai.driftkit.embedding.core.domain.TextSegment;
import ai.driftkit.embedding.core.service.EmbeddingFactory;
import ai.driftkit.vector.core.domain.Document;
import ai.driftkit.vector.core.domain.DocumentsResult;
import ai.driftkit.vector.core.domain.EmbeddingVectorStore;
import ai.driftkit.vector.core.service.VectorStoreFactory;
import ai.driftkit.workflows.core.domain.*;
import ai.driftkit.workflows.spring.ModelWorkflow;
import ai.driftkit.workflows.spring.ModelRequestParams;
import ai.driftkit.workflows.examples.workflows.RAGSearchWorkflow.VectorStoreStartEvent;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import ai.driftkit.embedding.core.domain.Response;
import ai.driftkit.embedding.core.service.EmbeddingModel;
import lombok.Builder;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import java.nio.charset.Charset;
import java.util.*;
@Slf4j
public class RAGSearchWorkflow extends ModelWorkflow<VectorStoreStartEvent, DocumentsResult> {
public static final String RAG_RERANK_METHOD = "rerank";
private String queryPrefix;
private EmbeddingModel embeddingModel;
private EmbeddingVectorStore vectorStore;
private VaultConfig modelConfig;
public RAGSearchWorkflow(EtlConfig config, PromptService promptService, ModelRequestService modelRequestService) throws Exception {
super(ModelClientFactory.fromConfig(config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow()),
modelRequestService,
promptService);
this.embeddingModel = EmbeddingFactory.fromName(
config.getEmbedding().getName(),
config.getEmbedding().getConfig()
);
this.vectorStore = (EmbeddingVectorStore) VectorStoreFactory.fromConfig(config.getVectorStore());
this.queryPrefix = config.getEmbedding().get(EtlConfig.BASE_QUERY, "Instruct: Retrieve semantically similar text.\\nQuery: ");
this.modelConfig = config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow();
promptService.createIfNotExists(
RAG_RERANK_METHOD,
IOUtils.resourceToString("/prompts/dictionary/rag/%s.prompt".formatted(RAG_RERANK_METHOD), Charset.defaultCharset()),
null,
true,
Language.GENERAL
);
}
@Step
@StepInfo(description = "First search request")
public WorkflowEvent start(VectorStoreStartEvent startEvent, WorkflowContext workflowContext) throws Exception {
String query = startEvent.getQuery();
log.info("Executing start step with query: {}", query);
Response<Embedding> embeddingResp = embeddingModel.embed(TextSegment.from(queryPrefix + query));
float[] queryVector = embeddingResp.content().vector();
DocumentsResult documents = vectorStore.findRelevant(startEvent.getIndexName(), queryVector, startEvent.getLimit());
workflowContext.add("query", query);
workflowContext.add("retrievedDocuments", documents);
if (documents.isEmpty()) {
return StopEvent.ofObject(documents);
}
return new DataEvent<>(documents, "rerank");
}
@Step
@StepInfo(description = "Rerank request")
public DataEvent<DocumentsResult> rerank(DataEvent<DocumentsResult> dataEvent, WorkflowContext workflowContext) throws Exception {
DocumentsResult retrievedDocuments = dataEvent.getResult();
List<String> query = workflowContext.get("query");
DocumentsResult rerankedDocuments = rerankDocuments(retrievedDocuments, query.getFirst(), workflowContext);
return new DataEvent<>(rerankedDocuments, "finalStep");
}
@Step
@StepInfo(description = "Return results")
public StopEvent<DocumentsResult> finalStep(DataEvent<DocumentsResult> event, WorkflowContext workflowContext) throws Exception {
DocumentsResult finalDocuments = event.getResult();
return StopEvent.ofObject(finalDocuments);
}
private DocumentsResult rerankDocuments(
DocumentsResult retrievedDocuments,
String query,
WorkflowContext workflowContext) throws Exception {
StringBuilder promptBuilder = new StringBuilder();
Map<String, Document> docIdMap = new HashMap<>();
for (Document doc : retrievedDocuments.documents()) {
promptBuilder.append("Document ID ").append(doc.getId()).append(":\n");
promptBuilder.append(doc.getPageContent()).append("\n\n");
docIdMap.put(doc.getId(), doc);
}
String docsText = promptBuilder.toString();
Map<String, Object> variables = Map.of("query", query, "documents", docsText);
ModelTextResponse response = sendTextToText(
ModelRequestParams.create()
.setPromptId(RAG_RERANK_METHOD)
.setVariables(variables),
workflowContext);
String responseText = response.getResponse();
ObjectMapper objectMapper = new ObjectMapper();
Map<String, Float> scoresMap;
try {
TypeReference<Map<String, Float>> typeRef = new TypeReference<>() {};
scoresMap = objectMapper.readValue(responseText, typeRef);
} catch (Exception e) {
throw new Exception("Failed to parse JSON from model response: " + responseText, e);
}
DocumentsResult rerankedDocuments = new DocumentsResult();
scoresMap.entrySet().stream()
.sorted(Map.Entry.<String, Float>comparingByValue().reversed())
.forEach(entry -> {
Document doc = docIdMap.get(entry.getKey());
if (doc != null) {
rerankedDocuments.put(doc, entry.getValue());
}
});
return rerankedDocuments;
}
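// Illustrative sketch (not part of the original source): rerankDocuments() expects the
// model to answer with a flat JSON object mapping document ids to relevance scores,
// for example
//
//     {"doc-42": 0.91, "doc-7": 0.55, "doc-13": 0.12}
//
// Entries are sorted by score (descending) and written back into a DocumentsResult.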
@Data
public static class VectorStoreStartEvent extends StartQueryEvent {
private String indexName;
private int limit;
@Builder
public VectorStoreStartEvent(String indexName, String query, int limit) {
super(query);
this.indexName = indexName;
this.limit = limit;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples/workflows/ReasoningWorkflow.java
|
package ai.driftkit.workflows.examples.workflows;
import ai.driftkit.clients.openai.client.OpenAIModelClient;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.MessageTask;
import ai.driftkit.common.domain.Prompt;
import ai.driftkit.common.domain.client.ModelImageResponse;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.common.domain.client.Role;
import ai.driftkit.clients.core.ModelClientFactory;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.workflows.core.domain.*;
import ai.driftkit.workflows.spring.ModelWorkflow;
import ai.driftkit.workflows.spring.ModelRequestParams;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.*;
@Slf4j
public class ReasoningWorkflow extends ModelWorkflow<StartEvent, JsonNode> {
public static final String STARTING_PROMPT = "Please respond to the user request [{{query}}], think step by step. Your response MUST be in [{{language}}] language";
public static final String REASONING_METHOD = "reasoning";
public static final String CHECK_RESULT_METHOD = "check_result";
public static final String REPLAY_RESPONSE_AFTER_CHECK_METHOD = "replay_response_after_check";
public static final String NEXT_ACTION = "next_action";
public static final String RESULT = "result";
public static final String CONTEXT = "context";
public static final String FINAL_STEP = "finalStep";
public static final String CHECK_STEP = "checkStep";
public static final String CONTENT = "content";
public static final String NEXT_STEP = "nextStep";
public static final String FINAL_ANSWER = "final_answer";
public static final String CONTINUE = "continue";
public static final String ERRORS = "errors";
private final VaultConfig modelConfig;
public ReasoningWorkflow(EtlConfig config, PromptService promptService, ModelRequestService modelRequestService) throws IOException {
super(ModelClientFactory.fromConfig(config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow()),
modelRequestService,
promptService);
this.modelConfig = config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow();
for (String method : new String[]{
REASONING_METHOD,
CHECK_RESULT_METHOD,
REPLAY_RESPONSE_AFTER_CHECK_METHOD
}) {
promptService.createIfNotExists(
method,
IOUtils.resourceToString("/prompts/dictionary/reasoning/%s.prompt".formatted(method), Charset.defaultCharset()),
null,
true,
Language.GENERAL
);
}
promptService.createIfNotExists(
PromptService.DEFAULT_STARTING_PROMPT,
STARTING_PROMPT,
null,
false,
Language.GENERAL
);
}
@Override
public Class<JsonNode> getOutputType() {
return JsonNode.class;
}
@Step(name = "start")
@StepInfo(description = "Starting step of the workflow")
public DataEvent<JsonNode> start(StartEvent startEvent, WorkflowContext workflowContext) throws Exception {
String query;
Map<String, Object> variables = Collections.emptyMap();
Language language = Language.GENERAL;
if (startEvent instanceof StartQueryEvent) {
query = ((StartQueryEvent) startEvent).getQuery();
} else if (startEvent instanceof LLMRequestEvent llmRequestEvent) {
MessageTask task = llmRequestEvent.getTask();
query = task.getMessage();
if (task.getVariables() != null) {
variables = task.getVariables();
}
language = Optional.ofNullable(task.getLanguage()).orElse(Language.GENERAL);
// Store the current MessageTask in the workflow context to access logprobs later
workflowContext.put("currentMessage", task);
} else {
throw new IllegalArgumentException("Unexpected event type: " + startEvent.getClass());
}
log.info("Executing start step with query: {} {}", query, variables);
if (language != Language.GENERAL) {
Prompt check = promptService.getCurrentPromptOrThrow(PromptService.DEFAULT_STARTING_PROMPT, Language.GENERAL);
query = check.applyVariables(Map.of(
"query", query,
"language", language.name()
));
}
Prompt prompt = promptService.getCurrentPromptOrThrow(REASONING_METHOD, language);
JsonNode response = processReasoningQuery(query + " " + prompt.getMessage(), variables, workflowContext);
return new DataEvent<>(response, NEXT_STEP);
}
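    // Dispatch logic for nextStep, based on the model's "next_action" field:
    //   - a "response" field before the final answer -> route to checkStep for verification
    //   - next_action == "continue"                  -> request the next reasoning step
    //   - next_action missing                        -> treat unformatted output as a candidate answer, otherwise continue
    //   - next_action == "final_answer"              -> ask for the final answer, then verify it in checkStep
    //   - next_action == "none"                      -> finish via finalStep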
@Step(name = NEXT_STEP, invocationLimit = 5)
@StepInfo(description = "Processes the next step based on the 'next_action'")
public DataEvent<JsonNode> nextStep(DataEvent<JsonNode> event, WorkflowContext workflowContext) throws Exception {
JsonNode result = event.getResult();
log.info("Executing nextStep with event: {}", result);
workflowContext.add(CONTEXT, result);
if (isResponseBeforeFinal(result)) {
return new DataEvent<>(result, CHECK_STEP);
} else if (isContinue(result)) {
// Proceed to the next reasoning step. Do not repeat the previous response.
JsonNode resultContinue = processReasoningQuery(CONTINUE, Collections.emptyMap(), workflowContext);
return new DataEvent<>(resultContinue, NEXT_STEP);
} else if (!result.has(NEXT_ACTION)) {
if (isUnformattedResult(result)) {
return new DataEvent<>(result, CHECK_STEP);
}
JsonNode resultContinue = processReasoningQuery(CONTINUE, Collections.emptyMap(), workflowContext);
return new DataEvent<>(resultContinue, NEXT_STEP);
} else if (isFinal(result)) {
JsonNode resultFinal = processReasoningQuery(FINAL_ANSWER, Collections.emptyMap(), workflowContext);
workflowContext.add(CONTEXT, resultFinal);
return new DataEvent<>(resultFinal, CHECK_STEP);
} else if (isCompleted(result)) {
return new DataEvent<>(result, FINAL_STEP);
}
throw new RuntimeException("Unexpected step in result [" + result + "]");
}
@Step(name = "replayStep", invocationLimit = 5)
@StepInfo(description = "Replay result according to check result")
public WorkflowEvent replayStep(DataEvent<JsonNode> event, WorkflowContext workflowContext) throws Exception {
JsonNode lastMsg = workflowContext.getLastContext(ERRORS);
log.info("Executing replayStep with event: {} {}", event.getResult(), lastMsg);
Prompt replay = promptService.getCurrentPromptOrThrow(REPLAY_RESPONSE_AFTER_CHECK_METHOD, Language.GENERAL);
String replayMsg = replay.applyVariables(Map.of(
ERRORS, lastMsg == null ?
"IMPORTANT!: Reconsider the initial (first request) because the result is not correct. It doesn't follow the required json structure." :
String.valueOf(lastMsg.get("reason"))
));
JsonNode result = processReasoningQuery(replayMsg, Collections.emptyMap(), workflowContext);
if (isContinue(result)) {
return new DataEvent<>(result, NEXT_STEP);
}
workflowContext.add(CONTEXT, result);
if (isUnformattedResult(result)) {
return new DataEvent<>(result, CHECK_STEP);
}
return new DataEvent<>(result, "replayStep");
}
@Step(name = CHECK_STEP, invocationLimit = 5, onInvocationsLimit = OnInvocationsLimit.STOP)
@StepInfo(description = "Check step of the final result")
public WorkflowEvent checkStep(DataEvent<JsonNode> event, WorkflowContext workflowContext) throws Exception {
JsonNode result = event.getResult();
log.info("Executing checkStep with event: {}", result);
if (isContinue(result)) {
return new DataEvent<>(result, NEXT_STEP);
}
Prompt check = promptService.getCurrentPromptOrThrow(CHECK_RESULT_METHOD, Language.GENERAL);
ModelTextResponse modelTextResponse = sendQuery(check.getMessage(), Collections.emptyMap(), workflowContext, false);
String finalResult = result.toString();
JsonNode checkResult = modelTextResponse.getResponseJson();
workflowContext.add(CONTEXT, checkResult);
if (!checkResult.has(RESULT)) {
log.warn("[reasoning] Check failed result is {} for {}", checkResult, workflowContext.getContext());
return StopEvent.ofJson(finalResult);
}
boolean checkStatus = checkResult.get(RESULT).asBoolean();
if (checkStatus) {
return new DataEvent<>(result, FINAL_STEP);
}
workflowContext.add(ERRORS, checkResult);
return new DataEvent<>(result, "replayStep");
}
@Step(name = FINAL_STEP)
@StepInfo(description = "Final step of the workflow")
public StopEvent<String> finalStep(DataEvent<JsonNode> event, WorkflowContext workflowContext) throws JsonProcessingException {
log.info("Executing finalStep with event: {}", event.getResult());
JsonNode resultNode = event.getResult();
String finalResult = resultNode.toString();
return StopEvent.ofJson(finalResult);
}
@NotNull
private JsonNode processReasoningQuery(String query, Map<String, Object> variables, WorkflowContext workflowContext) throws Exception {
ModelTextResponse modelTextResponse = sendQuery(query, variables, workflowContext, true);
return modelTextResponse.getResponseJson();
}
@SneakyThrows
private ModelTextResponse sendQuery(String query, Map<String, Object> variables, WorkflowContext workflowContext, boolean reasoning) {
// Convert context objects to ModelContentMessages for the conversation history
List<ModelImageResponse.ModelContentMessage> contextMessages = workflowContext.getOrDefault(CONTEXT, Collections.emptyList())
.stream()
.map(e -> {
JsonNode nextAction = e instanceof JsonNode node ? node.get(NEXT_ACTION) : null;
if (nextAction == null) {
return ModelImageResponse.ModelContentMessage.create(Role.user, e.toString());
} else {
return ModelImageResponse.ModelContentMessage.create(Role.assistant, e.toString());
}
})
.toList();
List<ModelImageResponse.ModelContentMessage> messages = new ArrayList<>();
if (!reasoning) {
Prompt prompt = promptService.getCurrentPromptOrThrow(REASONING_METHOD, Language.GENERAL);
messages.add(ModelImageResponse.ModelContentMessage.create(Role.system, prompt.getMessage()));
}
messages.addAll(contextMessages);
// Add the query to the context for future reference
workflowContext.add(CONTEXT, query);
// Use the sendPromptTextWithHistory method from ModelWorkflow which will handle all the details
ModelTextResponse response = sendPromptTextWithHistory(
ModelRequestParams.create()
.setPromptText(query)
.setContextMessages(messages)
.setVariables(variables),
workflowContext);
return response;
}
private static boolean isUnformattedResult(JsonNode result) {
return result.get(CONTENT) == null && !result.has(NEXT_ACTION);
}
private static boolean isResponseBeforeFinal(JsonNode result) {
JsonNode na = result.get(NEXT_ACTION);
return result.get("response") != null && (na == null || !CONTINUE.equals(na.asText()));
}
private static boolean isContinue(JsonNode result) {
JsonNode na = result.get(NEXT_ACTION);
return na != null && CONTINUE.equals(na.asText());
}
private static boolean isCompleted(JsonNode result) {
JsonNode na = result.get(NEXT_ACTION);
return na != null && "none".equals(na.asText());
}
private static boolean isFinal(JsonNode result) {
JsonNode na = result.get(NEXT_ACTION);
return na != null && FINAL_ANSWER.equals(na.asText());
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples
|
java-sources/ai/driftkit/driftkit-workflows-examples-core/0.8.1/ai/driftkit/workflows/examples/workflows/RouterWorkflow.java
|
package ai.driftkit.workflows.examples.workflows;
import ai.driftkit.clients.core.ModelClientFactory;
import ai.driftkit.clients.openai.client.OpenAIModelClient;
import ai.driftkit.common.domain.Language;
import ai.driftkit.workflows.core.chat.Message;
import ai.driftkit.common.domain.Prompt;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.common.utils.JsonUtils;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.vector.core.domain.DocumentsResult;
import ai.driftkit.vector.spring.domain.Index;
import ai.driftkit.workflows.core.domain.*;
import ai.driftkit.workflows.spring.ModelWorkflow;
import ai.driftkit.workflows.spring.ModelRequestParams;
import ai.driftkit.workflows.examples.workflows.RAGSearchWorkflow.VectorStoreStartEvent;
import ai.driftkit.workflows.examples.workflows.RouterWorkflow.RouterResult;
import ai.driftkit.workflows.examples.workflows.RouterWorkflow.RouterStartEvent;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.IOUtils;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class RouterWorkflow extends ModelWorkflow<RouterStartEvent, RouterResult> {
public static final String FINAL_STEP = "finalStep";
public static final String ROUTER_METHOD = "router";
private final VaultConfig modelConfig;
private final RAGSearchWorkflow searchWorkflow;
public RouterWorkflow(EtlConfig config, PromptService promptService, RAGSearchWorkflow searchWorkflow, ModelRequestService modelRequestService) throws IOException {
super(ModelClientFactory.fromConfig(config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow()),
modelRequestService,
promptService);
this.modelConfig = config.getModelConfig(OpenAIModelClient.OPENAI_PREFIX).orElseThrow();
this.searchWorkflow = searchWorkflow;
promptService.createIfNotExists(
ROUTER_METHOD,
IOUtils.resourceToString("/prompts/dictionary/router/%s.prompt".formatted(ROUTER_METHOD), Charset.defaultCharset()),
null,
true,
Language.GENERAL,
true
);
}
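    // The start step routes with the cheaper "mini" model first; if the router picks CUSTOM routes
    // that are missing or not among the routes declared in the request, the retry step repeats the
    // call with the stronger default model before falling through to finalStep.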
@Step(name = "start")
@StepInfo(description = "Starting step of the workflow")
public WorkflowEvent start(RouterStartEvent startEvent, WorkflowContext workflowContext) throws Exception {
Prompt check = promptService.getCurrentPromptOrThrow(ROUTER_METHOD, Language.GENERAL);
String model = Optional.ofNullable(modelConfig.getModelMini()).orElse(OpenAIModelClient.GPT_MINI_DEFAULT);
RouterResult routerResult = getRouterResult(startEvent, workflowContext, check, model);
if (routerResult.isInputType(RouterDefaultInputTypes.CUSTOM)) {
if (CollectionUtils.isEmpty(routerResult.getCustomRoutes())) {
return new DataEvent<>(startEvent, "retry");
}
Set<String> routes = startEvent.getCustomRoutes().stream()
.map(Route::getRoute)
.collect(Collectors.toSet());
boolean allFound = routerResult.getCustomRoutes().stream()
.allMatch(e -> routes.contains(e.getDecision()));
if (!allFound) {
return new DataEvent<>(startEvent, "retry");
}
}
return new DataEvent<>(routerResult, FINAL_STEP);
}
@Step(name = "retry")
@StepInfo(description = "Retry with smarter model")
public WorkflowEvent retry(DataEvent<RouterStartEvent> startEvent, WorkflowContext workflowContext) throws Exception {
Prompt check = promptService.getCurrentPromptOrThrow(ROUTER_METHOD, Language.GENERAL);
String model = Optional.ofNullable(modelConfig.getModel()).orElse(OpenAIModelClient.GPT_DEFAULT);
RouterResult routerResult = getRouterResult(startEvent.getResult(), workflowContext, check, model);
return new DataEvent<>(routerResult, FINAL_STEP);
}
@Step(name = FINAL_STEP)
@StepInfo(description = "Final step of the workflow")
public StopEvent<RouterResult> finalStep(DataEvent<RouterResult> event, WorkflowContext workflowContext) throws JsonProcessingException {
log.info("Executing finalStep with event: {}", event.getResult());
RouterResult resultNode = event.getResult();
return StopEvent.ofObject(resultNode);
}
@NotNull
private RouterResult getRouterResult(RouterStartEvent startEvent, WorkflowContext workflowContext, Prompt check, String model) throws JsonProcessingException {
Map<String, Object> variables = Map.of(
"query", startEvent.getQuery(),
"history", JsonUtils.toJson(startEvent.getMessages()),
"customRoutes", startEvent.getCustomRoutes(),
"defaultInputTypes", RouterDefaultInputTypes.routes(),
"routesDefault", List.of(RouterDefaultOutputTypes.values()),
"indexesList", startEvent.getIndexesList()
);
// Send the request with the specified model and prompt message
ModelTextResponse response = sendPromptText(
ModelRequestParams.create()
.setPromptText(check.getMessage())
.setVariables(variables)
.setTemperature(modelConfig.getTemperature())
.setModel(model),
workflowContext);
RouterResult routerResult = response.getResponseJson(RouterResult.class);
if (CollectionUtils.isNotEmpty(routerResult.getIndexes())) {
List<DocumentsResult> relatedDocs = new ArrayList<>();
routerResult.setRelatedDocs(relatedDocs);
for (RouterDecision<String> index : routerResult.getIndexes()) {
try {
StopEvent<DocumentsResult> docs = searchWorkflow.execute(new VectorStoreStartEvent(index.decision, startEvent.getQuery(), 10), workflowContext);
relatedDocs.add(docs.get());
} catch (Exception e) {
log.error("[router] Couldn't query index [%s] for query [%s]".formatted(index, startEvent.getQuery()), e);
}
}
}
return routerResult;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class Route {
private String route;
private String description;
}
@Data
public static class RouterStartEvent extends StartQueryEvent {
private List<Message> messages;
private List<Route> customRoutes;
private List<Index> indexesList;
private Message currentMessage;
@Builder
public RouterStartEvent(List<Message> messages, List<Route> customRoutes, List<Index> indexesList) {
super(messages.getLast().getMessage());
        this.messages = new ArrayList<>(messages);
        // Detach the current (last) message from the copied history instead of mutating the caller's list
        this.currentMessage = this.messages.remove(this.messages.size() - 1);
this.customRoutes = customRoutes;
this.indexesList = indexesList;
}
public List<Route> getCustomRoutes() {
return customRoutes == null ? Collections.emptyList() : customRoutes;
}
public List<Index> getIndexesList() {
return indexesList == null ? Collections.emptyList() : indexesList;
}
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class RouterDecision<T> {
@JsonAlias({"route", "type", "index"})
@JsonProperty("decision")
private T decision;
private double confidence;
}
@AllArgsConstructor
public enum RouterDefaultInputTypes {
GREETING("Initial greeting and contact establishment"),
INFORMATION_REQUEST("Requests for information (e.g., FAQs or KB search)"),
CLARIFICATION("Requests for further clarification / interactive dialogue"),
CHAT("Regular interactive dialogue"),
IMAGE_GENERATION("Image generation task"),
FEEDBACK("Feedback or requests to modify generated content"),
ESCALATION("Cases requiring human intervention or escalation"),
SALES_SUPPORT("Inquiries related to sales, marketing, or product info"),
PRODUCT_ISSUE("Product issue"),
CUSTOM("Only if some of the customRoutes found in initial query"),
UNKNOWN("Fallback when the type is unclear");
private final String description;
public static List<Route> routes() {
return Arrays.stream(values())
.map(e -> new Route(e.name(), e.description))
.collect(Collectors.toList());
}
}
public enum RouterDefaultOutputTypes {
RAG,
SUPPORT_REQUEST,
REDO_WITH_SMARTER_MODEL,
REASONING,
SALES_REQUEST,
CHAT
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class RouterResult {
private Set<RouterDecision<RouterDefaultInputTypes>> inputTypes;
private Set<RouterDecision<RouterDefaultOutputTypes>> routes;
private Set<RouterDecision<String>> customRoutes;
private Set<RouterDecision<String>> indexes;
private List<DocumentsResult> relatedDocs;
public boolean isInputType(RouterDefaultInputTypes type) {
return inputTypes.stream().anyMatch(e -> e.getDecision() == type);
}
public boolean isOutputType(RouterDefaultOutputTypes type) {
return routes.stream().anyMatch(e -> e.getDecision() == type);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring/controller/SimplifiedAgentController.java
|
package ai.driftkit.workflows.examples.spring.controller;
import ai.driftkit.workflows.examples.spring.service.SimplifiedAgentService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
/**
* REST controller demonstrating the simplified agent API.
* Provides easy-to-use endpoints for various agent types.
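 *
 * <p>Hypothetical request sketch (path and JSON field taken from the chat endpoint below;
 * the payload value is illustrative only):
 * <pre>{@code
 * POST /api/v1/simplified-agents/chat
 * Content-Type: application/json
 *
 * {"message": "Hello"}
 * }</pre>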
*/
@Slf4j
@RestController
@RequestMapping("/api/v1/simplified-agents")
@RequiredArgsConstructor
public class SimplifiedAgentController {
private final SimplifiedAgentService agentService;
/**
* Simple chat endpoint.
*/
@PostMapping("/chat")
public ResponseEntity<String> chat(@RequestBody ChatRequest request) {
try {
String response = agentService.chat(request.getMessage());
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Error in chat endpoint", e);
return ResponseEntity.internalServerError().body("Error: " + e.getMessage());
}
}
/**
* Travel planning with loop validation.
*/
@PostMapping("/travel/plan")
public ResponseEntity<String> planTravel(@RequestBody TravelRequest request) {
try {
String response = agentService.chat("Plan a travel to " + request.getDestination() + " for " + request.getDays() + " days");
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Error in travel planning endpoint", e);
return ResponseEntity.internalServerError().body("Error: " + e.getMessage());
}
}
/**
* Sequential content creation.
*/
@PostMapping("/content/create")
public ResponseEntity<String> createContent(@RequestBody ContentRequest request) {
try {
String response = agentService.chat("Create content about: " + request.getTopic());
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Error in content creation endpoint", e);
return ResponseEntity.internalServerError().body("Error: " + e.getMessage());
}
}
/**
* Business analysis with agent composition.
*/
@PostMapping("/business/analyze")
public ResponseEntity<String> analyzeBusinessIdea(@RequestBody BusinessRequest request) {
try {
String response = agentService.chat("Analyze this business idea: " + request.getBusinessIdea());
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Error in business analysis endpoint", e);
return ResponseEntity.internalServerError().body("Error: " + e.getMessage());
}
}
/**
* Quality-controlled content generation.
*/
@PostMapping("/content/quality")
public ResponseEntity<String> generateQualityContent(@RequestBody ContentRequest request) {
try {
String response = agentService.chat("Generate high-quality content about: " + request.getTopic());
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Error in quality content generation endpoint", e);
return ResponseEntity.internalServerError().body("Error: " + e.getMessage());
}
}
// Request DTOs
public static class ChatRequest {
private String message;
public String getMessage() { return message; }
public void setMessage(String message) { this.message = message; }
}
public static class TravelRequest {
private String destination;
private int days;
public String getDestination() { return destination; }
public void setDestination(String destination) { this.destination = destination; }
public int getDays() { return days; }
public void setDays(int days) { this.days = days; }
}
public static class ContentRequest {
private String topic;
public String getTopic() { return topic; }
public void setTopic(String topic) { this.topic = topic; }
}
public static class BusinessRequest {
private String businessIdea;
public String getBusinessIdea() { return businessIdea; }
public void setBusinessIdea(String businessIdea) { this.businessIdea = businessIdea; }
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring/service/ChatWorkflowService.java
|
package ai.driftkit.workflows.examples.spring.service;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.vector.spring.service.IndexService;
import ai.driftkit.workflows.examples.workflows.ChatWorkflow;
import ai.driftkit.workflows.spring.service.ChatService;
import ai.driftkit.workflows.spring.service.ImageModelService;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import ai.driftkit.workflows.spring.service.TasksService;
import org.springframework.stereotype.Service;
/**
* Spring wrapper for the framework-agnostic ChatWorkflow.
* This class handles Spring dependency injection and delegates to the core workflow.
*/
@Service
public class ChatWorkflowService extends ChatWorkflow {
public ChatWorkflowService(
EtlConfig config,
PromptService promptService,
ModelRequestService modelRequestService,
ChatService chatService,
TasksService tasksService,
ImageModelService imageService,
IndexService indexService
) throws Exception {
super(config, promptService, modelRequestService, chatService, tasksService, imageService, indexService);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring/service/RAGSearchWorkflowService.java
|
package ai.driftkit.workflows.examples.spring.service;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.workflows.examples.workflows.RAGSearchWorkflow;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import org.springframework.stereotype.Service;
/**
* Spring wrapper for the framework-agnostic RAGSearchWorkflow.
* This class handles Spring dependency injection and delegates to the core workflow.
*/
@Service
public class RAGSearchWorkflowService extends RAGSearchWorkflow {
public RAGSearchWorkflowService(
EtlConfig config,
PromptService promptService,
ModelRequestService modelRequestService
) throws Exception {
super(config, promptService, modelRequestService);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring/service/RouterWorkflowService.java
|
package ai.driftkit.workflows.examples.spring.service;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.workflows.examples.workflows.RouterWorkflow;
import ai.driftkit.workflows.examples.workflows.RAGSearchWorkflow;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import org.springframework.stereotype.Service;
import java.io.IOException;
/**
* Spring wrapper for the framework-agnostic RouterWorkflow.
* This class handles Spring dependency injection and delegates to the core workflow.
*/
@Service
public class RouterWorkflowService extends RouterWorkflow {
public RouterWorkflowService(
EtlConfig config,
PromptService promptService,
RAGSearchWorkflow searchWorkflow,
ModelRequestService modelRequestService
) throws IOException {
super(config, promptService, searchWorkflow, modelRequestService);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring
|
java-sources/ai/driftkit/driftkit-workflows-examples-spring-boot-starter/0.8.1/ai/driftkit/workflows/examples/spring/service/SimplifiedAgentService.java
|
package ai.driftkit.workflows.examples.spring.service;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.workflows.core.chat.ChatMemory;
import ai.driftkit.workflows.core.chat.TokenWindowChatMemory;
import ai.driftkit.workflows.core.chat.SimpleTokenizer;
import ai.driftkit.common.tools.Tool;
import ai.driftkit.common.tools.ToolCall;
import ai.driftkit.workflows.core.agent.AgentResponse;
import ai.driftkit.workflows.core.agent.LLMAgent;
import ai.driftkit.workflows.core.agent.ToolExecutionResult;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
/**
* Example service demonstrating the simplified LLMAgent API with
* tool calling and structured output support.
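 *
 * <p>Hypothetical usage sketch (method names as declared in this class; input strings are illustrative):
 * <pre>{@code
 * String answer = simplifiedAgentService.chat("What's the weather in Paris?");
 * Person person = simplifiedAgentService.extractPersonInfo("Jane Doe, 34, software engineer, jane@example.com");
 * }</pre>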
*/
@Slf4j
@Service
public class SimplifiedAgentService {
private final LLMAgent agent;
public SimplifiedAgentService(ModelClient modelClient, SimpleTokenizer tokenizer) {
// Create chat memory with 4000 token window
ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(4000, tokenizer);
this.agent = LLMAgent.builder()
.modelClient(modelClient)
.systemMessage("You are a helpful assistant")
.temperature(0.7)
.maxTokens(1000)
.chatMemory(chatMemory)
.build();
// Register example tools
registerExampleTools();
}
/**
* Simple chat - returns text response
*/
public String chat(String message) {
AgentResponse<String> response = agent.executeText(message);
return response.getText();
}
/**
* Chat with variables - returns text response
*/
public String chatWithVariables(String message, Map<String, Object> variables) {
AgentResponse<String> response = agent.executeText(message, variables);
return response.getText();
}
/**
* Get tool calls without execution - for manual tool execution
*/
public List<ToolCall> getToolCalls(String message) {
AgentResponse<List<ToolCall>> response = agent.executeForToolCalls(message);
return response.getToolCalls();
}
/**
* Execute a specific tool call manually
*/
public ToolExecutionResult executeToolCall(ToolCall toolCall) {
return agent.executeToolCall(toolCall);
}
/**
* Chat with automatic tool execution - returns typed tool results
*/
public List<ToolExecutionResult> chatWithTools(String message) {
AgentResponse<List<ToolExecutionResult>> response = agent.executeWithTools(message);
// Get typed results
List<ToolExecutionResult> results = response.getToolResults();
// Example of accessing typed tool results
for (ToolExecutionResult result : results) {
if (result.isSuccess()) {
log.info("Tool {} returned: {} (type: {})",
result.getToolName(),
result.getResult(),
result.getResultType().getSimpleName());
// Get typed result based on tool name
switch (result.getToolName()) {
case "getCurrentWeather" -> {
WeatherInfo weather = result.getTypedResult();
log.info("Weather in {}: {} degrees", weather.location(), weather.temperature());
}
case "searchDatabase" -> {
List<DatabaseRecord> records = result.getTypedResult();
log.info("Found {} database records", records.size());
}
}
}
}
return results;
}
/**
* Extract structured data from text
*/
public Person extractPersonInfo(String text) {
AgentResponse<Person> response = agent.executeStructured(text, Person.class);
return response.getStructuredData();
}
/**
* Extract structured data with user-controlled prompt
*/
public Company extractCompanyInfo(String text) {
String userMessage = "Extract company information including name, founded year, and CEO from the following text:\n\n" + text;
AgentResponse<Company> response = agent.executeStructured(userMessage, Company.class);
return response.getStructuredData();
}
/**
* Clear conversation history
*/
public void clearHistory() {
agent.clearHistory();
}
// Register example tools
private void registerExampleTools() {
agent.registerTool("getCurrentWeather", this, "Get current weather for a location")
.registerTool("searchDatabase", this, "Search database for records")
.registerTool("getCurrentTime", this, "Get current time in a specific timezone");
}
// Example tool methods
@Tool(name = "getCurrentWeather", description = "Get current weather for a location")
public WeatherInfo getCurrentWeather(String location) {
log.info("Getting weather for: {}", location);
// Simulate weather API call
return new WeatherInfo(location, 22.5, "Partly cloudy", 65);
}
@Tool(name = "searchDatabase", description = "Search database for records")
public List<DatabaseRecord> searchDatabase(String query, int limit) {
log.info("Searching database for: {} (limit: {})", query, limit);
// Simulate database search
return List.of(
new DatabaseRecord("1", "Record 1", "Description 1"),
new DatabaseRecord("2", "Record 2", "Description 2")
);
}
@Tool(name = "getCurrentTime", description = "Get current time in a specific timezone")
public String getCurrentTime(String timezone) {
log.info("Getting time for timezone: {}", timezone);
try {
return LocalDateTime.now()
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) + " " + timezone;
} catch (Exception e) {
return "Invalid timezone: " + timezone;
}
}
// Data classes for structured output and tool results
public record Person(String name, Integer age, String occupation, String email) {}
public record Company(String name, Integer foundedYear, String ceo, List<String> products) {}
public record WeatherInfo(String location, Double temperature, String description, Integer humidity) {}
public record DatabaseRecord(String id, String title, String description) {}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/autoconfigure/WorkflowAutoConfiguration.java
|
package ai.driftkit.workflows.autoconfigure;
import ai.driftkit.workflows.spring.config.AsyncConfig;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
/**
* Auto-configuration for workflow services.
*
* This configuration automatically sets up:
* - Component scanning for workflow services
* - MongoDB repositories for workflow persistence
* - Async configuration for workflow execution
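 *
 * <p>If needed, the whole configuration can be switched off with the standard Spring Boot
 * exclusion mechanism (a sketch, not project-specific documentation):
 * <pre>{@code
 * spring.autoconfigure.exclude=ai.driftkit.workflows.autoconfigure.WorkflowAutoConfiguration
 * }</pre>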
*/
@Slf4j
@AutoConfiguration(after = ai.driftkit.config.autoconfigure.EtlConfigAutoConfiguration.class)
@ComponentScan(basePackages = {
"ai.driftkit.workflows.spring.service",
"ai.driftkit.workflows.spring.config",
"ai.driftkit.workflows.spring.controller"
})
@EnableMongoRepositories(basePackages = "ai.driftkit.workflows.spring.repository")
@Import(AsyncConfig.class)
public class WorkflowAutoConfiguration {
public WorkflowAutoConfiguration() {
log.info("Initializing DriftKit Workflow Auto-Configuration");
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/AgentWorkflow.java
|
package ai.driftkit.workflows.spring;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.workflows.core.agent.LLMAgent;
import ai.driftkit.workflows.core.agent.RequestTracingProvider;
import ai.driftkit.workflows.core.chat.ChatMemory;
import ai.driftkit.workflows.core.chat.SimpleTokenizer;
import ai.driftkit.workflows.core.chat.TokenWindowChatMemory;
import ai.driftkit.workflows.core.domain.ExecutableWorkflow;
import ai.driftkit.workflows.core.domain.StartEvent;
import ai.driftkit.workflows.core.domain.WorkflowContext;
import lombok.Getter;
import org.apache.commons.lang3.StringUtils;
import java.util.Map;
/**
* Base workflow class that uses LLMAgent for model interactions.
* Provides simplified API while maintaining full tracing support through RequestTracingProvider.
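 *
 * <p>Minimal subclass sketch (the concrete event/result types and the 8000-token window are
 * placeholders, not part of this module):
 * <pre>{@code
 * public class MyAgentWorkflow extends AgentWorkflow<StartEvent, String> {
 *     public MyAgentWorkflow(ModelClient modelClient, PromptService promptService) {
 *         super(modelClient, promptService, 8000);
 *     }
 * }
 * }</pre>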
*/
public abstract class AgentWorkflow<I extends StartEvent, O> extends ExecutableWorkflow<I, O> {
@Getter
protected final LLMAgent agent;
protected final PromptService promptService;
public AgentWorkflow(ModelClient modelClient, PromptService promptService) {
this(modelClient, promptService, 4000);
}
public AgentWorkflow(ModelClient modelClient, PromptService promptService, int memoryTokens) {
this.promptService = promptService;
// Create chat memory with configurable token window
ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(memoryTokens, new SimpleTokenizer());
// Create LLMAgent without any hardcoded values
// Temperature, maxTokens, and systemMessage will be set per request
this.agent = LLMAgent.builder()
.modelClient(modelClient)
.chatMemory(chatMemory)
.promptService(promptService)
.build();
}
/**
* Create a custom agent with specific configuration
*/
protected LLMAgent createCustomAgent(String name, String description, String systemMessage,
Double temperature, Integer maxTokens) {
        // Fall back to the same 4000-token window used by the base constructor when maxTokens is null
        int memoryTokens = maxTokens != null ? maxTokens : 4000;
        ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(memoryTokens, new SimpleTokenizer());
LLMAgent.CustomLLMAgentBuilder builder = LLMAgent.builder()
.modelClient(agent.getModelClient())
.chatMemory(chatMemory)
.promptService(promptService);
if (StringUtils.isNotBlank(name)) {
builder.name(name);
}
if (StringUtils.isNotBlank(description)) {
builder.description(description);
}
if (StringUtils.isNotBlank(systemMessage)) {
builder.systemMessage(systemMessage);
}
if (temperature != null) {
builder.temperature(temperature);
}
if (maxTokens != null) {
builder.maxTokens(maxTokens);
}
return builder.build();
}
/**
* Build tracing context from workflow context
*/
protected RequestTracingProvider.RequestContext buildTracingContext(WorkflowContext context,
String promptId,
Map<String, Object> variables) {
String workflowId = context.getWorkflowId();
String workflowType = this.getClass().getSimpleName();
String workflowStep = context.getCurrentStep();
RequestTracingProvider.RequestContext.RequestContextBuilder builder =
RequestTracingProvider.RequestContext.builder()
.contextId(agent.getAgentId())
.contextType(workflowType + "_" + workflowStep)
.workflowId(workflowId)
.workflowType(workflowType)
.workflowStep(workflowStep);
if (StringUtils.isNotBlank(promptId)) {
builder.promptId(promptId);
}
if (variables != null) {
builder.variables(variables);
}
if (context.getTask() != null) {
builder.chatId(context.getTask().getChatId());
}
return builder.build();
}
protected Language getLanguageFromContext(WorkflowContext context) {
Language language = context.get("language");
return language != null ? language : Language.GENERAL;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/ModelRequestParams.java
|
package ai.driftkit.workflows.spring;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage;
import ai.driftkit.common.domain.client.ModelImageResponse.ModelContentMessage.ModelContentElement.ImageData;
import lombok.AccessLevel;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Parameter holder for model requests with a fluent builder API
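 *
 * <p>Hypothetical usage sketch (the prompt text and variable are illustrative; chaining works
 * because of {@code @Accessors(chain = true)}):
 * <pre>{@code
 * ModelRequestParams params = ModelRequestParams.create()
 *     .setPromptText("Summarize: {{text}}")
 *     .withVariable("text", documentText)
 *     .setTemperature(0.2);
 * }</pre>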
*/
@Data
@Accessors(chain = true)
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class ModelRequestParams {
private String promptId;
private String promptText;
private Map<String, Object> variables;
private Double temperature;
private String model;
private List<ModelContentMessage> contextMessages;
private ImageData imageData;
/**
* Create a new empty params builder
* @return A new params builder
*/
public static ModelRequestParams create() {
return new ModelRequestParams();
}
/**
* Add a single variable to the variables map
* @param key The variable name
* @param value The variable value
* @return This builder
*/
public ModelRequestParams withVariable(String key, Object value) {
if (this.variables == null) {
this.variables = new HashMap<>();
}
this.variables.put(key, value);
return this;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/ModelWorkflow.java
|
package ai.driftkit.workflows.spring;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.Prompt;
import ai.driftkit.common.domain.client.ModelClient;
import ai.driftkit.common.domain.client.ModelImageResponse;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.context.core.util.PromptUtils;
import ai.driftkit.workflows.core.domain.ExecutableWorkflow;
import ai.driftkit.workflows.core.domain.StartEvent;
import ai.driftkit.workflows.core.domain.WorkflowContext;
import ai.driftkit.workflows.core.service.WorkflowRegistry;
import ai.driftkit.workflows.spring.domain.ModelRequestTrace;
import ai.driftkit.workflows.spring.domain.ModelRequestTrace.ContextType;
import ai.driftkit.workflows.spring.domain.ModelRequestTrace.WorkflowInfo;
import ai.driftkit.workflows.spring.service.ModelRequestContext;
import ai.driftkit.workflows.spring.service.ModelRequestService;
import lombok.Getter;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import java.util.Map;
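/**
 * Base class for workflows that talk to LLM and image models through {@link ModelRequestService}.
 * Resolves prompts via {@link PromptService}, applies request parameters from {@link ModelRequestParams},
 * and attaches workflow tracing metadata ({@link ModelRequestTrace.WorkflowInfo}) to every request.
 */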
public abstract class ModelWorkflow<I extends StartEvent, O> extends ExecutableWorkflow<I, O> {
@Getter
protected final ModelClient modelClient;
protected final ModelRequestService modelRequestService;
protected final PromptService promptService;
public ModelWorkflow(ModelClient modelClient, ModelRequestService modelRequestService, PromptService promptService) {
this.modelClient = modelClient;
this.modelRequestService = modelRequestService;
this.promptService = promptService;
String clsName = this.getClass().getSimpleName();
WorkflowRegistry.registerWorkflow(clsName, clsName, null, this);
}
/**
* Send a text request using a prompt ID from the prompt service
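     *
     * <p>Hypothetical call sketch (the prompt ID and variable name are placeholders):
     * <pre>{@code
     * ModelTextResponse response = sendTextToText(
     *     ModelRequestParams.create()
     *         .setPromptId("my_prompt")
     *         .withVariable("query", userQuery),
     *     workflowContext);
     * }</pre>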
*
* @param params The request parameters
* @param context The workflow context
* @return The model response
*/
protected ModelTextResponse sendTextToText(ModelRequestParams params, WorkflowContext context) {
if (StringUtils.isBlank(params.getPromptId())) {
throw new IllegalArgumentException("promptId must be provided");
}
Language language = getLanguageFromContext(context);
Prompt prompt = promptService.getCurrentPromptOrThrow(params.getPromptId(), language);
String promptText = prompt.getMessage();
if (MapUtils.isNotEmpty(params.getVariables())) {
promptText = PromptUtils.applyVariables(promptText, params.getVariables());
}
ModelRequestContext requestContext = buildRequestContext(
ModelRequestTrace.RequestType.TEXT_TO_TEXT,
promptText,
params.getPromptId(),
params.getVariables(),
context
);
applyParamsToContext(params, requestContext);
return modelRequestService.textToText(modelClient, requestContext);
}
/**
* Send a text request using the provided prompt text
*
* @param params The request parameters
* @param context The workflow context
* @return The model response
*/
protected ModelTextResponse sendPromptText(ModelRequestParams params, WorkflowContext context) {
if (StringUtils.isBlank(params.getPromptText())) {
throw new IllegalArgumentException("promptText must be provided");
}
String actualText = params.getPromptText();
if (MapUtils.isNotEmpty(params.getVariables())) {
actualText = PromptUtils.applyVariables(actualText, params.getVariables());
}
ModelRequestContext requestContext = buildRequestContext(
ModelRequestTrace.RequestType.TEXT_TO_TEXT,
actualText,
params.getPromptId(),
params.getVariables(),
context
);
applyParamsToContext(params, requestContext);
        if (context.getTask() != null) {
            if (requestContext.getTemperature() == null) {
                requestContext.setTemperature(context.getTask().getTemperature());
            }
            if (requestContext.getModel() == null) {
                requestContext.setModel(context.getTask().getModelId());
            }
            requestContext.setChatId(context.getTask().getChatId());
        }
return modelRequestService.textToText(modelClient, requestContext);
}
/**
* Send a text request with context messages for conversation history
*
* @param params The request parameters, must include contextMessages
* @param context The workflow context
* @return The model response
*/
protected ModelTextResponse sendPromptTextWithHistory(ModelRequestParams params, WorkflowContext context) {
if (StringUtils.isBlank(params.getPromptText())) {
throw new IllegalArgumentException("promptText must be provided");
}
if (params.getContextMessages() == null) {
throw new IllegalArgumentException("contextMessages must be provided");
}
String actualText = params.getPromptText();
if (MapUtils.isNotEmpty(params.getVariables())) {
actualText = PromptUtils.applyVariables(actualText, params.getVariables());
}
ModelRequestContext requestContext = buildRequestContext(
ModelRequestTrace.RequestType.TEXT_TO_TEXT,
actualText,
params.getPromptId(),
params.getVariables(),
context
);
requestContext.setContextMessages(params.getContextMessages());
applyParamsToContext(params, requestContext);
return modelRequestService.textToText(modelClient, requestContext);
}
/**
* Send a text-to-image request using a prompt ID
*
* @param params The request parameters
* @param context The workflow context
* @return The model image response
*/
protected ModelImageResponse sendTextToImage(ModelRequestParams params, WorkflowContext context) {
if (StringUtils.isBlank(params.getPromptId())) {
throw new IllegalArgumentException("promptId must be provided");
}
Language language = getLanguageFromContext(context);
Prompt prompt = promptService.getCurrentPromptOrThrow(params.getPromptId(), language);
String promptText = prompt.getMessage();
if (MapUtils.isNotEmpty(params.getVariables())) {
promptText = PromptUtils.applyVariables(promptText, params.getVariables());
}
ModelRequestContext requestContext = buildRequestContext(
ModelRequestTrace.RequestType.TEXT_TO_IMAGE,
promptText,
params.getPromptId(),
params.getVariables(),
context
);
applyParamsToContext(params, requestContext);
return modelRequestService.textToImage(modelClient, requestContext);
}
/**
* Send a text-to-image request using prompt text
*
* @param params The request parameters
* @param context The workflow context
* @return The model image response
*/
protected ModelImageResponse sendImagePrompt(ModelRequestParams params, WorkflowContext context) {
if (StringUtils.isBlank(params.getPromptText())) {
throw new IllegalArgumentException("promptText must be provided");
}
String actualText = params.getPromptText();
if (MapUtils.isNotEmpty(params.getVariables())) {
actualText = PromptUtils.applyVariables(actualText, params.getVariables());
}
ModelRequestContext requestContext = buildRequestContext(
ModelRequestTrace.RequestType.TEXT_TO_IMAGE,
actualText,
params.getPromptId(),
params.getVariables(),
context
);
applyParamsToContext(params, requestContext);
return modelRequestService.textToImage(modelClient, requestContext);
}
/**
* Send an image-to-text request using a prompt ID
*
* @param params The request parameters, must include imageData
* @param context The workflow context
* @return The model text response
*/
protected ModelTextResponse sendImageToText(ModelRequestParams params, WorkflowContext context) {
if (StringUtils.isBlank(params.getPromptId())) {
throw new IllegalArgumentException("promptId must be provided");
}
if (params.getImageData() == null) {
throw new IllegalArgumentException("imageData must be provided");
}
Language language = getLanguageFromContext(context);
Prompt prompt = promptService.getCurrentPromptOrThrow(params.getPromptId(), language);
String promptText = prompt.getMessage();
if (MapUtils.isNotEmpty(params.getVariables())) {
promptText = PromptUtils.applyVariables(promptText, params.getVariables());
}
ModelRequestContext requestContext = buildRequestContext(
ModelRequestTrace.RequestType.IMAGE_TO_TEXT,
promptText,
params.getPromptId(),
params.getVariables(),
context
);
requestContext.setImageData(List.of(params.getImageData()));
applyParamsToContext(params, requestContext);
return modelRequestService.imageToText(modelClient, requestContext);
}
/**
* Send an image-to-text request with custom text prompt
*
* @param params The request parameters, must include imageData
* @param context The workflow context
* @return The model text response
*/
protected ModelTextResponse sendImageWithText(ModelRequestParams params, WorkflowContext context) {
if (params.getImageData() == null) {
throw new IllegalArgumentException("imageData must be provided");
}
String actualText = params.getPromptText() != null ? params.getPromptText() : "";
if (MapUtils.isNotEmpty(params.getVariables()) && StringUtils.isNotBlank(actualText)) {
actualText = PromptUtils.applyVariables(actualText, params.getVariables());
}
ModelRequestContext requestContext = buildRequestContext(
ModelRequestTrace.RequestType.IMAGE_TO_TEXT,
actualText,
params.getPromptId(),
params.getVariables(),
context
);
requestContext.setImageData(List.of(params.getImageData()));
applyParamsToContext(params, requestContext);
return modelRequestService.imageToText(modelClient, requestContext);
}
private void applyParamsToContext(ModelRequestParams params, ModelRequestContext requestContext) {
if (params.getTemperature() != null) {
requestContext.setTemperature(params.getTemperature());
}
if (StringUtils.isNotBlank(params.getModel())) {
requestContext.setModel(params.getModel());
}
}
private Language getLanguageFromContext(WorkflowContext context) {
Language language = context.get("language");
return language != null ? language : Language.GENERAL;
}
private ModelRequestContext buildRequestContext(ModelRequestTrace.RequestType requestType,
String promptText, String promptId,
Map<String, Object> variables, WorkflowContext context) {
String step = context.getCurrentStep();
WorkflowInfo workflowInfo = WorkflowInfo.builder()
.workflowId(context.getWorkflowId())
.workflowType(this.getClass().getSimpleName())
.workflowStep(step)
.build();
ModelRequestContext.ModelRequestContextBuilder builder = ModelRequestContext.builder()
.requestType(requestType)
.contextType(ContextType.WORKFLOW)
.contextId(context.getWorkflowId())
.promptText(promptText)
.promptId(promptId)
.variables(variables)
.workflowInfo(workflowInfo);
// Include task from context if available to extract logprobs, chatId and other parameters
if (context.getTask() != null) {
builder.messageTask(context.getTask());
builder.chatId(context.getTask().getChatId());
}
return builder.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/config/AsyncConfig.java
|
package ai.driftkit.workflows.spring.config;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
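/**
 * Async executor configuration for workflow tracing and test runs.
 *
 * <p>Hypothetical usage sketch (bean name as declared below; the annotated method is illustrative):
 * <pre>{@code
 * @Async("workflowTraceExecutor")
 * public void persistTrace(ModelRequestTrace trace) { ... }
 * }</pre>
 */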
@Configuration
@EnableAsync
public class AsyncConfig {
@Bean(name = "workflowTraceExecutor")
@ConditionalOnMissingBean(name = "traceExecutor")
public Executor workflowTraceExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(2);
executor.setMaxPoolSize(5);
executor.setQueueCapacity(100);
executor.setThreadNamePrefix("trace-");
executor.initialize();
return executor;
}
@Bean(name = "testExecutor")
public Executor testExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(5);
executor.setMaxPoolSize(10);
executor.setQueueCapacity(50);
executor.setThreadNamePrefix("test-run-");
// Allow threads to timeout when idle
executor.setAllowCoreThreadTimeOut(true);
executor.initialize();
return executor;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/config/WorkflowConfig.java
|
package ai.driftkit.workflows.spring.config;
import ai.driftkit.workflows.core.service.WorkflowRegistry;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* Configuration for workflow-related beans.
*/
@Configuration
public class WorkflowConfig {
@Bean
public WorkflowRegistry workflowRegistry() {
return new WorkflowRegistry();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/controller/AnalyticsController.java
|
package ai.driftkit.workflows.spring.controller;
import ai.driftkit.common.domain.RestResponse;
import ai.driftkit.workflows.spring.domain.ModelRequestTrace;
import ai.driftkit.workflows.spring.service.AnalyticsService;
import ai.driftkit.workflows.spring.service.AnalyticsService.DailyMetricsResponse;
import ai.driftkit.workflows.spring.service.AnalyticsService.PromptMetricsResponse;
import ai.driftkit.workflows.spring.service.AnalyticsService.TaskVariables;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.List;
@Slf4j
@Controller
@RequestMapping(path = "/data/v1.0/analytics")
public class AnalyticsController {
@Autowired
private AnalyticsService analyticsService;
/**
* Get model request traces within a time range
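     *
     * <p>Example request shape (parameter names as declared below; values are illustrative):
     * <pre>{@code
     * GET /data/v1.0/analytics/traces?startTime=2024-01-01T00:00:00&endTime=2024-01-31T23:59:59&page=0&size=50
     * }</pre>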
*/
@GetMapping("/traces")
public @ResponseBody RestResponse<Page<ModelRequestTrace>> getTraces(
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime startTime,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime endTime,
@RequestParam(required = false) String promptId,
@RequestParam(required = false) String excludePurpose,
@RequestParam(defaultValue = "0") int page,
@RequestParam(defaultValue = "50") int size
) {
Page<ModelRequestTrace> traces = analyticsService.getTraces(startTime, endTime, promptId, excludePurpose, page, size);
return new RestResponse<>(true, traces);
}
/**
* Get traces by context ID
*/
@GetMapping("/traces/{contextId}")
public @ResponseBody RestResponse<List<ModelRequestTrace>> getTracesByContextId(
@PathVariable String contextId
) {
List<ModelRequestTrace> traces = analyticsService.getTracesByContextId(contextId);
return new RestResponse<>(true, traces);
}
/**
* Get daily metrics for the dashboard
*/
@GetMapping("/metrics/daily")
public @ResponseBody RestResponse<DailyMetricsResponse> getDailyMetrics(
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate startDate,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate endDate
) {
DailyMetricsResponse metrics = analyticsService.getDailyMetrics(startDate, endDate);
return new RestResponse<>(true, metrics);
}
/**
* Get available prompt methods for analytics
*/
@GetMapping("/prompt-methods")
public @ResponseBody RestResponse<List<String>> getAvailablePromptMethods() {
List<String> methods = analyticsService.getAvailablePromptMethods();
return new RestResponse<>(true, methods);
}
/**
* Get message tasks by context IDs
*/
@GetMapping("/message-tasks")
public @ResponseBody RestResponse<List<TaskVariables>> getMessageTasksByContextIds(
@RequestParam String contextIds
) {
List<String> ids = List.of(contextIds.split(","));
List<TaskVariables> tasks = analyticsService.getMessageTasksByContextIds(ids);
return new RestResponse<>(true, tasks);
}
/**
* Get metrics for a specific prompt method
*/
@GetMapping("/metrics/prompt")
public @ResponseBody RestResponse<PromptMetricsResponse> getPromptMetrics(
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime startTime,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime endTime,
@RequestParam String promptId
) {
PromptMetricsResponse metrics = analyticsService.getPromptMetrics(startTime, endTime, promptId);
return new RestResponse<>(true, metrics);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/controller/ChecklistController.java
|
package ai.driftkit.workflows.spring.controller;
import ai.driftkit.common.domain.RestResponse;
import ai.driftkit.workflows.spring.domain.ChecklistItemEntity;
import ai.driftkit.workflows.spring.service.ChecklistService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.stream.Collectors;
/**
* Controller for accessing and searching checklists
*/
@Slf4j
@RestController
@RequestMapping("/data/v1.0/admin/llm/checklists")
@RequiredArgsConstructor
public class ChecklistController {
private final ChecklistService checklistService;
/**
* Search checklist items with optional filters
*
* @param promptId Optional promptId to filter by
* @param query Optional query text to filter by (partial match)
* @param description Optional description text to filter by (partial match)
* @param includeSimilar Whether to include items marked as similar to others
* @return List of matching checklist items
*/
@GetMapping("/search")
public RestResponse<List<ChecklistItemEntity>> searchChecklists(
@RequestParam(required = false) String promptId,
@RequestParam(required = false) String query,
@RequestParam(required = false) String description,
@RequestParam(required = false, defaultValue = "false") boolean includeSimilar) {
log.info("Searching for checklists with promptId: {}, query: {}, description: {}, includeSimilar: {}",
promptId, query, description, includeSimilar);
List<ChecklistItemEntity> results = checklistService.searchChecklistItems(promptId, query, description);
// Filter out items marked as similar if requested
if (!includeSimilar) {
results = results.stream()
.filter(item -> item.getSimilarToId() == null)
.collect(Collectors.toList());
}
return new RestResponse<>(true, results);
}
/**
* Get all available promptIds that have checklist items
*/
@GetMapping("/prompt-ids")
public RestResponse<List<String>> getPromptIds() {
List<String> promptIds = checklistService.getAllPromptIds();
return new RestResponse<>(true, promptIds);
}
/**
* Update an existing checklist item
*
* @param id Checklist item ID
* @param item Updated checklist item data
* @return Updated item
*/
@PostMapping("/{id}")
public RestResponse<ChecklistItemEntity> updateChecklistItem(
@PathVariable String id,
@RequestBody ChecklistItemEntity item) {
if (!id.equals(item.getId())) {
return new RestResponse<>(false, null);
}
ChecklistItemEntity updatedItem = checklistService.updateChecklistItem(item);
return new RestResponse<>(true, updatedItem);
}
/**
* Delete a checklist item
*
* @param id Checklist item ID to delete
* @return Success response
*/
@DeleteMapping("/{id}")
public RestResponse<String> deleteChecklistItem(@PathVariable String id) {
checklistService.deleteChecklistItem(id);
return new RestResponse<>(true, "Checklist item deleted successfully");
}
}
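/*
 * Hypothetical sketch (not part of the controller above): reproduces the includeSimilar=false
 * filtering from searchChecklists on in-memory items. Ids and descriptions are illustrative only.
 */
class ChecklistSimilarityFilterExample {
    public static void main(String[] args) {
        ChecklistItemEntity original = ChecklistItemEntity.builder()
                .id("a").description("Responses must cite their source document").build();
        ChecklistItemEntity nearDuplicate = ChecklistItemEntity.builder()
                .id("b").description("Response should cite the source document").similarToId("a").build();
        // Items marked as similar to another item are dropped, exactly as in the controller
        List<ChecklistItemEntity> visible = java.util.stream.Stream.of(original, nearDuplicate)
                .filter(item -> item.getSimilarToId() == null)
                .collect(Collectors.toList());
        System.out.println(visible.size()); // 1 -- only the original remains
    }
}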
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/controller/LLMRestController.java
|
package ai.driftkit.workflows.spring.controller;
import ai.driftkit.common.domain.*;
import ai.driftkit.common.domain.ImageMessageTask.GeneratedImage;
import ai.driftkit.common.utils.AIUtils;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.workflows.core.chat.Message;
import ai.driftkit.workflows.spring.domain.MessageTaskEntity;
import ai.driftkit.workflows.spring.repository.MessageTaskRepositoryV1;
import ai.driftkit.workflows.spring.service.AIService.LLMTaskFuture;
import ai.driftkit.workflows.spring.service.ChatService;
import ai.driftkit.workflows.spring.service.ImageModelService;
import ai.driftkit.workflows.spring.service.TasksService;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import jakarta.validation.constraints.NotNull;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
@Slf4j
@Controller
@RequestMapping(path = "/data/v1.0/admin/llm/")
public class LLMRestController {
@Autowired
private ChatService chatService;
@Autowired
private TasksService tasksService;
@Autowired
private MessageTaskRepositoryV1 messageTaskRepository;
@Autowired
private ImageModelService imageGenerationService;
@Autowired
private PromptService promptService;
@GetMapping("/message/fixed")
public @ResponseBody RestResponse<List<MessageTask>> getFixedMessages(
@RequestParam("page") Integer page,
@RequestParam("limit") Integer limit
) {
List<MessageTask> messageTasks = messageTaskRepository.findMessageTasksWithFixes(
PageRequest.of(page, limit)
).stream()
.map(MessageTaskEntity::toMessageTask)
.collect(Collectors.toList());
return new RestResponse<>(
true,
messageTasks
);
}
@PutMapping("/chat")
public @ResponseBody RestResponse<Chat> createChat(
@RequestBody ChatRequest request
) {
Chat chat = chatService.createChat(request);
return new RestResponse<>(
true,
chat
);
}
@PostMapping("/chat")
public @ResponseBody RestResponse<Chat> updateChat(
@RequestBody Chat request
) {
Chat chat = chatService.save(request);
return new RestResponse<>(
true,
chat
);
}
@GetMapping("/chats")
public @ResponseBody RestResponse<List<Chat>> getChats() {
return new RestResponse<>(
true,
chatService.getChats()
.stream()
.filter(e -> !e.isHidden())
.sorted(Comparator.comparing(Chat::getCreatedTime).reversed())
.collect(Collectors.toList())
);
}
@GetMapping("/languages")
public @ResponseBody RestResponse<Language[]> getLanguages() {
return new RestResponse<>(
true,
Language.values()
);
}
@PostMapping("/message")
public @ResponseBody RestResponse<MessageId> sendMessage(
@RequestBody LLMRequest request
) {
LLMTaskFuture future = tasksService.addTask(
MessageTask.builder()
.messageId(AIUtils.generateId())
.message(request.getMessage())
.language(request.getLanguage())
.chatId(request.getChatId())
.workflow(request.getWorkflow())
.jsonResponse(request.isJsonResponse())
.responseFormat(request.getResponseFormat())
.systemMessage(request.getSystemMessage())
.variables(request.getVariables())
.modelId(request.getModel())
.logprobs(request.getLogprobs())
.topLogprobs(request.getTopLogprobs())
.purpose(request.getPurpose())
.imageBase64(request.getImagesBase64())
.imageMimeType(request.getImageMimeType())
.createdTime(System.currentTimeMillis())
.build()
);
return new RestResponse<>(
true,
new MessageId(future.getMessageId())
);
}
@PostMapping("/message/sync")
public @ResponseBody RestResponse<MessageTask> sendMessageSync(
@RequestBody LLMRequest request
) throws ExecutionException, InterruptedException {
LLMTaskFuture future = tasksService.addTask(
MessageTask.builder()
.messageId(AIUtils.generateId())
.message(request.getMessage())
.language(request.getLanguage())
.chatId(request.getChatId())
.workflow(request.getWorkflow())
.jsonResponse(request.isJsonResponse())
.responseFormat(request.getResponseFormat())
.systemMessage(request.getSystemMessage())
.variables(request.getVariables())
.logprobs(request.getLogprobs())
.topLogprobs(request.getTopLogprobs())
.purpose(request.getPurpose())
.imageBase64(request.getImagesBase64())
.imageMimeType(request.getImageMimeType())
.createdTime(System.currentTimeMillis())
.build()
);
return new RestResponse<>(
true,
future.getFuture().get()
);
}
@PostMapping("/prompt/message")
public @ResponseBody RestResponse<MessageId> sendPromptMessage(
@RequestBody PromptRequest request
) {
MessageTask llmRequest = promptService.getTaskFromPromptRequest(request);
LLMTaskFuture future = tasksService.addTask(llmRequest);
return new RestResponse<>(
true,
new MessageId(future.getMessageId())
);
}
@PostMapping("/prompt/message/sync")
public @ResponseBody RestResponse<MessageTask> sendPromptMessageSync(
@RequestBody PromptRequest request
) throws ExecutionException, InterruptedException {
if (StringUtils.isBlank(request.getWorkflow())) {
request.setWorkflow(null);
}
MessageTask llmRequest = promptService.getTaskFromPromptRequest(request);
LLMTaskFuture future = tasksService.addTask(llmRequest);
return new RestResponse<>(
true,
future.getFuture().get()
);
}
@GetMapping("/message/{messageId}")
public @ResponseBody RestResponse<MessageTask> getMessage(
@PathVariable String messageId
) {
Optional<MessageTask> message = tasksService.getTaskByMessageId(messageId);
return new RestResponse<>(
message.isPresent(),
message.orElse(null)
);
}
@GetMapping("/messageTask/byContext")
public @ResponseBody RestResponse<List<MessageTask>> getMessageTasksByContextIds(
@RequestParam String contextIds
) {
// Since contextId is the same as messageId, we can use it directly
String[] messageIds = contextIds.split(",");
List<MessageTask> tasks = messageTaskRepository.findAllById(List.of(messageIds))
.stream()
.map(MessageTaskEntity::toMessageTask)
.collect(Collectors.toList());
return new RestResponse<>(
true,
tasks
);
}
@PostMapping("/message/{messageId}/rate")
public @ResponseBody RestResponse<MessageTask> rateMessage(
@PathVariable String messageId,
@RequestBody MessageRate request
) {
MessageTask msg = tasksService.rate(
messageId,
request.getGrade(),
request.getGradeComment()
);
return new RestResponse<>(
true,
msg
);
}
@GetMapping("/image/{messageId}/resource/{index}")
@ResponseBody
public ResponseEntity<InputStreamResource> getImageResource(@PathVariable String messageId, @PathVariable Integer index) {
log.info("Request for image resource with id: {}, index: {}", messageId, index);
ImageMessageTask msg = imageGenerationService
.getImageMessageById(messageId)
.orElse(null);
if (msg == null) {
log.error("Image message is not found for id [{}]", messageId);
throw new RuntimeException("Image message is not found for id [%s]".formatted(messageId));
}
if (msg.getImages() == null || msg.getImages().isEmpty()) {
log.error("Image list is empty for task id [{}]", messageId);
throw new RuntimeException("Image list is empty for task id [%s]".formatted(messageId));
}
if (index >= msg.getImages().size()) {
log.error("Image index {} is out of bounds for task id [{}] with {} images",
index, messageId, msg.getImages().size());
throw new RuntimeException("Image index %s is out of bounds for task id [%s]"
.formatted(index, messageId));
}
GeneratedImage image = msg.getImages().get(index);
if (image.getData() == null || image.getData().length == 0) {
log.error("Image data is empty for task id [{}], index [{}]", messageId, index);
throw new RuntimeException("Image data is empty for task id [%s], index [%s]"
.formatted(messageId, index));
}
InputStream in = new ByteArrayInputStream(image.getData());
return ResponseEntity.ok()
.contentType(MediaType.parseMediaType(image.getMimeType()))
.body(new InputStreamResource(in));
}
@GetMapping("/image/{messageId}")
public @ResponseBody RestResponse<ImageMessageTask> message(
@PathVariable String messageId
) {
ImageMessageTask msg = imageGenerationService
.getImageMessageById(messageId)
.orElse(null);
if (msg != null) {
msg.getImages().forEach(e -> e.setData(null));
}
return new RestResponse<>(
msg != null,
msg
);
}
@PostMapping("/image/{messageId}/rate")
public @ResponseBody RestResponse<ImageMessageTask> rateImage(
@PathVariable String messageId,
@RequestBody MessageRate request
) {
ImageMessageTask msg = imageGenerationService.rate(
messageId,
request.getGrade(),
request.getGradeComment()
);
return new RestResponse<>(
true,
msg
);
}
@GetMapping(value = {"/chat/{chatId}/asTasks", "/chat/{chatId}/asMessages"})
public @ResponseBody RestResponse<List<MessageTask>> getMessagesFromDb(
@PathVariable String chatId,
@RequestParam("skip") Integer skip,
@RequestParam("limit") Integer limit,
@RequestParam(value = "direction", required = false, defaultValue = "DESC") Direction direction
) {
if (skip == null) {
skip = 0;
}
if (limit == null) {
limit = 10;
}
List<MessageTask> chat = tasksService.getTasksByChatId(chatId, skip, limit, direction);
return new RestResponse<>(
true,
chat.stream().sorted(Comparator.comparing(AITask::getCreatedTime)).collect(Collectors.toList())
);
}
@GetMapping("/chat/asTasks")
public @ResponseBody RestResponse<List<MessageTask>> getMessagesFromDb(
@RequestParam("skip") Integer skip,
@RequestParam("limit") Integer limit,
@RequestParam(value = "direction", required = false, defaultValue = "DESC") Direction direction
) {
if (skip == null) {
skip = 0;
}
if (limit == null) {
limit = 10;
}
List<MessageTask> chat = tasksService.getTasks(skip, limit, direction);
return new RestResponse<>(
true,
chat.stream().sorted(Comparator.comparing(AITask::getCreatedTime)).collect(Collectors.toList())
);
}
@GetMapping("/chat/{chatId}/item")
public @ResponseBody RestResponse<Chat> getChat(
@PathVariable String chatId
) {
Optional<Chat> chat = chatService.getChat(chatId);
return new RestResponse<>(
chat.isPresent(),
chat.orElse(null)
);
}
@GetMapping("/chat/{chatId}")
public @ResponseBody RestResponse<List<Message>> getMessages(
@PathVariable String chatId,
@RequestParam(value = "skip", required = false) Integer skip,
@RequestParam(value = "limit", required = false) Integer limit,
@RequestParam(value = "direction", required = false, defaultValue = "DESC") Direction direction
) {
if (skip == null) {
skip = 0;
}
if (limit == null) {
limit = 10;
}
List<Message> chat = tasksService.getMessagesList(chatId, skip, limit, direction);
return new RestResponse<>(
true,
chat.stream().sorted(Comparator.comparing(Message::getCreatedTime)).collect(Collectors.toList())
);
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class MessageId {
@NotNull
private String messageId;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class MessageRate {
@NotNull
private String gradeComment;
@NotNull
private Grade grade;
}
}
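/*
 * Hypothetical sketch (not part of the controller above): shows the Lombok-generated
 * constructors of the MessageId and MessageRate DTOs used by the message and rate
 * endpoints. The Grade enum lives in ai.driftkit.common.domain; no constant is assumed
 * here, so the grade is left null purely for illustration.
 */
class LLMRestDtoExample {
    public static void main(String[] args) {
        LLMRestController.MessageId id = new LLMRestController.MessageId(AIUtils.generateId());
        LLMRestController.MessageRate rate =
                new LLMRestController.MessageRate("Helpful and accurate", null /* a Grade constant in real use */);
        System.out.println(id.getMessageId() + " rated: " + rate.getGradeComment());
    }
}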
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/controller/WorkflowController.java
|
package ai.driftkit.workflows.spring.controller;
import ai.driftkit.common.domain.RestResponse;
import ai.driftkit.workflows.core.service.WorkflowRegistry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Controller for providing information about available workflows in the system.
*/
@Controller
@RequestMapping(path = "/data/v1.0/admin/workflows")
public class WorkflowController {
@Autowired
private WorkflowRegistry workflowRegistry;
/**
* Gets all available workflows.
*
* @return A list of all registered workflows with their id, name, and description
*/
@GetMapping
public @ResponseBody RestResponse<List<Map<String, String>>> getWorkflows() {
List<Map<String, String>> workflows = workflowRegistry.getAllWorkflows()
.stream()
.map(WorkflowRegistry.RegisteredWorkflow::toMap)
.collect(Collectors.toList());
return new RestResponse<>(true, workflows);
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/domain/ChatEntity.java
|
package ai.driftkit.workflows.spring.domain;
import ai.driftkit.common.domain.Chat;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.ModelRole;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
/**
* MongoDB entity wrapper for Chat with proper @Id annotation
*/
@Data
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true)
@Document(collection = "chats")
public class ChatEntity extends Chat {
@Override
@Id
public String getChatId() {
return super.getChatId();
}
/**
* Create entity from domain object
*/
public static ChatEntity fromChat(Chat chat) {
ChatEntity entity = new ChatEntity();
entity.setChatId(chat.getChatId());
entity.setName(chat.getName());
entity.setSystemMessage(chat.getSystemMessage());
entity.setLanguage(chat.getLanguage());
entity.setMemoryLength(chat.getMemoryLength());
entity.setModelRole(chat.getModelRole());
entity.setCreatedTime(chat.getCreatedTime());
entity.setHidden(chat.isHidden());
return entity;
}
/**
* Convert to domain object
*/
public Chat toChat() {
return Chat.builder()
.chatId(this.getChatId())
.name(this.getName())
.systemMessage(this.getSystemMessage())
.language(this.getLanguage())
.memoryLength(this.getMemoryLength())
.modelRole(this.getModelRole())
.createdTime(this.getCreatedTime())
.hidden(this.isHidden())
.build();
}
}
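/*
 * Hypothetical sketch (not part of the entity above): round-trips a Chat through the
 * MongoDB wrapper to show that fromChat/toChat preserve the copied fields. Values are
 * illustrative only.
 */
class ChatEntityRoundTripExample {
    public static void main(String[] args) {
        Chat chat = Chat.builder()
                .chatId("chat-1")
                .name("Support")
                .createdTime(System.currentTimeMillis())
                .hidden(false)
                .build();
        ChatEntity entity = ChatEntity.fromChat(chat);
        Chat roundTrip = entity.toChat();
        System.out.println(chat.getChatId().equals(roundTrip.getChatId())); // true
    }
}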
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/domain/ChecklistItemEntity.java
|
package ai.driftkit.workflows.spring.domain;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;
import java.time.LocalDateTime;
/**
* Entity to store individual checklist items from reasoning workflow
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Document(collection = "checklist_items")
public class ChecklistItemEntity {
@Id
private String id;
// Checklist item content
private String description;
private String severity; // CRITICAL, HIGH, MEDIUM, LOW
// Reference information
@Indexed
private String promptId;
@Indexed
private String query;
// Workflow type information
private String workflowType;
// Metadata
private LocalDateTime createdAt;
private int useCount;
// For similarity detection
private String normalizedDescription;
// Reference to similar item
private String similarToId; // ID of the checklist item this one is similar to
private Double similarityScore; // Score of similarity (0.0 to 1.0)
}
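/*
 * Hypothetical sketch (not part of the entity above): builds one checklist item and a
 * near-duplicate that points back to it via similarToId/similarityScore. All values are
 * illustrative only.
 */
class ChecklistItemEntityExample {
    public static void main(String[] args) {
        ChecklistItemEntity item = ChecklistItemEntity.builder()
                .id("chk-1")
                .description("Response must cite the source document")
                .severity("HIGH")
                .promptId("prompt-42")
                .workflowType("reasoning")
                .createdAt(LocalDateTime.now())
                .useCount(1)
                .build();
        // A near-duplicate is kept but marked as similar instead of standing alone
        ChecklistItemEntity duplicate = ChecklistItemEntity.builder()
                .id("chk-2")
                .description("Responses must cite their source documents")
                .similarToId(item.getId())
                .similarityScore(0.93)
                .build();
        System.out.println(duplicate.getSimilarToId()); // chk-1
    }
}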
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/domain/ImageMessageTaskEntity.java
|
package ai.driftkit.workflows.spring.domain;
import ai.driftkit.common.domain.ImageMessageTask;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
/**
* MongoDB entity wrapper for ImageMessageTask with proper @Id annotation
*/
@Data
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true)
@Document(collection = "image_message_tasks")
public class ImageMessageTaskEntity extends ImageMessageTask {
@Override
@Id
public String getMessageId() {
return super.getMessageId();
}
/**
* Create entity from domain object
*/
public static ImageMessageTaskEntity fromImageMessageTask(ImageMessageTask task) {
ImageMessageTaskEntity entity = new ImageMessageTaskEntity();
entity.setMessageId(task.getMessageId());
entity.setChatId(task.getChatId());
entity.setMessage(task.getMessage());
entity.setSystemMessage(task.getSystemMessage());
entity.setGradeComment(task.getGradeComment());
entity.setGrade(task.getGrade());
entity.setCreatedTime(task.getCreatedTime());
entity.setResponseTime(task.getResponseTime());
entity.setModelId(task.getModelId());
entity.setPromptIds(task.getPromptIds());
entity.setJsonRequest(task.isJsonRequest());
entity.setJsonResponse(task.isJsonResponse());
entity.setResponseFormat(task.getResponseFormat());
entity.setVariables(task.getVariables());
entity.setWorkflow(task.getWorkflow());
entity.setContextJson(task.getContextJson());
entity.setWorkflowStopEvent(task.getWorkflowStopEvent());
entity.setLanguage(task.getLanguage());
entity.setImages(task.getImages());
entity.setPurpose(task.getPurpose());
entity.setLogprobs(task.getLogprobs());
entity.setTopLogprobs(task.getTopLogprobs());
entity.setImageBase64(task.getImageBase64());
entity.setImageMimeType(task.getImageMimeType());
return entity;
}
/**
* Convert to domain object
*/
public ImageMessageTask toImageMessageTask() {
ImageMessageTask task = new ImageMessageTask();
task.setMessageId(this.getMessageId());
task.setChatId(this.getChatId());
task.setMessage(this.getMessage());
task.setSystemMessage(this.getSystemMessage());
task.setGradeComment(this.getGradeComment());
task.setGrade(this.getGrade());
task.setCreatedTime(this.getCreatedTime());
task.setResponseTime(this.getResponseTime());
task.setModelId(this.getModelId());
task.setPromptIds(this.getPromptIds());
task.setJsonRequest(this.isJsonRequest());
task.setJsonResponse(this.isJsonResponse());
task.setResponseFormat(this.getResponseFormat());
task.setVariables(this.getVariables());
task.setWorkflow(this.getWorkflow());
task.setContextJson(this.getContextJson());
task.setWorkflowStopEvent(this.getWorkflowStopEvent());
task.setLanguage(this.getLanguage());
task.setImages(this.getImages());
task.setPurpose(this.getPurpose());
task.setLogprobs(this.getLogprobs());
task.setTopLogprobs(this.getTopLogprobs());
task.setImageBase64(this.getImageBase64());
task.setImageMimeType(this.getImageMimeType());
return task;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/domain/MessageTaskEntity.java
|
package ai.driftkit.workflows.spring.domain;
import ai.driftkit.common.domain.MessageTask;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
@Data
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true)
@Document(collection = "messageTasks")
public class MessageTaskEntity extends MessageTask {
@Id
@Override
public String getMessageId() {
return super.getMessageId();
}
public MessageTaskEntity(MessageTask task) {
super(
task.getMessageId(),
task.getChatId(),
task.getMessage(),
task.getSystemMessage(),
task.getGradeComment(),
task.getGrade(),
task.getCreatedTime(),
task.getResponseTime(),
task.getModelId(),
task.getResult(),
task.getImageTaskId(),
task.getPromptIds(),
task.getTemperature(),
task.getWorkflow(),
task.getContextJson(),
task.getLanguage(),
task.getVariables(),
task.isJsonRequest(),
task.isJsonResponse(),
task.getResponseFormat(),
task.getWorkflowStopEvent(),
task.getLogprobs(),
task.getTopLogprobs(),
task.getLogProbs(),
task.getPurpose(),
task.getImageBase64(),
task.getImageMimeType()
);
}
public static MessageTaskEntity fromMessageTask(MessageTask task) {
if (task == null) {
return null;
}
if (task instanceof MessageTaskEntity) {
return (MessageTaskEntity) task;
}
return new MessageTaskEntity(task);
}
public static MessageTask toMessageTask(MessageTaskEntity entity) {
if (entity == null) {
return null;
}
return MessageTask.builder()
.messageId(entity.getMessageId())
.chatId(entity.getChatId())
.message(entity.getMessage())
.systemMessage(entity.getSystemMessage())
.gradeComment(entity.getGradeComment())
.grade(entity.getGrade())
.createdTime(entity.getCreatedTime())
.responseTime(entity.getResponseTime())
.modelId(entity.getModelId())
.result(entity.getResult())
.imageTaskId(entity.getImageTaskId())
.promptIds(entity.getPromptIds())
.temperature(entity.getTemperature())
.workflow(entity.getWorkflow())
.context(entity.getContextJson())
.language(entity.getLanguage())
.variables(entity.getVariables())
.jsonRequest(entity.isJsonRequest())
.jsonResponse(entity.isJsonResponse())
.responseFormat(entity.getResponseFormat())
.workflowStopEvent(entity.getWorkflowStopEvent())
.logprobs(entity.getLogprobs())
.topLogprobs(entity.getTopLogprobs())
.logProbs(entity.getLogProbs())
.purpose(entity.getPurpose())
.imageBase64(entity.getImageBase64())
.imageMimeType(entity.getImageMimeType())
.build();
}
}
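/*
 * Hypothetical sketch (not part of the entity above): shows that fromMessageTask wraps a
 * plain MessageTask but returns an already-wrapped entity unchanged, and that toMessageTask
 * converts back. The messageId value is illustrative only.
 */
class MessageTaskEntityExample {
    public static void main(String[] args) {
        MessageTask task = MessageTask.builder().messageId("msg-1").build();
        MessageTaskEntity entity = MessageTaskEntity.fromMessageTask(task);
        // Passing an entity through again is a no-op thanks to the instanceof check
        System.out.println(MessageTaskEntity.fromMessageTask(entity) == entity); // true
        System.out.println(MessageTaskEntity.toMessageTask(entity).getMessageId()); // msg-1
    }
}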
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/domain/ModelRequestTrace.java
|
package ai.driftkit.workflows.spring.domain;
import ai.driftkit.common.domain.client.ModelImageResponse;
import ai.driftkit.common.domain.client.ModelTextResponse;
import ai.driftkit.common.domain.ModelTrace;
import ai.driftkit.common.utils.AIUtils;
import ai.driftkit.context.core.util.PromptUtils;
import lombok.*;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import java.util.Map;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Document(collection = "model_request_traces")
public class ModelRequestTrace {
@Id
private String id;
private String contextId;
private ContextType contextType;
private RequestType requestType;
private long timestamp;
private String promptTemplate;
private String promptId;
private Map<String, String> variables;
private String modelId;
private String responseId;
private String response;
private String errorMessage;
private ModelTrace trace;
private WorkflowInfo workflowInfo;
private String purpose;
private String chatId;
@SneakyThrows
public static ModelRequestTrace fromTextResponse(
String contextId,
ContextType contextType,
RequestType requestType,
String promptTemplate,
String promptId,
Map<String, Object> variables,
String modelId,
ModelTextResponse response,
WorkflowInfo workflowInfo,
String purpose,
String chatId) {
ModelRequestTrace trace = ModelRequestTrace.builder()
.contextId(contextId)
.contextType(contextType)
.requestType(requestType)
.timestamp(System.currentTimeMillis())
.promptTemplate(promptTemplate)
.promptId(promptId)
.variables(PromptUtils.convertVariables(variables))
.modelId(modelId)
.workflowInfo(workflowInfo)
.purpose(purpose)
.chatId(chatId)
.build();
if (response != null) {
trace.setResponseId(response.getId());
trace.setResponse(response.getResponse());
if (response.getTrace() != null) {
trace.setTrace(response.getTrace());
if (response.getTrace().isHasError()) {
trace.setErrorMessage(response.getTrace().getErrorMessage());
}
}
}
return trace;
}
public static ModelRequestTrace fromImageResponse(
String contextId,
ContextType contextType,
String promptTemplate,
String promptId,
Map<String, Object> variables,
String modelId,
ModelImageResponse response,
WorkflowInfo workflowInfo,
String purpose,
String chatId) {
ModelRequestTrace trace = ModelRequestTrace.builder()
.contextId(contextId)
.contextType(contextType)
.requestType(RequestType.TEXT_TO_IMAGE)
.timestamp(System.currentTimeMillis())
.promptTemplate(promptTemplate)
.promptId(promptId)
.variables(PromptUtils.convertVariables(variables))
.modelId(modelId)
.workflowInfo(workflowInfo)
.purpose(purpose)
.chatId(chatId)
.build();
if (response != null) {
if (response.getTrace() != null) {
trace.setTrace(response.getTrace());
if (response.getTrace().isHasError()) {
trace.setErrorMessage(response.getTrace().getErrorMessage());
}
}
}
return trace;
}
public enum RequestType {
TEXT_TO_TEXT,
TEXT_TO_IMAGE,
IMAGE_TO_TEXT
}
public enum ContextType {
WORKFLOW,
MESSAGE_TASK,
IMAGE_TASK,
AGENT,
CUSTOM
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class WorkflowInfo {
private String workflowId;
private String workflowType;
private String workflowStep;
}
}
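/*
 * Hypothetical sketch (not part of the class above): builds a minimal trace by hand to
 * show the builder fields and the nested WorkflowInfo. All values are illustrative only.
 */
class ModelRequestTraceExample {
    public static void main(String[] args) {
        ModelRequestTrace.WorkflowInfo workflow = ModelRequestTrace.WorkflowInfo.builder()
                .workflowId("wf-1")
                .workflowType("reasoning")
                .workflowStep("analyze")
                .build();
        ModelRequestTrace trace = ModelRequestTrace.builder()
                .contextId("msg-123")
                .contextType(ModelRequestTrace.ContextType.MESSAGE_TASK)
                .requestType(ModelRequestTrace.RequestType.TEXT_TO_TEXT)
                .timestamp(System.currentTimeMillis())
                .promptId("prompt-42")
                .modelId("some-model-id")
                .workflowInfo(workflow)
                .build();
        System.out.println(trace.getContextType() + " / " + trace.getRequestType());
    }
}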
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/repository/ChatRepository.java
|
package ai.driftkit.workflows.spring.repository;
import ai.driftkit.workflows.spring.domain.ChatEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface ChatRepository extends MongoRepository<ChatEntity, String> {
List<ChatEntity> findChatsByHiddenIsFalse();
List<ChatEntity> findChatsByHiddenIsFalse(Pageable pageable);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/repository/ChecklistItemRepository.java
|
package ai.driftkit.workflows.spring.repository;
import ai.driftkit.workflows.spring.domain.ChecklistItemEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface ChecklistItemRepository extends MongoRepository<ChecklistItemEntity, String> {
/**
* Find all checklist items by promptId with pagination
*/
List<ChecklistItemEntity> findByPromptId(String promptId, Pageable pageable);
/**
* Find all checklist items by query with pagination
*/
List<ChecklistItemEntity> findByQuery(String query, Pageable pageable);
/**
* Find all checklist items by query containing with pagination
*/
List<ChecklistItemEntity> findByQueryContaining(String queryPart, Pageable pageable);
/**
* Find all checklist items by promptId or query with pagination
*/
List<ChecklistItemEntity> findByPromptIdOrQuery(String promptId, String query, Pageable pageable);
/**
* Find checklist items by workflow type with pagination
*/
List<ChecklistItemEntity> findByWorkflowType(String workflowType, Pageable pageable);
/**
* Find checklist items by severity with pagination
*/
List<ChecklistItemEntity> findBySeverity(String severity, Pageable pageable);
/**
* Find checklist items by description containing with pagination
*/
List<ChecklistItemEntity> findByDescriptionContaining(String description, Pageable pageable);
/**
* Find checklist items by normalized description with pagination
*/
List<ChecklistItemEntity> findByNormalizedDescription(String normalizedDescription, Pageable pageable);
/**
* Find checklist items by promptId and workflowType with pagination
*/
List<ChecklistItemEntity> findByPromptIdAndWorkflowType(String promptId, String workflowType, Pageable pageable);
/**
* Find checklist items that have not been marked as similar to others with pagination
*/
List<ChecklistItemEntity> findBySimilarToIdIsNull(Pageable pageable);
/**
* Find checklist items that have been marked as similar to others with pagination
*/
List<ChecklistItemEntity> findBySimilarToIdIsNotNull(Pageable pageable);
/**
* Find checklist items that reference a specific item as similar
*/
List<ChecklistItemEntity> findBySimilarToId(String similarToId, Pageable pageable);
/**
* Find all unique non-null promptIds
*/
@Query(value = "{ 'promptId': { $ne: null } }", fields = "{ '_id': 0, 'promptId': 1 }")
List<String> findDistinctPromptIds();
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/repository/ImageTaskRepository.java
|
package ai.driftkit.workflows.spring.repository;
import ai.driftkit.workflows.spring.domain.ImageMessageTaskEntity;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface ImageTaskRepository extends MongoRepository<ImageMessageTaskEntity, String> {
Page<ImageMessageTaskEntity> findByChatId(String chatId, Pageable pageable);
List<ImageMessageTaskEntity> findAllByMessageIdIn(List<String> messageIds);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/repository/MessageTaskRepositoryV1.java
|
package ai.driftkit.workflows.spring.repository;
import ai.driftkit.workflows.spring.domain.MessageTaskEntity;
import org.springframework.context.annotation.Primary;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
@Primary
public interface MessageTaskRepositoryV1 extends MongoRepository<MessageTaskEntity, String> {
@Query("{ 'checkerResponse.fixes': { $exists: true, $not: { $size: 0 } } }")
List<MessageTaskEntity> findMessageTasksWithFixes(Pageable pageable);
Page<MessageTaskEntity> findByChatId(String chatId, Pageable pageable);
List<MessageTaskEntity> findAllByMessageIdIn(List<String> messageIds);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/repository/ModelRequestTraceRepository.java
|
package ai.driftkit.workflows.spring.repository;
import ai.driftkit.workflows.spring.domain.ModelRequestTrace;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface ModelRequestTraceRepository extends MongoRepository<ModelRequestTrace, String> {
List<ModelRequestTrace> findByContextId(String contextId);
List<ModelRequestTrace> findByWorkflowInfoWorkflowId(String workflowId);
Page<ModelRequestTrace> findByModelId(String modelId, Pageable pageable);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/service/AIService.java
|
package ai.driftkit.workflows.spring.service;
import ai.driftkit.clients.core.ModelClientFactory;
import ai.driftkit.clients.openai.client.OpenAIModelClient;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.MessageTask;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.common.utils.JsonUtils;
import ai.driftkit.config.EtlConfig;
import ai.driftkit.config.EtlConfig.VaultConfig;
import ai.driftkit.context.core.util.PromptUtils;
import ai.driftkit.workflows.core.domain.LLMRequestEvent;
import ai.driftkit.workflows.core.domain.StopEvent;
import ai.driftkit.workflows.core.domain.WorkflowContext;
import ai.driftkit.workflows.core.service.WorkflowRegistry;
import ai.driftkit.workflows.spring.domain.MessageTaskEntity;
import ai.driftkit.workflows.spring.repository.MessageTaskRepositoryV1;
import jakarta.annotation.PostConstruct;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Future;
@Slf4j
@Service
public class AIService {
public static final String IMAGE_TASK_MARKER = "image:";
public static final String SYSTEM_PROMPT = "Please respond to the user request [%s], think step by step. " +
"Your response MUST be in [%s] language. " +
"Put your thoughts before the response in the <thoughts /> tag.";
public static final String THOUGHTS_START = "<thoughts>";
public static final String THOUGHTS_END = "</thoughts>";
public static final String THOUGHTS = "<thoughts />";
@Autowired
private ImageModelService imageService;
@Autowired
private EtlConfig config;
@Autowired
private MessageTaskRepositoryV1 messageTaskRepository;
@Getter
@Autowired
private ModelRequestService modelRequestService;
@Getter
private ModelClient modelClient;
private VaultConfig modelConfig;
@PostConstruct
public void init() {
this.modelConfig = config.getVault().get(0);
this.modelClient = ModelClientFactory.fromConfig(modelConfig);
}
@SneakyThrows
public MessageTask chat(MessageTask task) {
String message = task.getMessage();
if (message.startsWith(IMAGE_TASK_MARKER)) {
// Parse an optional image count from the marker, e.g. "image:3 <prompt>" requests 3 images
String marker = message.split("\\s+", 2)[0];
String[] marker2number = marker.split(":");
int images = 1;
if (marker2number.length == 2 && StringUtils.isNumeric(marker2number[1])) {
    images = Integer.parseInt(marker2number[1]);
}
String msg = PromptUtils.applyVariables(task.getMessage(), task.getVariables());
String query = msg.substring(marker.length()).trim();
return imageService.generateImage(task, query, images);
}
String result;
try {
if (task.isJsonRequest() && JsonUtils.isJSON(message)) {
message = JsonUtils.fixIncompleteJSON(message);
}
// Use GENERAL as default language if not specified
String languageStr = Optional.ofNullable(task.getLanguage()).orElse(Language.GENERAL).name();
String workflowId = task.getWorkflow();
// Check if the workflow is registered
if (workflowId != null && WorkflowRegistry.hasWorkflow(workflowId)) {
WorkflowContext workflowContext = new WorkflowContext(task);
// For all workflows, use LLMRequestEvent
StopEvent<?> stopEvent = WorkflowRegistry.executeWorkflow(
workflowId,
new LLMRequestEvent(task),
workflowContext
);
result = stopEvent.getResult();
task.setWorkflowStopEvent(JsonUtils.toJson(stopEvent));
task.setModelId(workflowId);
} else {
// Default behavior with direct model call
String systemMsg = SYSTEM_PROMPT.formatted(
message,
languageStr
);
String msg = systemMsg.replace("[", "\"").replace("]", "\"");
msg = PromptUtils.applyVariables(msg, task.getVariables());
List<ModelImageResponse.ModelContentMessage> messages = new ArrayList<>();
messages.add(ModelImageResponse.ModelContentMessage.create(Role.user, msg));
if (StringUtils.isNotBlank(task.getSystemMessage())) {
messages.add(ModelImageResponse.ModelContentMessage.create(Role.system, task.getSystemMessage()));
}
String model = Optional.ofNullable(task.getModelId()).orElse(modelConfig.getModel());
if (StringUtils.isBlank(model)) {
model = OpenAIModelClient.GPT_DEFAULT;
}
boolean isImageToText = task.getImageBase64() != null && !task.getImageBase64().isEmpty() && StringUtils.isNotBlank(task.getImageMimeType());
ModelRequestContext.ModelRequestContextBuilder contextBuilder = ModelRequestContext.builder()
.contextId(task.getMessageId())
.promptText(msg)
.messageTask(task)
.contextMessages(messages)
.temperature(modelConfig.getTemperature())
.model(model)
.chatId(task.getChatId());
if (isImageToText) {
List<ModelImageResponse.ModelContentMessage.ModelContentElement.ImageData> images = new ArrayList<>();
for (String base64Image : task.getImageBase64()) {
byte[] imageBytes = java.util.Base64.getDecoder().decode(base64Image);
images.add(new ModelImageResponse.ModelContentMessage.ModelContentElement.ImageData(
imageBytes,
task.getImageMimeType()
));
}
contextBuilder.imageData(images);
}
ModelRequestContext requestContext = contextBuilder.build();
ModelTextResponse response;
if (isImageToText) {
response = modelRequestService.imageToText(modelClient, requestContext);
} else {
response = modelRequestService.textToText(modelClient, requestContext);
}
result = response.getResponse();
task.updateWithResponseLogprobs(response);
task.setModelId(model);
result = result.replace(THOUGHTS, "");
if (result.contains(THOUGHTS_START)) {
String thoughts = getThoughts(result);
result = getResultWoThoughts(result);
task.setContextJson(thoughts);
task.setResult(result);
}
}
} catch (Exception e) {
log.error("Model issue [{}]".formatted(task.getMessageId()), e);
throw e;
}
// Determine if we should attempt JSON fixing
boolean shouldFixJson = false;
if (task.getResponseFormat() != null) {
// If responseFormat is specified, only fix JSON for JSON response types
ResponseFormat.ResponseType responseType = task.getResponseFormat().getType();
shouldFixJson = responseType == ResponseFormat.ResponseType.JSON_OBJECT ||
responseType == ResponseFormat.ResponseType.JSON_SCHEMA;
} else {
// Fall back to the old behavior if no responseFormat is specified
shouldFixJson = task.isJsonResponse() || JsonUtils.isJSON(result);
}
if (shouldFixJson && result != null) {
result = JsonUtils.fixIncompleteJSON(result);
}
task.setResponseTime(System.currentTimeMillis());
task.setResult(result);
MessageTaskEntity entity = MessageTaskEntity.fromMessageTask(task);
messageTaskRepository.save(entity);
log.info("[llm] Result for message with id [{}] in chat [{}]: [{}]", task.getMessageId(), task.getChatId(), result);
return task;
}
@org.jetbrains.annotations.NotNull
public static String getResultWoThoughts(String result) {
return result.substring(result.lastIndexOf(THOUGHTS_END) + THOUGHTS_END.length());
}
@org.jetbrains.annotations.NotNull
public static String getThoughts(String result) {
return result.substring(result.indexOf(THOUGHTS_START) + THOUGHTS_START.length(), result.lastIndexOf(THOUGHTS_END));
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class LLMTaskFuture {
private String messageId;
private Future<MessageTask> future;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class Assistant {
String modelId;
AssistantBase instance;
}
public interface AssistantBase {
String chat(String taskId, String message);
}
}
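/*
 * Hypothetical sketch (not part of the service above): demonstrates how the public static
 * helpers split a model response that follows the <thoughts /> convention from SYSTEM_PROMPT.
 * The response string is illustrative only.
 */
class AIServiceThoughtsExample {
    public static void main(String[] args) {
        String raw = "<thoughts>reason step by step here</thoughts>The final answer.";
        System.out.println(AIService.getThoughts(raw));          // reason step by step here
        System.out.println(AIService.getResultWoThoughts(raw));  // The final answer.
    }
}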
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/service/AnalyticsService.java
|
package ai.driftkit.workflows.spring.service;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.MessageTask;
import ai.driftkit.common.domain.Prompt;
import ai.driftkit.context.core.service.PromptService;
import ai.driftkit.workflows.spring.domain.MessageTaskEntity;
import ai.driftkit.workflows.spring.domain.ModelRequestTrace;
import ai.driftkit.workflows.spring.repository.MessageTaskRepositoryV1;
import ai.driftkit.workflows.spring.repository.ModelRequestTraceRepository;
import com.fasterxml.jackson.annotation.JsonInclude;
import jakarta.annotation.PostConstruct;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.*;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@Service
public class AnalyticsService {
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
@Autowired
private ModelRequestTraceRepository modelRequestTraceRepository;
@Autowired
private MessageTaskRepositoryV1 messageTaskRepository;
@Autowired
private MongoTemplate mongoTemplate;
@Autowired
private PromptService promptService;
private Map<String, DailyMetricsResponse> dailyMetricsCache;
@PostConstruct
public void init() {
this.dailyMetricsCache = new ConcurrentHashMap<>();
}
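// Warms the previous-day/today cache entry every 8 minutes so the 10-minute freshness check in getDailyMetrics rarely misses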
@Scheduled(fixedRate = 8 * 60000)
public void dailyMetrics() {
getDailyMetrics(LocalDate.now().minusDays(1), LocalDate.now());
}
/**
* Get model request traces within a time range
* @param startTime Start of time range
* @param endTime End of time range
* @param promptId Optional prompt method filter
* @param excludePurpose Optional comma-separated list of purpose keywords to exclude
* @param page Page number
* @param size Page size
* @return Page of traces
*/
public Page<ModelRequestTrace> getTraces(LocalDateTime startTime, LocalDateTime endTime, String promptId, String excludePurpose, int page, int size) {
Pageable pageable = PageRequest.of(page, size, Sort.by("timestamp").descending());
// Default to today if not specified
Criteria criteria = getDatesCriteria(startTime, endTime);
// Add promptId filter if specified
if (StringUtils.isNotBlank(promptId)) {
promptId = getPromptIdByMethod(promptId);
criteria = criteria.and("promptId").is(promptId);
}
// Add purpose exclusion filter if specified
if (StringUtils.isNotBlank(excludePurpose)) {
List<String> purposesToExclude = Arrays.asList(excludePurpose.split(","));
// Create criteria that matches documents where either:
// 1. purpose field doesn't exist, OR
// 2. purpose field exists but doesn't match any of the excluded values
Criteria purposeCriteria = new Criteria().orOperator(
Criteria.where("purpose").exists(false),
Criteria.where("purpose").not().regex(String.join("|", purposesToExclude), "i")
);
criteria = criteria.andOperator(purposeCriteria);
}
List<ModelRequestTrace> traces = getTraces(criteria, pageable);
// Count total for pagination
long total = mongoTemplate.count(
Query.query(criteria),
ModelRequestTrace.class);
return new PageImpl<>(
traces, pageable, total);
}
@NotNull
private List<ModelRequestTrace> getTraces(Criteria criteria, Pageable pageable) {
Aggregation agg = pageable == null ? Aggregation.newAggregation(
Aggregation.match(criteria),
Aggregation.sort(Sort.by("timestamp").descending())
) : Aggregation.newAggregation(
Aggregation.match(criteria),
Aggregation.sort(Sort.by("timestamp").descending()),
Aggregation.skip((long) pageable.getPageNumber() * pageable.getPageSize()),
Aggregation.limit(pageable.getPageSize())
);
AggregationResults<ModelRequestTrace> results = mongoTemplate.aggregate(
agg, "model_request_traces", ModelRequestTrace.class);
List<ModelRequestTrace> traces = results.getMappedResults();
return traces;
}
@NotNull
private static Criteria getDatesCriteria(LocalDateTime startTime, LocalDateTime endTime) {
if (startTime == null) {
startTime = LocalDateTime.now();
}
if (endTime == null) {
endTime = LocalDateTime.now();
}
// Convert bounds to epoch millis; note the window is widened by one day on each side (start minus one day, end plus one day)
long startTimestamp = startTime.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() - TimeUnit.DAYS.toMillis(1);
long endTimestamp = endTime.plusDays(1).atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() - 1;
// Use MongoDB Criteria to find traces within time range
Criteria criteria = Criteria.where("timestamp").gte(startTimestamp).lte(endTimestamp);
return criteria;
}
/**
* Get traces by context ID
*/
public List<ModelRequestTrace> getTracesByContextId(String contextId) {
List<ModelRequestTrace> traces = modelRequestTraceRepository.findByContextId(contextId);
traces.sort(Comparator.comparing(ModelRequestTrace::getTimestamp));
return traces;
}
/**
* Get available prompt methods for analytics
*/
public List<String> getAvailablePromptMethods() {
List<Prompt> prompts = promptService.getPrompts();
return prompts.stream()
.map(Prompt::getMethod)
.distinct()
.sorted()
.collect(Collectors.toList());
}
/**
* Get message tasks by context IDs
*
* @param contextIds List of context IDs (same as messageIds)
* @return List of TaskVariables objects containing task data
*/
public List<TaskVariables> getMessageTasksByContextIds(List<String> contextIds) {
List<MessageTaskEntity> entities = messageTaskRepository.findAllByMessageIdIn(contextIds);
List<MessageTask> tasks = entities.stream()
.map(MessageTaskEntity::toMessageTask)
.collect(Collectors.toList());
return tasks.stream()
.map(task -> TaskVariables.builder()
.messageId(task.getMessageId())
.contextId(task.getMessageId()) // Same as messageId for use in the UI
.message(task.getMessage())
.result(task.getResult())
.modelId(task.getModelId())
.variables(task.getVariables())
.createdTime(task.getCreatedTime())
.responseTime(task.getResponseTime())
.promptIds(task.getPromptIds())
.build())
.collect(Collectors.toList());
}
/**
* Get daily metrics for the dashboard based on MessageTask and promptIds.
*
* @param startDate Start date for the metrics range
* @param endDate End date for the metrics range
* @return DailyMetricsResponse object containing all metrics
*/
public DailyMetricsResponse getDailyMetrics(LocalDate startDate, LocalDate endDate) {
// Default to today if not specified
if (startDate == null) {
startDate = LocalDate.now();
}
if (endDate == null) {
endDate = LocalDate.now();
}
String dailyCacheKey = startDate + " " + endDate;
DailyMetricsResponse dailyMetricsResponse = dailyMetricsCache.get(dailyCacheKey);
if (dailyMetricsResponse != null && System.currentTimeMillis() - dailyMetricsResponse.getTimestamp() < 10 * 60 * 1000) {
return dailyMetricsResponse;
}
// Convert dates to timestamps
long startTimestamp = startDate.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
long endTimestamp = endDate.plusDays(1).atStartOfDay().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() - 1;
// Query message tasks
Criteria taskCriteria = Criteria.where("createdTime").gte(startTimestamp).lte(endTimestamp);
Query taskQuery = Query.query(taskCriteria);
List<MessageTaskEntity> entities = mongoTemplate.find(taskQuery, MessageTaskEntity.class);
List<MessageTask> tasks = entities.stream()
.map(MessageTaskEntity::toMessageTask)
.collect(Collectors.toList());
// Query model request traces for token usage data
// The contextId in trace equals messageId in MessageTask
Criteria traceCriteria = Criteria.where("timestamp").gte(startTimestamp).lte(endTimestamp);
Query traceQuery = Query.query(traceCriteria);
List<ModelRequestTrace> traces = mongoTemplate.find(traceQuery, ModelRequestTrace.class);
// Create a map of messageId to traces for easy lookup
Map<String, List<ModelRequestTrace>> tracesByMessageId = traces.stream()
.collect(Collectors.groupingBy(ModelRequestTrace::getContextId));
// Get all prompt IDs from tasks to load their corresponding Prompt records
List<String> allPromptIds = tasks.stream()
.filter(t -> t.getPromptIds() != null && !t.getPromptIds().isEmpty())
.flatMap(t -> t.getPromptIds().stream())
.distinct()
.collect(Collectors.toList());
// Load all Prompt objects needed for the method field
List<Prompt> prompts = promptService.getPromptsByIds(allPromptIds);
// Create a map of promptId -> method for conversion
Map<String, String> promptIdToMethodMap = prompts.stream()
.collect(Collectors.toMap(Prompt::getId, Prompt::getMethod, (m1, m2) -> m1));
// Calculate overall metrics
Map<String, Object> metrics = new HashMap<>();
// --- Overall metrics ---
// Request counts
metrics.put("totalTasks", tasks.size());
// Token usage from traces
int totalPromptTokens = traces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getPromptTokens())
.sum();
int totalCompletionTokens = traces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getCompletionTokens())
.sum();
metrics.put("totalPromptTokens", totalPromptTokens);
metrics.put("totalCompletionTokens", totalCompletionTokens);
// --- Latency metrics ---
// Overall latency calculated from tasks
List<Long> latencies = tasks.stream()
.filter(t -> t.getResponseTime() > 0 && t.getCreatedTime() > 0)
.map(t -> t.getResponseTime() - t.getCreatedTime())
.sorted()
.collect(Collectors.toList());
metrics.put("latencyPercentiles", calculatePercentiles(latencies));
// --- Group metrics ---
// By Model
Map<String, Long> tasksByModel = tasks.stream()
.filter(t -> t.getModelId() != null && !t.getModelId().isEmpty())
.collect(Collectors.groupingBy(MessageTask::getModelId, Collectors.counting()));
metrics.put("tasksByModel", tasksByModel);
// Extract all promptIds from tasks, map them to method names, and count occurrences
Map<String, Long> tasksByPromptMethod = new HashMap<>();
// Iterate through tasks to count by prompt method
tasks.stream()
.filter(t -> t.getPromptIds() != null && !t.getPromptIds().isEmpty())
.forEach(task -> {
for (String promptId : task.getPromptIds()) {
// Convert promptId to method name (if available)
String method = promptIdToMethodMap.getOrDefault(promptId, promptId);
tasksByPromptMethod.merge(method, 1L, Long::sum);
}
});
metrics.put("tasksByPromptMethod", tasksByPromptMethod);
// --- Detailed metrics with group breakdowns ---
// 1. Token usage by prompt method
Map<String, Map<String, Integer>> tokensByPromptMethod = new HashMap<>();
tokensByPromptMethod.put("promptTokens", new HashMap<>());
tokensByPromptMethod.put("completionTokens", new HashMap<>());
// Process tasks and their associated traces to get token usage by prompt method
tasks.forEach(task -> {
if (task.getPromptIds() != null && !task.getPromptIds().isEmpty()) {
// Get traces associated with this task via messageId = contextId
List<ModelRequestTrace> taskTraces = tracesByMessageId.getOrDefault(task.getMessageId(), Collections.emptyList());
// Calculate total tokens for this task
int taskPromptTokens = taskTraces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getPromptTokens())
.sum();
int taskCompletionTokens = taskTraces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getCompletionTokens())
.sum();
// Distribute tokens equally among prompt methods for this task
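// e.g. a task with promptIds [a, b] and 90 prompt tokens credits 45 tokens to each method (integer division)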
if (!task.getPromptIds().isEmpty() && (taskPromptTokens > 0 || taskCompletionTokens > 0)) {
for (String promptId : task.getPromptIds()) {
// Convert promptId to method name
String method = promptIdToMethodMap.getOrDefault(promptId, promptId);
// Add proportional prompt tokens (divided by number of prompts in the task)
if (taskPromptTokens > 0) {
int promptTokenPerMethod = taskPromptTokens / task.getPromptIds().size();
tokensByPromptMethod.get("promptTokens").merge(method, promptTokenPerMethod, Integer::sum);
}
// Add proportional completion tokens
if (taskCompletionTokens > 0) {
int completionTokenPerMethod = taskCompletionTokens / task.getPromptIds().size();
tokensByPromptMethod.get("completionTokens").merge(method, completionTokenPerMethod, Integer::sum);
}
}
}
}
});
metrics.put("tokensByPromptMethod", tokensByPromptMethod);
// 2. Token usage by promptMethod+model
Map<String, Map<String, Integer>> tokensByPromptMethodModel = new HashMap<>();
tokensByPromptMethodModel.put("promptTokens", new HashMap<>());
tokensByPromptMethodModel.put("completionTokens", new HashMap<>());
// Process tasks for token usage by prompt method and model
tasks.forEach(task -> {
if (task.getPromptIds() != null && !task.getPromptIds().isEmpty() &&
task.getModelId() != null && !task.getModelId().isEmpty()) {
// Get traces associated with this task
List<ModelRequestTrace> taskTraces = tracesByMessageId.getOrDefault(task.getMessageId(), Collections.emptyList());
// Calculate total tokens for this task
int taskPromptTokens = taskTraces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getPromptTokens())
.sum();
int taskCompletionTokens = taskTraces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getCompletionTokens())
.sum();
// Add tokens for each prompt method + model combination
if (taskPromptTokens > 0 || taskCompletionTokens > 0) {
for (String promptId : task.getPromptIds()) {
// Convert promptId to method name
String method = promptIdToMethodMap.getOrDefault(promptId, promptId);
String statKey = method + ":" + task.getModelId();
// Add proportional tokens
if (taskPromptTokens > 0) {
int promptTokenPerMethod = taskPromptTokens / task.getPromptIds().size();
tokensByPromptMethodModel.get("promptTokens").merge(statKey, promptTokenPerMethod, Integer::sum);
}
if (taskCompletionTokens > 0) {
int completionTokenPerMethod = taskCompletionTokens / task.getPromptIds().size();
tokensByPromptMethodModel.get("completionTokens").merge(statKey, completionTokenPerMethod, Integer::sum);
}
}
}
}
});
metrics.put("tokensByPromptMethodModel", tokensByPromptMethodModel);
// 3. Latency by prompt method
Map<String, Map<String, Long>> latencyByPromptMethod = new HashMap<>();
// Group tasks by prompt method
Map<String, List<MessageTask>> tasksByPromptMethodGroup = new HashMap<>();
tasks.forEach(task -> {
if (task.getPromptIds() != null && !task.getPromptIds().isEmpty()) {
for (String promptId : task.getPromptIds()) {
// Convert promptId to method
String method = promptIdToMethodMap.getOrDefault(promptId, promptId);
                // Add the task to this method's list
                tasksByPromptMethodGroup.computeIfAbsent(method, k -> new ArrayList<>()).add(task);
}
}
});
// Calculate latency percentiles for each prompt method
tasksByPromptMethodGroup.forEach((method, methodTasks) -> {
List<Long> methodLatencies = methodTasks.stream()
.filter(t -> t.getResponseTime() > 0 && t.getCreatedTime() > 0)
.map(t -> t.getResponseTime() - t.getCreatedTime())
.sorted()
.collect(Collectors.toList());
Map<String, Long> percentiles = calculatePercentiles(methodLatencies);
if (!percentiles.isEmpty()) {
latencyByPromptMethod.put(method, percentiles);
}
});
metrics.put("latencyByPromptMethod", latencyByPromptMethod);
// 3.1. Success/Error counts by prompt method, grouped by contextId
Map<String, Long> successByPromptMethod = new HashMap<>();
Map<String, Long> errorsByPromptMethod = new HashMap<>();
// First group traces by contextId to count each context as a single unit
Map<String, List<ModelRequestTrace>> allTracesByContextId = traces.stream()
.filter(t -> t.getContextId() != null && !t.getContextId().isEmpty())
.collect(Collectors.groupingBy(ModelRequestTrace::getContextId));
// Group context-based traces by prompt method
Map<String, Map<String, List<ModelRequestTrace>>> contextsByPromptMethod = new HashMap<>();
// For each context, create a mapping of prompt methods to traces
allTracesByContextId.forEach((contextId, contextTraces) -> {
contextTraces.forEach(trace -> {
if (trace.getPromptId() != null && trace.getTrace() != null) {
// Convert promptId to method
String method = promptIdToMethodMap.getOrDefault(trace.getPromptId(), trace.getPromptId());
                    // Register this trace under its prompt method and contextId
                    contextsByPromptMethod
                            .computeIfAbsent(method, k -> new HashMap<>())
                            .computeIfAbsent(contextId, k -> new ArrayList<>())
                            .add(trace);
}
});
});
// Count success/error for each prompt method based on contexts
contextsByPromptMethod.forEach((method, contextMap) -> {
long methodSuccessCount = 0;
long methodErrorCount = 0;
// For each context, check if any trace has an error
for (List<ModelRequestTrace> contextTraces : contextMap.values()) {
boolean hasError = contextTraces.stream()
.anyMatch(t -> t.getTrace() != null && t.getTrace().isHasError());
if (hasError) {
methodErrorCount++;
} else {
methodSuccessCount++;
}
}
successByPromptMethod.put(method, methodSuccessCount);
errorsByPromptMethod.put(method, methodErrorCount);
});
        // Calculate the success rate for each prompt method (see the worked example below)
Map<String, Double> successRateByPromptMethod = new HashMap<>();
for (String method : contextsByPromptMethod.keySet()) {
long methodSuccess = successByPromptMethod.getOrDefault(method, 0L);
long methodError = errorsByPromptMethod.getOrDefault(method, 0L);
long total = methodSuccess + methodError;
double rate = total > 0 ? (double) methodSuccess / total : 0.0;
successRateByPromptMethod.put(method, rate);
}
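        // Worked example (illustrative, hypothetical method name): if "classifyIntent" appears in three
        // contexts and exactly one of those contexts contains a trace with hasError == true, then
        // successByPromptMethod["classifyIntent"] = 2, errorsByPromptMethod["classifyIntent"] = 1
        // and successRateByPromptMethod["classifyIntent"] = 2.0 / 3 ≈ 0.67.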
// Add maps to metrics
metrics.put("successByPromptMethod", successByPromptMethod);
metrics.put("errorsByPromptMethod", errorsByPromptMethod);
metrics.put("successRateByPromptMethod", successRateByPromptMethod);
// Calculate overall success/error metrics based on contexts
long successCount = 0;
long errorCount = 0;
for (List<ModelRequestTrace> contextTraces : allTracesByContextId.values()) {
boolean hasError = contextTraces.stream()
.anyMatch(t -> t.getTrace() != null && t.getTrace().isHasError());
if (hasError) {
errorCount++;
} else {
successCount++;
}
}
metrics.put("successCount", successCount);
metrics.put("errorCount", errorCount);
        // Overall success rate across contexts
double successRate = (successCount + errorCount) > 0 ?
(double) successCount / (successCount + errorCount) : 0;
metrics.put("successRate", successRate);
// 4. Latency by promptMethod+model
Map<String, Map<String, Long>> latencyByPromptMethodModel = new HashMap<>();
Map<String, List<MessageTask>> tasksByPromptMethodModelGroup = new HashMap<>();
// Process tasks to get latency by prompt method and model
tasks.forEach(task -> {
if (task.getPromptIds() != null && !task.getPromptIds().isEmpty() &&
task.getModelId() != null && !task.getModelId().isEmpty() &&
task.getResponseTime() > 0 && task.getCreatedTime() > 0) {
for (String promptId : task.getPromptIds()) {
// Convert promptId to method
String method = promptIdToMethodMap.getOrDefault(promptId, promptId);
String statKey = method + ":" + task.getModelId();
                // Add the task to this method+model combination
                tasksByPromptMethodModelGroup.computeIfAbsent(statKey, k -> new ArrayList<>()).add(task);
}
}
});
// Calculate latency percentiles for each prompt method+model combination
tasksByPromptMethodModelGroup.forEach((statKey, methodModelTasks) -> {
List<Long> methodModelLatencies = methodModelTasks.stream()
.filter(t -> t.getResponseTime() > 0 && t.getCreatedTime() > 0)
.map(t -> t.getResponseTime() - t.getCreatedTime())
.sorted()
.collect(Collectors.toList());
Map<String, Long> percentiles = calculatePercentiles(methodModelLatencies);
if (!percentiles.isEmpty()) {
latencyByPromptMethodModel.put(statKey, percentiles);
}
});
// Create TokensByCategory objects
TokensByCategory tokensByCategoryPromptMethod = TokensByCategory.builder()
.promptTokens(tokensByPromptMethod.get("promptTokens"))
.completionTokens(tokensByPromptMethod.get("completionTokens"))
.build();
TokensByCategory tokensByCategoryPromptMethodModel = TokensByCategory.builder()
.promptTokens(tokensByPromptMethodModel.get("promptTokens"))
.completionTokens(tokensByPromptMethodModel.get("completionTokens"))
.build();
// Convert latency percentiles maps to proper objects
Map<String, LatencyPercentiles> latencyPercentilesByPromptMethod = new HashMap<>();
latencyByPromptMethod.forEach((method, percentileMap) -> {
latencyPercentilesByPromptMethod.put(method, LatencyPercentiles.fromMap(percentileMap));
});
Map<String, LatencyPercentiles> latencyPercentilesByPromptMethodModel = new HashMap<>();
        latencyByPromptMethodModel.forEach((statKey, percentileMap) -> {
            latencyPercentilesByPromptMethodModel.put(statKey, LatencyPercentiles.fromMap(percentileMap));
        });
// Build and return a properly structured object
DailyMetricsResponse response = DailyMetricsResponse.builder()
.totalTasks(tasks.size())
.totalPromptTokens(totalPromptTokens)
.totalCompletionTokens(totalCompletionTokens)
.latencyPercentiles(LatencyPercentiles.fromMap(calculatePercentiles(latencies)))
.tasksByModel(tasksByModel)
.tasksByPromptMethod(tasksByPromptMethod)
.tokensByPromptMethod(tokensByCategoryPromptMethod)
.tokensByPromptMethodModel(tokensByCategoryPromptMethodModel)
.latencyByPromptMethod(latencyPercentilesByPromptMethod)
.successByPromptMethod(successByPromptMethod)
.errorsByPromptMethod(errorsByPromptMethod)
.successRateByPromptMethod(successRateByPromptMethod)
.successCount(successCount)
.errorCount(errorCount)
.successRate(successRate)
.latencyByPromptMethodModel(latencyPercentilesByPromptMethodModel)
.timestamp(System.currentTimeMillis())
.build();
this.dailyMetricsCache.put(dailyCacheKey, response);
return response;
}
    /**
     * Get metrics for a specific prompt, identified by prompt method name or prompt id.
     *
     * @param startTime start of the time range
     * @param endTime   end of the time range
     * @param promptId  prompt method name or prompt id; if blank, metrics are aggregated across all prompts in the range
     * @return PromptMetricsResponse containing token usage, latency and success metrics
     */
public PromptMetricsResponse getPromptMetrics(LocalDateTime startTime, LocalDateTime endTime, String promptId) {
// Default to the last 24 hours if not specified
Criteria criteria = getDatesCriteria(startTime, endTime);
// Only add promptId filter if specified
if (StringUtils.isNotBlank(promptId)) {
promptId = getPromptIdByMethod(promptId);
criteria = criteria.and("promptId").is(promptId);
}
List<ModelRequestTrace> traces = getTraces(criteria, null);
// Token usage
int totalPromptTokens = traces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getPromptTokens())
.sum();
int totalCompletionTokens = traces.stream()
.filter(t -> t.getTrace() != null)
.mapToInt(t -> t.getTrace().getCompletionTokens())
.sum();
// Latency metrics
List<Long> latencies = traces.stream()
.filter(t -> t.getTrace() != null)
.map(t -> t.getTrace().getExecutionTimeMs())
.sorted()
.collect(Collectors.toList());
// By Model breakdown
Map<String, Long> tracesByModel = traces.stream()
.filter(t -> t.getModelId() != null && !t.getModelId().isEmpty())
.collect(Collectors.groupingBy(ModelRequestTrace::getModelId, Collectors.counting()));
// Token usage by model
Map<String, Integer> promptTokensByModel = new HashMap<>();
Map<String, Integer> completionTokensByModel = new HashMap<>();
traces.stream()
.filter(t -> t.getModelId() != null && !t.getModelId().isEmpty() && t.getTrace() != null)
.forEach(t -> {
String model = t.getModelId();
// Add prompt tokens
promptTokensByModel.merge(model, t.getTrace().getPromptTokens(), Integer::sum);
// Add completion tokens
completionTokensByModel.merge(model, t.getTrace().getCompletionTokens(), Integer::sum);
});
// Create TokensByCategory for models
TokensByCategory tokensByModel = TokensByCategory.builder()
.promptTokens(promptTokensByModel)
.completionTokens(completionTokensByModel)
.build();
// Latency by model
Map<String, LatencyPercentiles> latencyPercentilesByModel = new HashMap<>();
// Group traces by model
Map<String, List<ModelRequestTrace>> tracesByModelGroup = traces.stream()
.filter(t -> t.getModelId() != null && !t.getModelId().isEmpty() && t.getTrace() != null)
.collect(Collectors.groupingBy(ModelRequestTrace::getModelId));
// Calculate percentiles for each model
tracesByModelGroup.forEach((model, modelTraces) -> {
List<Long> modelLatencies = modelTraces.stream()
.map(t -> t.getTrace().getExecutionTimeMs())
.sorted()
.collect(Collectors.toList());
Map<String, Long> percentiles = calculatePercentiles(modelLatencies);
if (!percentiles.isEmpty()) {
latencyPercentilesByModel.put(model, LatencyPercentiles.fromMap(percentiles));
}
});
// Group traces by contextId
Map<String, List<ModelRequestTrace>> tracesByContextId = traces.stream()
.filter(t -> t.getContextId() != null && !t.getContextId().isEmpty())
.collect(Collectors.groupingBy(ModelRequestTrace::getContextId));
// Total unique contexts
long uniqueContexts = tracesByContextId.size();
// Count contexts with errors vs. successful contexts
long errorCount = 0;
long successCount = 0;
for (List<ModelRequestTrace> contextTraces : tracesByContextId.values()) {
// Check if any trace in this context has an error
boolean hasError = contextTraces.stream()
.anyMatch(t -> t.getTrace() != null && t.getTrace().isHasError());
if (hasError) {
errorCount++;
} else {
successCount++;
}
}
// Success rate calculation
double successRate = uniqueContexts == 0 ? 0 : (double) successCount / uniqueContexts;
// Daily counts (useful for graph visualization)
Map<String, Long> dailyCounts = traces.stream()
.collect(Collectors.groupingBy(
t -> DATE_FORMAT.format(new Date(t.getTimestamp())),
Collectors.counting()));
// Build and return the response object
return PromptMetricsResponse.builder()
.totalTraces(uniqueContexts)
.totalPromptTokens(totalPromptTokens)
.totalCompletionTokens(totalCompletionTokens)
.totalTokens(totalPromptTokens + totalCompletionTokens)
.latencyPercentiles(LatencyPercentiles.fromMap(calculatePercentiles(latencies)))
.tracesByModel(tracesByModel)
.tokensByModel(tokensByModel)
.latencyByModel(latencyPercentilesByModel)
.successCount(successCount)
.errorCount(errorCount)
.successRate(successRate)
.dailyCounts(dailyCounts)
.build();
}
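    // Usage sketch (illustrative; assumes this service is injected as `metricsService` and that a prompt
    // method named "summarizeTicket" exists - both names are hypothetical and not defined in this file):
    //
    //   PromptMetricsResponse last24h = metricsService.getPromptMetrics(
    //           LocalDateTime.now().minusDays(1), LocalDateTime.now(), "summarizeTicket");
    //   double successRate = last24h.getSuccessRate();
    //   LatencyPercentiles latency = last24h.getLatencyPercentiles();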
    /**
     * Resolves the given prompt method name (or id) to the id of its current prompt version;
     * falls back to the input value when no matching prompt is found.
     */
    private String getPromptIdByMethod(String promptId) {
        return promptService.getCurrentPrompt(promptId, Language.GENERAL)
                .map(Prompt::getId)
                .orElse(promptId);
    }
    /**
     * Calculates approximate p25/p50/p75/p90 percentiles for a list of values.
     * The list is expected to be sorted in ascending order (callers sort before invoking);
     * an empty input yields an empty map.
     */
private Map<String, Long> calculatePercentiles(List<Long> values) {
Map<String, Long> percentiles = new HashMap<>();
if (values.isEmpty()) {
return percentiles;
}
int size = values.size();
percentiles.put("p25", values.get(Math.max(0, (int)(size * 0.25) - 1)));
percentiles.put("p50", values.get(Math.max(0, (int)(size * 0.50) - 1)));
percentiles.put("p75", values.get(Math.max(0, (int)(size * 0.75) - 1)));
percentiles.put("p90", values.get(Math.max(0, (int)(size * 0.90) - 1)));
return percentiles;
}
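    /**
     * Illustrative sketch, not used by the service: demonstrates the index arithmetic applied by
     * {@link #calculatePercentiles(List)} above. For ten sorted values, p90 resolves to index
     * max(0, (int) (10 * 0.90) - 1) = 8, i.e. the 9th smallest value.
     */
    private static long percentileIndexExample() {
        List<Long> sorted = List.of(10L, 20L, 30L, 40L, 50L, 60L, 70L, 80L, 90L, 100L);
        int p90Index = Math.max(0, (int) (sorted.size() * 0.90) - 1); // = 8
        return sorted.get(p90Index); // = 90
    }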
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class TaskVariables {
private String messageId;
private String contextId; // same as messageId for use in UI
private String message;
private String result;
private String modelId;
private long createdTime;
private long responseTime;
private List<String> promptIds;
private Map<String, Object> variables;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class LatencyPercentiles {
private Long p25;
private Long p50;
private Long p75;
private Long p90;
public static LatencyPercentiles fromMap(Map<String, Long> map) {
if (map == null) {
return null;
}
return LatencyPercentiles.builder()
.p25(map.get("p25"))
.p50(map.get("p50"))
.p75(map.get("p75"))
.p90(map.get("p90"))
.build();
}
}
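    // Illustrative sketch: fromMap consumes the keys produced by calculatePercentiles; keys that are
    // absent in the map simply stay null in the resulting object.
    //
    //   LatencyPercentiles lp = LatencyPercentiles.fromMap(Map.of("p50", 120L, "p90", 340L));
    //   // lp.getP50() == 120L, lp.getP90() == 340L, lp.getP25() == null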
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class TokensByCategory {
private Map<String, Integer> promptTokens;
private Map<String, Integer> completionTokens;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class DailyMetricsResponse {
private long totalTasks;
private int totalPromptTokens;
private int totalCompletionTokens;
private LatencyPercentiles latencyPercentiles;
private Map<String, Long> tasksByModel;
private Map<String, Long> tasksByPromptMethod;
private TokensByCategory tokensByPromptMethod;
private TokensByCategory tokensByPromptMethodModel;
private Map<String, LatencyPercentiles> latencyByPromptMethod;
private Map<String, Long> successByPromptMethod;
private Map<String, Long> errorsByPromptMethod;
private Map<String, Double> successRateByPromptMethod;
private long successCount;
private long errorCount;
private double successRate;
private long timestamp;
private Map<String, LatencyPercentiles> latencyByPromptMethodModel;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class PromptMetricsResponse {
private long totalTraces;
private int totalPromptTokens;
private int totalCompletionTokens;
private int totalTokens;
private LatencyPercentiles latencyPercentiles;
private Map<String, Long> tracesByModel;
private TokensByCategory tokensByModel;
private Map<String, LatencyPercentiles> latencyByModel;
private long successCount;
private long errorCount;
private double successRate;
private Map<String, Long> dailyCounts;
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring
|
java-sources/ai/driftkit/driftkit-workflows-spring-boot-starter/0.8.1/ai/driftkit/workflows/spring/service/ChatService.java
|
package ai.driftkit.workflows.spring.service;
import ai.driftkit.common.domain.Chat;
import ai.driftkit.common.domain.ChatRequest;
import ai.driftkit.common.domain.Language;
import ai.driftkit.workflows.spring.domain.ChatEntity;
import ai.driftkit.workflows.spring.repository.ChatRepository;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Service
public class ChatService {
public static final String SYSTEM_CHAT_ID = "system";
public static final String SYSTEM_ENGLISH_CHAT_ID = "system_eng";
@Autowired
private ChatRepository chatRepository;
@PostConstruct
public void init() {
        Optional<ChatEntity> systemChat = chatRepository.findById(SYSTEM_CHAT_ID);
        // The hidden English system chat is (re)saved on every startup
        save(Chat.builder()
.chatId(SYSTEM_ENGLISH_CHAT_ID)
.language(Language.ENGLISH)
.createdTime(System.currentTimeMillis())
.systemMessage(null)
.name(SYSTEM_ENGLISH_CHAT_ID)
.hidden(true)
.build());
if (systemChat.isPresent()) {
return;
}
        // First startup: also create the hidden default system chat
        save(Chat.builder()
                .chatId(SYSTEM_CHAT_ID)
                .language(Language.SPANISH)
                .createdTime(System.currentTimeMillis())
                .systemMessage(null)
                .name(SYSTEM_CHAT_ID)
                .hidden(true)
                .build());
}
public List<Chat> getChats() {
return chatRepository.findChatsByHiddenIsFalse()
.stream()
.map(ChatEntity::toChat)
.toList();
}
public Optional<Chat> getChat(String chatId) {
return chatRepository.findById(chatId)
.map(ChatEntity::toChat);
}
public Chat createChat(ChatRequest request) {
Chat chat = Chat.builder()
.chatId(Optional.ofNullable(request.getId()).orElse(UUID.randomUUID().toString()))
.language(request.getLanguage())
.createdTime(System.currentTimeMillis())
.systemMessage(request.getSystemMessage())
.memoryLength(request.getMemoryLength())
.name(request.getName())
.build();
save(chat);
return chat;
}
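    // Usage sketch (illustrative; assumes ChatRequest exposes setters matching the getters used above and
    // that this service is injected as `chatService`, neither of which is shown in this file). Leaving the
    // request id null makes createChat assign a random UUID.
    //
    //   ChatRequest request = new ChatRequest();
    //   request.setName("support");
    //   request.setLanguage(Language.ENGLISH);
    //   Chat chat = chatService.createChat(request);   // chat.getChatId() is a generated UUID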
public Chat save(Chat chat) {
if (chat.getCreatedTime() == 0) {
chat.setCreatedTime(System.currentTimeMillis());
}
ChatEntity entity = ChatEntity.fromChat(chat);
ChatEntity savedEntity = chatRepository.save(entity);
return savedEntity.toChat();
}
}
|