index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/PageResult.java
|
package ai.driftkit.workflow.engine.domain;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
 * Simple page result abstraction without Spring dependencies.
 * Represents a page of data with pagination information.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class PageResult<T> {
    private List<T> content;
    // Zero-based page index.
    private int pageNumber;
    private int pageSize;
    private long totalElements;

    /**
     * Computes the total number of pages for the current page size.
     *
     * @return the page count; 0 when {@code pageSize} is not positive.
     *         (The previous unguarded division produced {@code Infinity} for
     *         pageSize == 0, which the int cast turned into Integer.MAX_VALUE.)
     */
    public int getTotalPages() {
        if (pageSize <= 0) {
            return 0;
        }
        return (int) Math.ceil((double) totalElements / pageSize);
    }

    /**
     * Creates an empty page positioned at the given page number and size.
     *
     * @param pageNumber zero-based page index
     * @param pageSize   requested page size
     * @return an empty, immutable-content page
     */
    public static <T> PageResult<T> empty(int pageNumber, int pageSize) {
        return PageResult.<T>builder()
            .content(List.of())
            .pageNumber(pageNumber)
            .pageSize(pageSize)
            .totalElements(0)
            .build();
    }

    /** @return true when the page holds at least one element */
    public boolean hasContent() {
        return content != null && !content.isEmpty();
    }

    /** @return true when this is the first page */
    public boolean isFirst() {
        return pageNumber == 0;
    }

    /** @return true when this is the last page (also true for empty results) */
    public boolean isLast() {
        return pageNumber >= getTotalPages() - 1;
    }

    /** @return true when a following page exists */
    public boolean hasNext() {
        return pageNumber < getTotalPages() - 1;
    }

    /** @return true when a preceding page exists */
    public boolean hasPrevious() {
        return pageNumber > 0;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/RetryContext.java
|
package ai.driftkit.workflow.engine.domain;
import lombok.Builder;
import lombok.Getter;
import lombok.Singular;
import java.util.List;
/**
 * Contains runtime information about retry attempts for a workflow step.
 * This context is available to steps during execution and provides
 * information about the current retry state.
 */
@Getter
@Builder
public class RetryContext {
    private final String stepId;
    private final int attemptNumber;
    private final int maxAttempts;
    @Singular
    private final List<RetryAttempt> previousAttempts;
    private final long firstAttemptTime;
    private final long currentAttemptTime;

    /**
     * Number of retries still available after the current attempt.
     *
     * @return the remaining retry count, never negative
     */
    public int getRemainingRetries() {
        int remaining = maxAttempts - attemptNumber;
        return remaining > 0 ? remaining : 0;
    }

    /**
     * Tells whether the current attempt is the very first one (not a retry).
     *
     * @return true when the attempt counter equals 1
     */
    public boolean isFirstAttempt() {
        return attemptNumber == 1;
    }

    /**
     * Tells whether no further attempts are allowed after this one.
     *
     * @return true when the attempt counter has reached the maximum
     */
    public boolean isLastAttempt() {
        return maxAttempts <= attemptNumber;
    }

    /**
     * Wall-clock time spent since the first attempt started.
     *
     * @return elapsed milliseconds between the first and the current attempt
     */
    public long getTotalElapsedMs() {
        return currentAttemptTime - firstAttemptTime;
    }

    /**
     * Records a single retry attempt.
     */
    @Getter
    @Builder
    public static class RetryAttempt {
        private final int attemptNumber;
        private final long attemptTime;
        private final Throwable failure;
        private final long durationMs;

        /** @return the failure's message, or a placeholder when none was recorded */
        public String getFailureMessage() {
            if (failure == null) {
                return "Unknown failure";
            }
            return failure.getMessage();
        }

        /** @return the concrete failure class, or null when none was recorded */
        public Class<? extends Throwable> getFailureType() {
            if (failure == null) {
                return null;
            }
            return failure.getClass();
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/StepMetadata.java
|
package ai.driftkit.workflow.engine.domain;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
/**
 * Metadata about a workflow step.
 *
 * @param id           unique step identifier
 * @param description  human-readable step description
 * @param async        true when the step executes asynchronously
 * @param inputSchema  schema describing the step's expected input (presumably may be null when a step declares no schema — TODO confirm against the schema extractor)
 * @param outputSchema schema describing the step's produced output (same caveat as inputSchema)
 */
public record StepMetadata(
String id,
String description,
boolean async,
AIFunctionSchema inputSchema,
AIFunctionSchema outputSchema
) {}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/SuspensionData.java
|
package ai.driftkit.workflow.engine.domain;
import java.util.Map;
import java.util.UUID;
/**
 * Container for workflow suspension data that preserves type information
 * and original step input for proper resume handling.
 *
 * @param messageId             unique id associated with the suspension message
 * @param promptToUser          payload presented to the user while suspended
 * @param metadata              arbitrary extra attributes for the suspension
 * @param originalStepInput     input the suspended step was invoked with
 * @param originalStepInputType recorded runtime type of the original input
 *                              (Object.class when the input was null)
 * @param suspendedStepId       id of the step that suspended the workflow
 * @param nextInputClass        input type expected when the workflow resumes
 */
public record SuspensionData(
    String messageId,
    Object promptToUser,
    Map<String, Object> metadata,
    Object originalStepInput,
    Class<?> originalStepInputType,
    String suspendedStepId,
    Class<?> nextInputClass
) {
    /**
     * Creates suspension data with type preservation and an auto-generated message ID.
     */
    public static SuspensionData create(
            Object promptToUser,
            Map<String, Object> metadata,
            Object originalStepInput,
            String suspendedStepId,
            Class<?> nextInputClass) {
        return new SuspensionData(
            UUID.randomUUID().toString(),
            promptToUser,
            metadata,
            originalStepInput,
            resolveInputType(originalStepInput),
            suspendedStepId,
            nextInputClass
        );
    }

    /**
     * Creates suspension data with an explicitly supplied message ID.
     *
     * @throws IllegalArgumentException when the message ID is null or blank
     */
    public static SuspensionData createWithMessageId(
            String messageId,
            Object promptToUser,
            Map<String, Object> metadata,
            Object originalStepInput,
            String suspendedStepId,
            Class<?> nextInputClass) {
        if (messageId == null || messageId.isBlank()) {
            throw new IllegalArgumentException("Message ID cannot be null or blank");
        }
        return new SuspensionData(
            messageId,
            promptToUser,
            metadata,
            originalStepInput,
            resolveInputType(originalStepInput),
            suspendedStepId,
            nextInputClass
        );
    }

    /** Resolves the recorded input type; a null input is recorded as Object.class. */
    private static Class<?> resolveInputType(Object input) {
        return input == null ? Object.class : input.getClass();
    }

    /**
     * Checks if the original input exists and its recorded type matches the expected type.
     */
    public boolean hasOriginalInputOfType(Class<?> expectedType) {
        if (originalStepInput == null) {
            return false;
        }
        return expectedType.isAssignableFrom(originalStepInputType);
    }

    /**
     * Gets the original input cast to the specified type.
     *
     * @throws ClassCastException when the recorded input type is not assignable to {@code type}
     */
    public <T> T getOriginalInput(Class<T> type) {
        if (originalStepInput == null) {
            return null;
        }
        if (type.isAssignableFrom(originalStepInputType)) {
            return type.cast(originalStepInput);
        }
        throw new ClassCastException(
            "Cannot cast original input from " + originalStepInputType.getName() +
            " to " + type.getName()
        );
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/WorkflowDetails.java
|
package ai.driftkit.workflow.engine.domain;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import java.util.List;
/**
 * Detailed workflow information including steps.
 *
 * @param metadata      basic workflow metadata (id, version, description, I/O types)
 * @param steps         metadata for each step of the workflow
 * @param initialStepId id of the step the workflow starts with
 * @param initialSchema input schema of the initial step (presumably may be null — TODO confirm at call sites)
 */
public record WorkflowDetails(
WorkflowMetadata metadata,
List<StepMetadata> steps,
String initialStepId,
AIFunctionSchema initialSchema
) {}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/WorkflowEngineConfig.java
|
package ai.driftkit.workflow.engine.domain;
import ai.driftkit.common.service.ChatStore;
import ai.driftkit.workflow.engine.async.ProgressTracker;
import ai.driftkit.workflow.engine.core.RetryExecutor;
import ai.driftkit.workflow.engine.core.WorkflowContextFactory;
import ai.driftkit.workflow.engine.persistence.AsyncStepStateRepository;
import ai.driftkit.workflow.engine.persistence.ChatSessionRepository;
import ai.driftkit.workflow.engine.persistence.SuspensionDataRepository;
import ai.driftkit.workflow.engine.persistence.WorkflowStateRepository;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Configuration for the WorkflowEngine.
 * This is the base configuration that can be extended by Spring properties or used directly.
 *
 * NOTE(review): the field order below is part of the Lombok-generated
 * {@code @AllArgsConstructor} signature — do not reorder fields without
 * checking for positional constructor callers.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class WorkflowEngineConfig {
/**
 * Number of core threads in the executor service.
 */
@Builder.Default
private int coreThreads = 10;
/**
 * Maximum number of threads in the executor service.
 */
@Builder.Default
private int maxThreads = 50;
/**
 * Queue capacity for the executor service.
 */
@Builder.Default
private int queueCapacity = 1000;
/**
 * Number of threads for scheduled executor.
 */
@Builder.Default
private int scheduledThreads = 5;
/**
 * Default timeout for step execution in milliseconds.
 * -1 means no timeout.
 */
@Builder.Default
private long defaultStepTimeoutMs = 300_000; // 5 minutes
/**
 * Workflow state repository implementation.
 * If null, an in-memory implementation will be used.
 */
private WorkflowStateRepository stateRepository;
/**
 * Progress tracker implementation.
 * If null, an in-memory implementation will be used.
 */
private ProgressTracker progressTracker;
/**
 * Chat session repository implementation.
 * If null, an in-memory implementation will be used.
 */
private ChatSessionRepository chatSessionRepository;
/**
 * Chat store implementation for unified chat memory management.
 * If null, chat tracking will be disabled.
 */
private ChatStore chatStore;
/**
 * Async step state repository implementation.
 * If null, an in-memory implementation will be used.
 */
private AsyncStepStateRepository asyncStepStateRepository;
/**
 * Suspension data repository implementation.
 * If null, an in-memory implementation will be used.
 */
private SuspensionDataRepository suspensionDataRepository;
/**
 * Retry executor implementation.
 * If null, the default retry executor will be used.
 */
private RetryExecutor retryExecutor;
/**
 * Factory for creating WorkflowContext instances.
 * If null, the default factory will be used.
 */
private WorkflowContextFactory contextFactory;
/**
 * Creates a default configuration.
 *
 * @return a configuration with all {@code @Builder.Default} values applied
 *         and every pluggable component left null (engine falls back to
 *         its in-memory/default implementations, per the field docs above)
 */
public static WorkflowEngineConfig defaultConfig() {
return WorkflowEngineConfig.builder().build();
}
/**
 * Creates a configuration from Spring properties.
 *
 * @param coreThreads          core executor thread count
 * @param maxThreads           maximum executor thread count
 * @param queueCapacity        executor queue capacity
 * @param scheduledThreads     scheduled executor thread count
 * @param defaultStepTimeoutMs default step timeout in ms (-1 disables the timeout)
 * @return a configuration carrying only threading/timeout settings; repositories
 *         and other components stay null and must be set separately
 */
public static WorkflowEngineConfig fromProperties(
int coreThreads,
int maxThreads,
int queueCapacity,
int scheduledThreads,
long defaultStepTimeoutMs) {
return WorkflowEngineConfig.builder()
.coreThreads(coreThreads)
.maxThreads(maxThreads)
.queueCapacity(queueCapacity)
.scheduledThreads(scheduledThreads)
.defaultStepTimeoutMs(defaultStepTimeoutMs)
.build();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/WorkflowEvent.java
|
package ai.driftkit.workflow.engine.domain;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Event class for workflow execution with schema and progress support.
 * Inspired by StepEvent from driftkit-chat-assistant-framework
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class WorkflowEvent {
    @Accessors(chain = true)
    private String nextStepId;

    // Fix: @Builder.Default is required here. Without it Lombok's builder
    // ignores the field initializer entirely and instances created via
    // builder() carried a null list (the 'required' field below already uses
    // @Builder.Default; this field was the inconsistent one).
    @Accessors(chain = true)
    @Builder.Default
    private List<String> possibleNextStepIds = new ArrayList<>();

    @Accessors(chain = true)
    private Map<String, String> properties;
    @Accessors(chain = true)
    private AIFunctionSchema currentSchema;
    @Accessors(chain = true)
    private AIFunctionSchema nextInputSchema;
    @Accessors(chain = true)
    private boolean completed;
    @Accessors(chain = true)
    private int percentComplete;
    @Accessors(chain = true)
    @Builder.Default
    private boolean required = true;
    @Accessors(chain = true)
    private String messageId;
    @Accessors(chain = true)
    private String error;
    @Accessors(chain = true)
    private boolean async;
    @Accessors(chain = true)
    private String asyncTaskId;

    // Factory methods for common scenarios

    /** Completed event that routes execution to the given next step. */
    public static WorkflowEvent nextStep(String nextStepId) {
        return WorkflowEvent.builder()
            .nextStepId(nextStepId)
            .completed(true)
            .percentComplete(100)
            .build();
    }

    /** Completed event carrying the final result properties. */
    public static WorkflowEvent completed(Map<String, String> properties) {
        return WorkflowEvent.builder()
            .properties(properties)
            .completed(true)
            .percentComplete(100)
            .build();
    }

    /** In-progress event with a percentage and human-readable progress message. */
    public static WorkflowEvent withProgress(int percentComplete, String message) {
        Map<String, String> props = new HashMap<>();
        props.put("progressMessage", message);
        return WorkflowEvent.builder()
            .properties(props)
            .completed(false)
            .percentComplete(percentComplete)
            .build();
    }

    /** Completed event exposing the current step's schema and the next input schema. */
    public static WorkflowEvent withSchema(AIFunctionSchema currentSchema, AIFunctionSchema nextSchema) {
        return WorkflowEvent.builder()
            .currentSchema(currentSchema)
            .nextInputSchema(nextSchema)
            .completed(true)
            .percentComplete(100)
            .build();
    }

    /** Completed event signalling failure with the given error text. */
    public static WorkflowEvent withError(String error) {
        return WorkflowEvent.builder()
            .error(error)
            .completed(true)
            .percentComplete(100)
            .build();
    }

    /** Event marking the start of an async task identified by taskId/messageId. */
    public static WorkflowEvent asyncStarted(String asyncTaskId, String messageId) {
        return WorkflowEvent.builder()
            .async(true)
            .asyncTaskId(asyncTaskId)
            .messageId(messageId)
            .completed(false)
            .percentComplete(0)
            .build();
    }

    // Builder enhancement methods

    /**
     * Adds a candidate next step id, ignoring null and duplicates.
     * The null guard is kept deliberately: instances produced by the
     * no-args constructor or deserialization may still carry a null list.
     */
    public WorkflowEvent addPossibleNextStep(String stepId) {
        if (possibleNextStepIds == null) {
            possibleNextStepIds = new ArrayList<>();
        }
        if (stepId != null && !possibleNextStepIds.contains(stepId)) {
            possibleNextStepIds.add(stepId);
        }
        return this;
    }

    /** Adds a property, lazily creating or un-freezing the properties map. */
    public WorkflowEvent addProperty(String key, String value) {
        if (properties == null) {
            properties = new HashMap<>();
        } else if (!(properties instanceof HashMap)) {
            // Convert immutable map to mutable
            properties = new HashMap<>(properties);
        }
        properties.put(key, value);
        return this;
    }

    /** Updates the completion percentage and the progress message in one call. */
    public WorkflowEvent updateProgress(int percentComplete, String message) {
        this.percentComplete = percentComplete;
        return addProperty("progressMessage", message);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/WorkflowException.java
|
package ai.driftkit.workflow.engine.domain;
/**
 * Exception thrown during workflow execution.
 * Carries an error code so callers can categorize and handle failures.
 */
public class WorkflowException extends RuntimeException {

    /** Code used when the caller does not supply a specific one. */
    private static final String DEFAULT_CODE = "WORKFLOW_ERROR";

    private final String code;

    /** Creates an exception with the default error code and no cause. */
    public WorkflowException(String message) {
        this(message, DEFAULT_CODE, null);
    }

    /** Creates an exception with an explicit error code and no cause. */
    public WorkflowException(String message, String code) {
        this(message, code, null);
    }

    /** Creates an exception with the default error code and a cause. */
    public WorkflowException(String message, Throwable cause) {
        this(message, DEFAULT_CODE, cause);
    }

    /** Canonical constructor: message, error code and (possibly null) cause. */
    public WorkflowException(String message, String code, Throwable cause) {
        super(message, cause);
        this.code = code;
    }

    /** @return the error code associated with this failure */
    public String getCode() {
        return code;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/domain/WorkflowMetadata.java
|
package ai.driftkit.workflow.engine.domain;
/**
 * Basic workflow metadata.
 *
 * @param id          unique workflow identifier
 * @param version     workflow version string
 * @param description human-readable workflow description
 * @param inputType   type of the workflow's initial input
 * @param outputType  type of the workflow's final result
 */
public record WorkflowMetadata(
String id,
String version,
String description,
Class<?> inputType,
Class<?> outputType
) {}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/graph/Edge.java
|
package ai.driftkit.workflow.engine.graph;
import lombok.extern.slf4j.Slf4j;
import java.util.function.Predicate;
/**
 * Represents an edge in the workflow graph, defining the transition between steps.
 * Edges can be conditional or unconditional, and typed or untyped.
 *
 * @param fromStepId  id of the source step (required, non-blank)
 * @param toStepId    id of the target step (required, non-blank)
 * @param type        edge kind; null is normalized to SEQUENTIAL by the compact constructor
 * @param eventType   for BRANCH edges, the step-result type that triggers this edge
 * @param condition   for CONDITIONAL edges, the predicate evaluated against the step result
 * @param description human-readable label; auto-generated when blank
 * @param branchValue for BRANCH edges created from a BranchValue, the expected value to match
 */
@Slf4j
public record Edge(
String fromStepId,
String toStepId,
EdgeType type,
Class<?> eventType,
Predicate<Object> condition,
String description,
Object branchValue // For BranchValue edges, stores the expected value
) {
/**
 * Types of edges in the workflow graph.
 */
public enum EdgeType {
/**
 * Standard sequential edge - flows when previous step returns Continue
 */
SEQUENTIAL,
/**
 * Branch edge - flows when previous step returns Branch with matching event type
 */
BRANCH,
/**
 * Conditional edge - flows based on predicate evaluation
 */
CONDITIONAL,
/**
 * Error edge - flows when previous step returns Fail
 */
ERROR,
/**
 * Parallel edge - indicates parallel execution branch
 */
PARALLEL
}
/**
 * Validates the Edge parameters and normalizes optional components:
 * a null type becomes SEQUENTIAL, and a blank description is replaced
 * with a generated one based on type/eventType/branchValue.
 */
public Edge {
if (fromStepId == null || fromStepId.isBlank()) {
throw new IllegalArgumentException("From step ID cannot be null or blank");
}
if (toStepId == null || toStepId.isBlank()) {
throw new IllegalArgumentException("To step ID cannot be null or blank");
}
if (type == null) {
type = EdgeType.SEQUENTIAL;
}
if (description == null || description.isBlank()) {
description = generateDescription(type, eventType, branchValue);
}
}
/**
 * Creates a simple sequential edge.
 */
public static Edge sequential(String from, String to) {
return new Edge(from, to, EdgeType.SEQUENTIAL, null, null, null, null);
}
/**
 * Creates a branch edge for a specific event type.
 */
public static Edge branch(String from, String to, Class<?> eventType) {
return new Edge(from, to, EdgeType.BRANCH, eventType, null, null, null);
}
/**
 * Creates a branch edge for a specific event type with expected value.
 */
public static Edge branchWithValue(String from, String to, Class<?> eventType, Object expectedValue) {
return new Edge(from, to, EdgeType.BRANCH, eventType, null, null, expectedValue);
}
/**
 * Creates a conditional edge with a predicate.
 */
public static Edge conditional(String from, String to, Predicate<Object> condition, String description) {
return new Edge(from, to, EdgeType.CONDITIONAL, null, condition, description, null);
}
/**
 * Creates an error handling edge.
 */
public static Edge error(String from, String to) {
return new Edge(from, to, EdgeType.ERROR, null, null, "On error", null);
}
/**
 * Creates a parallel execution edge.
 */
public static Edge parallel(String from, String to) {
return new Edge(from, to, EdgeType.PARALLEL, null, null, "Parallel execution", null);
}
/**
 * Checks if this edge should be followed given the step result.
 *
 * Decision rules visible below: SEQUENTIAL and PARALLEL always match;
 * CONDITIONAL delegates to the predicate; ERROR matches Throwable results;
 * BRANCH matches when the result is an instance of eventType and, when a
 * branchValue is configured and the result is a BranchValue wrapper, the
 * wrapped value also matches.
 *
 * @param stepResult The result from the previous step
 * @return true if this edge should be followed
 */
public boolean shouldFollow(Object stepResult) {
return switch (type) {
case SEQUENTIAL -> true; // Always follow sequential edges
case BRANCH -> {
log.debug("Checking BRANCH edge: eventType={}, stepResult type={}, branchValue={}",
eventType != null ? eventType.getSimpleName() : "null",
stepResult != null ? stepResult.getClass().getSimpleName() : "null",
branchValue);
if (eventType != null && eventType.isInstance(stepResult)) {
// If we have a branchValue, compare it
// Only BranchValue wrappers participate in value comparison; other
// results that merely match eventType fall through to "match".
if (branchValue != null && stepResult instanceof ai.driftkit.workflow.engine.builder.WorkflowBuilder.BranchValue<?> bv) {
Object actualValue = bv.value();
log.debug("BranchValue comparison: actualValue={} ({}), expectedValue={} ({})",
actualValue, actualValue != null ? actualValue.getClass() : "null",
branchValue, branchValue.getClass());
if (actualValue instanceof Enum<?> && branchValue instanceof Enum<?>) {
// Compare enum by name
// NOTE(review): name-based comparison presumably tolerates enum
// constants of the same name from different enum classes/classloaders
// — confirm this is intended rather than identity comparison.
boolean match = ((Enum<?>) actualValue).name().equals(((Enum<?>) branchValue).name());
log.debug("Enum comparison: {} vs {} = {}",
((Enum<?>) actualValue).name(), ((Enum<?>) branchValue).name(), match);
yield match;
} else {
// Compare by equals
// branchValue is known non-null here, so equals is null-safe this way around.
boolean match = branchValue.equals(actualValue);
log.debug("Equals comparison: {} vs {} = {}", actualValue, branchValue, match);
yield match;
}
}
log.debug("No branchValue comparison, returning true for eventType match");
yield true;
}
log.debug("Event type doesn't match, returning false");
yield false;
}
case CONDITIONAL -> condition != null && condition.test(stepResult);
case ERROR -> stepResult instanceof Throwable;
case PARALLEL -> true; // Parallel edges are always valid
};
}
/**
 * Generates a default description based on edge type.
 * Precedence for BRANCH: enum branch value name, then generic value,
 * then event type simple name, then plain "Branch".
 */
private static String generateDescription(EdgeType type, Class<?> eventType, Object branchValue) {
return switch (type) {
case SEQUENTIAL -> "Continue";
case BRANCH -> {
if (branchValue instanceof Enum<?> e) {
yield "On " + e.name();
} else if (branchValue != null) {
yield "On value";
} else if (eventType != null) {
yield "On " + eventType.getSimpleName();
} else {
yield "Branch";
}
}
case CONDITIONAL -> "Conditional";
case ERROR -> "On error";
case PARALLEL -> "Parallel";
};
}
/**
 * Creates a more readable string representation.
 */
@Override
public String toString() {
return fromStepId + " -> " + toStepId + " [" + description + "]";
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/graph/StepNode.java
|
package ai.driftkit.workflow.engine.graph;
import ai.driftkit.workflow.engine.annotations.OnInvocationsLimit;
import ai.driftkit.workflow.engine.annotations.RetryPolicy;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.utils.ReflectionUtils;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.lang.reflect.ParameterizedType;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiFunction;
import java.util.function.Function;
/**
 * Represents a node in the workflow graph.
 * Each node encapsulates the metadata and execution logic for a single workflow step.
 *
 * @param id                 unique step id (required, non-blank)
 * @param description        human-readable label; defaults to "Step: &lt;id&gt;"
 * @param executor           strategy object that runs the step (required)
 * @param isAsync            true when the step returns a CompletableFuture
 * @param isInitial          true when this step may start the workflow
 * @param retryPolicy        retry configuration; null means no retry
 * @param invocationLimit    maximum times this step may run; values &lt;= 0 default to 100
 * @param onInvocationsLimit behavior when the invocation limit is hit; null defaults to ERROR
 */
public record StepNode(
String id,
String description,
StepExecutor executor,
boolean isAsync,
boolean isInitial,
RetryPolicy retryPolicy,
int invocationLimit,
OnInvocationsLimit onInvocationsLimit
) {
/**
 * Validates the StepNode parameters.
 */
public StepNode {
if (id == null || id.isBlank()) {
throw new IllegalArgumentException("Step ID cannot be null or blank");
}
if (executor == null) {
throw new IllegalArgumentException("Step executor cannot be null");
}
if (description == null || description.isBlank()) {
description = "Step: " + id;
}
// Set defaults for retry configuration
if (invocationLimit <= 0) {
invocationLimit = 100;
}
if (onInvocationsLimit == null) {
onInvocationsLimit = OnInvocationsLimit.ERROR;
}
// retryPolicy can be null - means no retry
}
/**
 * Checks if this step can accept the given input type.
 *
 * @param inputType runtime type of the candidate input
 * @return true when the executor's declared input type is assignable from it;
 *         when no input type is declared, only initial steps accept input
 */
public boolean canAcceptInput(Class<?> inputType) {
Class<?> expectedType = executor.getInputType();
if (expectedType == null) {
// Step doesn't require input - only initial steps should accept any input
return isInitial;
}
return expectedType.isAssignableFrom(inputType);
}
/**
 * Factory method to create a StepNode from a method reference.
 * Async-ness is inferred from a CompletableFuture return type.
 */
public static StepNode fromMethod(String id, Method method, Object instance) {
String desc = "Execute " + method.getName();
boolean async = CompletableFuture.class.isAssignableFrom(method.getReturnType());
return new StepNode(id, desc, new MethodStepExecutor(method, instance), async, false,
null, 100, OnInvocationsLimit.ERROR);
}
/**
 * Factory method to create a StepNode from a method reference with retry configuration.
 */
public static StepNode fromMethod(String id, Method method, Object instance,
RetryPolicy retryPolicy, int invocationLimit,
OnInvocationsLimit onInvocationsLimit) {
String desc = "Execute " + method.getName();
boolean async = CompletableFuture.class.isAssignableFrom(method.getReturnType());
return new StepNode(id, desc, new MethodStepExecutor(method, instance), async, false,
retryPolicy, invocationLimit, onInvocationsLimit);
}
/**
 * Factory method to create a StepNode from a function.
 * No type information is recorded, so getInputType()/getOutputType() return null.
 */
public static StepNode fromFunction(String id, Function<Object, StepResult<?>> function) {
return new StepNode(id, "Function step", new FunctionStepExecutor(function, null, null), false, false,
null, 100, OnInvocationsLimit.ERROR);
}
/**
 * Factory method to create a StepNode from a function with explicit type information.
 */
public static <I, O> StepNode fromFunction(String id,
Function<Object, StepResult<?>> function,
Class<I> inputType,
Class<O> outputType) {
return new StepNode(id, "Function step",
new FunctionStepExecutor(function, inputType, outputType), false, false,
null, 100, OnInvocationsLimit.ERROR);
}
/**
 * Factory method to create a StepNode from a bi-function that accepts context.
 */
public static StepNode fromBiFunction(String id, BiFunction<Object, WorkflowContext, StepResult<?>> function) {
return new StepNode(id, "BiFunction step", new BiFunctionStepExecutor(function, null, null), false, false,
null, 100, OnInvocationsLimit.ERROR);
}
/**
 * Factory method to create a StepNode from a bi-function with explicit type information.
 */
public static <I, O> StepNode fromBiFunction(String id,
BiFunction<Object, WorkflowContext, StepResult<?>> function,
Class<I> inputType,
Class<O> outputType) {
return new StepNode(id, "BiFunction step",
new BiFunctionStepExecutor(function, inputType, outputType), false, false,
null, 100, OnInvocationsLimit.ERROR);
}
/**
 * Creates a new StepNode with the initial flag set.
 */
public StepNode asInitial() {
return new StepNode(id, description, executor, isAsync, true, retryPolicy, invocationLimit, onInvocationsLimit);
}
/**
 * Creates a new StepNode with the async flag set.
 */
public StepNode asAsync() {
return new StepNode(id, description, executor, true, isInitial, retryPolicy, invocationLimit, onInvocationsLimit);
}
/**
 * Creates a new StepNode with a different description.
 */
public StepNode withDescription(String newDescription) {
return new StepNode(id, newDescription, executor, isAsync, isInitial, retryPolicy, invocationLimit, onInvocationsLimit);
}
/**
 * Creates a new StepNode with retry configuration.
 */
public StepNode withRetry(RetryPolicy retryPolicy, int invocationLimit, OnInvocationsLimit onInvocationsLimit) {
return new StepNode(id, description, executor, isAsync, isInitial, retryPolicy, invocationLimit, onInvocationsLimit);
}
/**
 * Interface for step execution strategies.
 */
public interface StepExecutor {
/**
 * Executes the step logic.
 *
 * @param input The input data for the step
 * @param context The workflow context
 * @return The result of the step execution
 * @throws Exception if execution fails
 */
Object execute(Object input, WorkflowContext context) throws Exception;
/**
 * Gets the expected input type for this step.
 *
 * @return The input type class, or null if any type is accepted
 */
Class<?> getInputType();
/**
 * Gets the output type this step produces.
 * This is the type wrapped in StepResult (e.g., for StepResult<String>, returns String.class)
 *
 * @return The output type class, or null if unknown
 */
Class<?> getOutputType();
/**
 * Checks if this executor requires the workflow context as a parameter.
 *
 * @return true if context is required
 */
boolean requiresContext();
}
/**
 * Executor implementation for method-based steps.
 *
 * Binds (input, context) to the target method's parameters reflectively in
 * two passes: pass 1 assigns exact type matches (context first, then input);
 * pass 2 fills remaining non-context slots with the input, validating type
 * compatibility and rejecting null for primitive parameters.
 */
private record MethodStepExecutor(Method method, Object instance) implements StepExecutor {
@Override
public Object execute(Object input, WorkflowContext context) throws Exception {
Class<?>[] paramTypes = method.getParameterTypes();
// Build arguments array based on parameter types
Object[] args = new Object[paramTypes.length];
// Track which arguments we've filled
boolean contextUsed = false;
boolean inputUsed = false;
// First pass: fill exact type matches
for (int i = 0; i < paramTypes.length; i++) {
if (!contextUsed && WorkflowContext.class.isAssignableFrom(paramTypes[i])) {
args[i] = context;
contextUsed = true;
} else if (!inputUsed && input != null && paramTypes[i].isInstance(input)) {
args[i] = input;
inputUsed = true;
}
}
// Second pass: fill remaining slots
// NOTE(review): a WorkflowContext slot left null in pass 1 (because
// context itself was null) stays null here — TODO confirm intended.
for (int i = 0; i < paramTypes.length; i++) {
if (args[i] == null) {
// Try to use input if not used yet and type is not WorkflowContext
if (!inputUsed && !WorkflowContext.class.isAssignableFrom(paramTypes[i])) {
if (input == null && !paramTypes[i].isPrimitive()) {
// Null is acceptable for non-primitive types
args[i] = null;
inputUsed = true;
} else if (input != null) {
// Validate type compatibility
if (!paramTypes[i].isInstance(input)) {
throw new IllegalArgumentException(String.format(
"Step '%s' parameter %d expects type %s but received %s",
method.getName(), i, paramTypes[i].getName(), input.getClass().getName()
));
}
args[i] = input;
inputUsed = true;
} else {
throw new IllegalArgumentException(String.format(
"Step '%s' parameter %d of primitive type %s cannot be null",
method.getName(), i, paramTypes[i].getName()
));
}
}
}
}
// Validate all parameters are filled
for (int i = 0; i < args.length; i++) {
if (args[i] == null && paramTypes[i].isPrimitive()) {
throw new IllegalArgumentException(String.format(
"Step '%s' parameter %d of primitive type %s was not provided",
method.getName(), i, paramTypes[i].getName()
));
}
}
// NOTE(review): invoke wraps step exceptions in InvocationTargetException;
// presumably a caller in the engine unwraps it — confirm.
return method.invoke(instance, args);
}
@Override
public Class<?> getInputType() {
Class<?>[] paramTypes = method.getParameterTypes();
// Find the first parameter that is not WorkflowContext
for (Class<?> paramType : paramTypes) {
if (!WorkflowContext.class.isAssignableFrom(paramType)) {
return paramType;
}
}
// No input parameter found
return null;
}
@Override
public Class<?> getOutputType() {
// Unwraps the generic StepResult<T> return type to T via ReflectionUtils.
Type genericReturnType = method.getGenericReturnType();
return ReflectionUtils.extractStepResultType(genericReturnType);
}
@Override
public boolean requiresContext() {
Class<?>[] paramTypes = method.getParameterTypes();
for (Class<?> paramType : paramTypes) {
if (WorkflowContext.class.isAssignableFrom(paramType)) {
return true;
}
}
return false;
}
}
/**
 * Executor implementation for function-based steps.
 * The context parameter is ignored; input/output types are whatever was
 * supplied at construction (null means unknown).
 */
private record FunctionStepExecutor(
Function<Object, StepResult<?>> function,
Class<?> inputType,
Class<?> outputType
) implements StepExecutor {
@Override
public Object execute(Object input, WorkflowContext context) {
return function.apply(input);
}
@Override
public Class<?> getInputType() {
return inputType;
}
@Override
public Class<?> getOutputType() {
return outputType;
}
@Override
public boolean requiresContext() {
return false;
}
}
/**
 * Executor implementation for bi-function-based steps that accept context.
 */
private record BiFunctionStepExecutor(
BiFunction<Object, WorkflowContext, StepResult<?>> function,
Class<?> inputType,
Class<?> outputType
) implements StepExecutor {
@Override
public Object execute(Object input, WorkflowContext context) {
return function.apply(input, context);
}
@Override
public Class<?> getInputType() {
return inputType;
}
@Override
public Class<?> getOutputType() {
return outputType;
}
@Override
public boolean requiresContext() {
return true;
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/graph/WorkflowGraph.java
|
package ai.driftkit.workflow.engine.graph;
import ai.driftkit.workflow.engine.core.WorkflowAnalyzer;
import lombok.Builder;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.stream.Collectors;
/**
* Immutable representation of a workflow as a directed acyclic graph (DAG).
* This is the compiled form of a workflow definition that can be executed by the engine.
*
* @param <T> The type of input data for the workflow
* @param <R> The type of the final result of the workflow
*/
@Slf4j
@Builder
public record WorkflowGraph<T, R>(
String id,
String version,
Class<T> inputType,
Class<R> outputType,
Map<String, StepNode> nodes,
Map<String, List<Edge>> edges,
String initialStepId,
Object workflowInstance,
Map<String, WorkflowAnalyzer.AsyncStepMetadata> asyncStepMetadata
) {
/**
 * Validates and normalizes the WorkflowGraph components.
 *
 * Normalization performed here, in order: blank version defaults to "1.0";
 * null edges/asyncStepMetadata become empty maps; all three maps are
 * defensively copied and wrapped unmodifiable; when no initialStepId is
 * given, the first node flagged as initial is used.
 *
 * @throws IllegalArgumentException when the id is blank, no nodes exist,
 *         no initial step can be determined, the initial step is unknown,
 *         or validateGraph (defined later in this file) rejects the structure
 */
public WorkflowGraph {
if (id == null || id.isBlank()) {
throw new IllegalArgumentException("Workflow ID cannot be null or blank");
}
if (version == null || version.isBlank()) {
version = "1.0";
}
if (nodes == null || nodes.isEmpty()) {
throw new IllegalArgumentException("Workflow must have at least one node");
}
if (edges == null) {
edges = Collections.emptyMap();
}
if (asyncStepMetadata == null) {
asyncStepMetadata = Collections.emptyMap();
}
// Make collections immutable
// (defensive copies: callers cannot mutate the graph after construction)
nodes = Collections.unmodifiableMap(new HashMap<>(nodes));
edges = Collections.unmodifiableMap(
edges.entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
e -> Collections.unmodifiableList(new ArrayList<>(e.getValue()))
))
);
asyncStepMetadata = Collections.unmodifiableMap(new HashMap<>(asyncStepMetadata));
// Validate initial step
if (initialStepId == null || initialStepId.isBlank()) {
// Try to find a step marked as initial
// NOTE(review): HashMap iteration order is unspecified, so with several
// initial-flagged steps the pick is arbitrary — TODO confirm acceptable.
initialStepId = nodes.values().stream()
.filter(StepNode::isInitial)
.map(StepNode::id)
.findFirst()
.orElseThrow(() -> new IllegalArgumentException(
"No initial step specified and none marked as initial"
));
}
if (!nodes.containsKey(initialStepId)) {
throw new IllegalArgumentException("Initial step not found: " + initialStepId);
}
// Validate graph structure
validateGraph(nodes, edges);
}
/**
* Gets all outgoing edges from a specific step.
*/
public List<Edge> getOutgoingEdges(String stepId) {
return edges.getOrDefault(stepId, Collections.emptyList());
}
/**
* Gets all incoming edges to a specific step.
*/
public List<Edge> getIncomingEdges(String stepId) {
return edges.values().stream()
.flatMap(List::stream)
.filter(edge -> edge.toStepId().equals(stepId))
.collect(Collectors.toList());
}
/**
* Gets a step node by ID.
*/
public Optional<StepNode> getNode(String stepId) {
return Optional.ofNullable(nodes.get(stepId));
}
/**
* Finds all terminal nodes (nodes with no outgoing edges).
*/
public Set<String> getTerminalNodes() {
Set<String> terminals = new HashSet<>(nodes.keySet());
edges.values().stream()
.flatMap(List::stream)
.map(Edge::fromStepId)
.forEach(terminals::remove);
return terminals;
}
/**
* Checks if the graph contains cycles.
*/
public boolean hasCycles() {
Set<String> visited = new HashSet<>();
Set<String> recursionStack = new HashSet<>();
for (String nodeId : nodes.keySet()) {
if (hasCyclesHelper(nodeId, visited, recursionStack)) {
return true;
}
}
return false;
}
private boolean hasCyclesHelper(String nodeId, Set<String> visited, Set<String> recursionStack) {
visited.add(nodeId);
recursionStack.add(nodeId);
List<Edge> outgoing = getOutgoingEdges(nodeId);
for (Edge edge : outgoing) {
if (!visited.contains(edge.toStepId())) {
if (hasCyclesHelper(edge.toStepId(), visited, recursionStack)) {
return true;
}
} else if (recursionStack.contains(edge.toStepId())) {
return true;
}
}
recursionStack.remove(nodeId);
return false;
}
/**
* Performs a topological sort of the graph nodes.
*
* @return List of node IDs in topological order
* @throws IllegalStateException if the graph contains cycles
*/
public List<String> topologicalSort() {
if (hasCycles()) {
throw new IllegalStateException("Cannot perform topological sort on graph with cycles");
}
Map<String, Integer> inDegree = new HashMap<>();
nodes.keySet().forEach(id -> inDegree.put(id, 0));
// Calculate in-degrees
edges.values().stream()
.flatMap(List::stream)
.forEach(edge -> inDegree.merge(edge.toStepId(), 1, Integer::sum));
// Find all nodes with no incoming edges
Queue<String> queue = new LinkedList<>();
inDegree.entrySet().stream()
.filter(e -> e.getValue() == 0)
.map(Map.Entry::getKey)
.forEach(queue::offer);
List<String> sorted = new ArrayList<>();
while (!queue.isEmpty()) {
String current = queue.poll();
sorted.add(current);
// Reduce in-degree for all neighbors
getOutgoingEdges(current).forEach(edge -> {
int newDegree = inDegree.merge(edge.toStepId(), -1, Integer::sum);
if (newDegree == 0) {
queue.offer(edge.toStepId());
}
});
}
return sorted;
}
/**
* Validates the graph structure.
*/
private static void validateGraph(Map<String, StepNode> nodes, Map<String, List<Edge>> edges) {
// Check for orphaned edges
for (Map.Entry<String, List<Edge>> entry : edges.entrySet()) {
String fromId = entry.getKey();
if (!nodes.containsKey(fromId)) {
throw new IllegalArgumentException("Edge references non-existent source node: " + fromId);
}
for (Edge edge : entry.getValue()) {
if (!nodes.containsKey(edge.toStepId())) {
throw new IllegalArgumentException(
"Edge references non-existent target node: " + edge.toStepId()
);
}
}
}
// Warn about unreachable nodes (except initial)
Set<String> reachable = new HashSet<>();
Queue<String> toVisit = new LinkedList<>();
// Find initial nodes
nodes.values().stream()
.filter(StepNode::isInitial)
.map(StepNode::id)
.forEach(id -> {
reachable.add(id);
toVisit.offer(id);
});
// If no explicit initial nodes, assume nodes with no incoming edges
if (reachable.isEmpty()) {
Set<String> hasIncoming = edges.values().stream()
.flatMap(List::stream)
.map(Edge::toStepId)
.collect(Collectors.toSet());
nodes.keySet().stream()
.filter(id -> !hasIncoming.contains(id))
.forEach(id -> {
reachable.add(id);
toVisit.offer(id);
});
}
// Traverse the graph
while (!toVisit.isEmpty()) {
String current = toVisit.poll();
List<Edge> outgoing = edges.getOrDefault(current, Collections.emptyList());
for (Edge edge : outgoing) {
if (!reachable.contains(edge.toStepId())) {
reachable.add(edge.toStepId());
toVisit.offer(edge.toStepId());
}
}
}
// Check for unreachable nodes
Set<String> unreachable = new HashSet<>(nodes.keySet());
unreachable.removeAll(reachable);
if (!unreachable.isEmpty()) {
log.debug("Workflow graph contains nodes without direct edges: {}. These may be reached via runtime type-based routing.", unreachable);
}
}
/**
* Creates a string representation suitable for debugging.
*/
@Override
public String toString() {
return "WorkflowGraph{" +
"id='" + id + '\'' +
", version='" + version + '\'' +
", nodes=" + nodes.size() +
", edges=" + edges.values().stream().mapToInt(List::size).sum() +
", initial='" + initialStepId + '\'' +
'}';
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/monitoring/RetryMetricsExporter.java
|
package ai.driftkit.workflow.engine.monitoring;
import ai.driftkit.workflow.engine.core.RetryMetrics;
import lombok.Builder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Exports retry metrics in various formats for monitoring systems.
*/
@Slf4j
public class RetryMetricsExporter {
    private final RetryMetrics metrics;

    public RetryMetricsExporter(RetryMetrics metrics) {
        this.metrics = metrics;
    }

    /**
     * Builds an immutable point-in-time snapshot of the global and per-step
     * retry metrics, timestamped with the current instant.
     */
    public MetricsSnapshot exportSnapshot() {
        RetryMetrics.GlobalMetrics global = metrics.getGlobalMetrics();
        return MetricsSnapshot.builder()
            .timestamp(Instant.now())
            .stepMetrics(exportStepMetrics())
            .globalSuccessRate(global.getSuccessRate())
            .totalRetryAttempts(global.getTotalAttempts())
            .totalRetrySuccesses(global.getTotalSuccesses())
            .totalRetryFailures(global.getTotalFailures())
            .totalRetryExhausted(global.getTotalExhausted())
            .totalRetryAborted(0L) // TODO: Add aborted tracking to GlobalMetrics
            .build();
    }

    /**
     * Renders the current snapshot in the Prometheus text exposition format:
     * HELP/TYPE headers followed by one sample per metric, plus one labeled
     * sample per step for attempt counts.
     */
    public String exportPrometheus() {
        MetricsSnapshot snap = exportSnapshot();
        StringBuilder out = new StringBuilder();
        // Global metrics
        appendSample(out, "retry_attempts_total", "Total number of retry attempts",
            "counter", String.valueOf(snap.getTotalRetryAttempts()));
        appendSample(out, "retry_successes_total", "Total number of successful retries",
            "counter", String.valueOf(snap.getTotalRetrySuccesses()));
        appendSample(out, "retry_failures_total", "Total number of failed retries",
            "counter", String.valueOf(snap.getTotalRetryFailures()));
        appendSample(out, "retry_exhausted_total", "Total number of exhausted retries",
            "counter", String.valueOf(snap.getTotalRetryExhausted()));
        appendSample(out, "retry_success_rate", "Global retry success rate",
            "gauge", String.valueOf(snap.getGlobalSuccessRate()));
        // Per-step metrics (step ID carried as a label)
        out.append("# HELP retry_step_attempts Retry attempts per step\n");
        out.append("# TYPE retry_step_attempts counter\n");
        snap.getStepMetrics().forEach((stepId, step) ->
            out.append("retry_step_attempts{step=\"").append(stepId).append("\"} ")
                .append(step.getTotalAttempts()).append("\n"));
        return out.toString();
    }

    // Emits one complete Prometheus metric: HELP line, TYPE line, sample line,
    // and a trailing blank line separating it from the next metric.
    private static void appendSample(StringBuilder out, String name, String help,
                                     String type, String value) {
        out.append("# HELP ").append(name).append(' ').append(help).append('\n');
        out.append("# TYPE ").append(name).append(' ').append(type).append('\n');
        out.append(name).append(' ').append(value).append("\n\n");
    }

    /**
     * Returns the current snapshot as a nested, JSON-serializable map with
     * "timestamp", "global" and "steps" sections.
     */
    public Map<String, Object> exportJson() {
        MetricsSnapshot snap = exportSnapshot();
        Map<String, Object> global = Map.of(
            "totalAttempts", snap.getTotalRetryAttempts(),
            "totalSuccesses", snap.getTotalRetrySuccesses(),
            "totalFailures", snap.getTotalRetryFailures(),
            "totalExhausted", snap.getTotalRetryExhausted(),
            "totalAborted", snap.getTotalRetryAborted(),
            "successRate", snap.getGlobalSuccessRate()
        );
        Map<String, Object> steps = snap.getStepMetrics().entrySet().stream()
            .collect(Collectors.toMap(
                Map.Entry::getKey,
                entry -> Map.of(
                    "totalAttempts", entry.getValue().getTotalAttempts(),
                    "successCount", entry.getValue().getSuccessCount(),
                    "failureCount", entry.getValue().getFailureCount(),
                    "exhaustedCount", entry.getValue().getExhaustedCount(),
                    "successRate", entry.getValue().getSuccessRate(),
                    "avgDuration", entry.getValue().getAverageDuration()
                )
            ));
        return Map.of(
            "timestamp", snap.getTimestamp().toString(),
            "global", global,
            "steps", steps
        );
    }

    /**
     * Logs a one-line summary of the global metrics at INFO level, followed by
     * the five steps with the highest failure counts (if any have failed).
     */
    public void logMetrics() {
        MetricsSnapshot snap = exportSnapshot();
        log.info("Retry Metrics Summary: attempts={}, successes={}, failures={}, exhausted={}, successRate={}%",
            snap.getTotalRetryAttempts(),
            snap.getTotalRetrySuccesses(),
            snap.getTotalRetryFailures(),
            snap.getTotalRetryExhausted(),
            String.format("%.2f", snap.getGlobalSuccessRate()));
        // Log top failing steps, worst first
        List<Map.Entry<String, StepMetricsSnapshot>> worstSteps = snap.getStepMetrics().entrySet().stream()
            .filter(entry -> entry.getValue().getFailureCount() > 0)
            .sorted((left, right) -> Long.compare(
                right.getValue().getFailureCount(), left.getValue().getFailureCount()))
            .limit(5)
            .toList();
        if (!worstSteps.isEmpty()) {
            log.info("Top failing steps:");
            for (Map.Entry<String, StepMetricsSnapshot> entry : worstSteps) {
                log.info("  - {}: {} failures, {}% success rate",
                    entry.getKey(),
                    entry.getValue().getFailureCount(),
                    String.format("%.2f", entry.getValue().getSuccessRate()));
            }
        }
    }

    // Converts the live per-step metrics into immutable snapshot objects.
    private Map<String, StepMetricsSnapshot> exportStepMetrics() {
        return metrics.getAllStepMetrics().entrySet().stream()
            .collect(Collectors.toMap(
                Map.Entry::getKey,
                entry -> toSnapshot(entry.getValue())
            ));
    }

    // Copies one step's counters into a StepMetricsSnapshot; the atomic
    // counters are read individually, so the snapshot is not guaranteed to be
    // a single consistent cut across all fields.
    private static StepMetricsSnapshot toSnapshot(RetryMetrics.StepMetrics step) {
        return StepMetricsSnapshot.builder()
            .totalAttempts(step.getTotalAttempts())
            .successCount(step.getSuccessCount().get())
            .failureCount(step.getFailureCount().get())
            .exhaustedCount(step.getExhaustedCount().get())
            .abortedCount(step.getAbortedCount())
            .successRate(step.getSuccessRate())
            .averageDuration(step.getAverageDuration())
            .build();
    }

    /** Immutable snapshot of global retry metrics plus per-step snapshots. */
    @Getter
    @Builder
    public static class MetricsSnapshot {
        private final Instant timestamp;
        private final long totalRetryAttempts;
        private final long totalRetrySuccesses;
        private final long totalRetryFailures;
        private final long totalRetryExhausted;
        private final long totalRetryAborted;
        private final double globalSuccessRate;
        private final Map<String, StepMetricsSnapshot> stepMetrics;
    }

    /** Immutable snapshot of a single step's retry counters and rates. */
    @Getter
    @Builder
    public static class StepMetricsSnapshot {
        private final long totalAttempts;
        private final long successCount;
        private final long failureCount;
        private final long exhaustedCount;
        private final long abortedCount;
        private final double successRate;
        private final double averageDuration;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/AsyncStepStateRepository.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.domain.AsyncStepState;
import java.util.Optional;
/**
 * Repository interface for managing asynchronous step states.
 * This replaces the incorrect storage of ChatResponse for async operations.
 * Async step states are temporary and should be stored separately from workflow instances.
 * <p>
 * NOTE(review): implementations are presumably expected to be thread-safe,
 * since async steps run concurrently — confirm and document per implementation.
 */
public interface AsyncStepStateRepository {
    /**
     * Save or update an async step state.
     *
     * @param state The async step state to save
     * @return The saved state
     */
    AsyncStepState save(AsyncStepState state);
    /**
     * Find an async step state by message ID.
     *
     * @param messageId The unique message ID for this async execution
     * @return Optional containing the state if found
     */
    Optional<AsyncStepState> findByMessageId(String messageId);
    /**
     * Delete an async step state.
     *
     * @param messageId The message ID to delete
     */
    void deleteByMessageId(String messageId);
    /**
     * Check if an async step state exists.
     *
     * @param messageId The message ID
     * @return true if exists, false otherwise
     */
    boolean existsByMessageId(String messageId);
    /**
     * Delete all states older than the given timestamp.
     * Useful for cleanup of old async states.
     *
     * @param timestampMillis The timestamp in milliseconds (epoch millis)
     * @return Number of deleted states
     */
    int deleteOlderThan(long timestampMillis);
    /**
     * Update the progress of an async step state.
     *
     * @param messageId The message ID
     * @param percentComplete The completion percentage (0-100)
     * @param statusMessage The status message
     * @return true if updated, false if not found
     */
    boolean updateProgress(String messageId, int percentComplete, String statusMessage);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/ChatSessionRepository.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.domain.ChatSession;
import ai.driftkit.workflow.engine.domain.PageRequest;
import ai.driftkit.workflow.engine.domain.PageResult;
import java.util.Optional;
/**
 * Repository interface for managing chat sessions.
 * Provides abstraction over storage implementation without Spring dependencies.
 * <p>
 * Pagination uses the engine's own {@code PageRequest}/{@code PageResult}
 * abstractions rather than Spring Data types.
 */
public interface ChatSessionRepository {
    /**
     * Save or update a chat session.
     *
     * @param session The chat session to save
     * @return The saved chat session
     */
    ChatSession save(ChatSession session);
    /**
     * Find a chat session by ID.
     *
     * @param chatId The chat ID
     * @return Optional containing the chat session if found
     */
    Optional<ChatSession> findById(String chatId);
    /**
     * Find all chat sessions for a user.
     *
     * @param userId The user ID
     * @param pageRequest Pagination information
     * @return Page of chat sessions
     */
    PageResult<ChatSession> findByUserId(String userId, PageRequest pageRequest);
    /**
     * Find active (non-archived) chat sessions for a user.
     *
     * @param userId The user ID
     * @param pageRequest Pagination information
     * @return Page of active chat sessions
     */
    PageResult<ChatSession> findActiveByUserId(String userId, PageRequest pageRequest);
    /**
     * Delete a chat session.
     *
     * @param chatId The chat ID to delete
     */
    void deleteById(String chatId);
    /**
     * Check if a chat session exists.
     *
     * @param chatId The chat ID
     * @return true if exists, false otherwise
     */
    boolean existsById(String chatId);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/InMemoryRetryStateStore.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.domain.RetryContext;
import ai.driftkit.workflow.engine.core.CircuitBreaker;
import lombok.extern.slf4j.Slf4j;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
/**
* In-memory implementation of RetryStateStore for testing and development.
* This implementation is not suitable for production as state is lost on restart.
*/
@Slf4j
public class InMemoryRetryStateStore implements RetryStateStore {

    // Keyed by "workflowId:stepId"; concurrent maps so the store can be shared
    // across workflow threads without external locking.
    private final Map<String, RetryContext> retryContexts = new ConcurrentHashMap<>();
    private final Map<String, CircuitBreaker.CircuitStateSnapshot> circuitBreakerStates = new ConcurrentHashMap<>();

    /** Stores (or replaces) the retry context for the given workflow step. */
    @Override
    public CompletableFuture<Void> saveRetryContext(String workflowId, String stepId, RetryContext context) {
        String entryKey = compositeKey(workflowId, stepId);
        retryContexts.put(entryKey, context);
        log.debug("Saved retry context for {}", entryKey);
        return CompletableFuture.completedFuture(null);
    }

    /** Looks up the retry context for the given workflow step, if present. */
    @Override
    public CompletableFuture<Optional<RetryContext>> loadRetryContext(String workflowId, String stepId) {
        String entryKey = compositeKey(workflowId, stepId);
        Optional<RetryContext> found = Optional.ofNullable(retryContexts.get(entryKey));
        log.debug("Loaded retry context for {}: {}", entryKey, found.isPresent() ? "found" : "not found");
        return CompletableFuture.completedFuture(found);
    }

    /** Removes any stored retry context for the given workflow step. */
    @Override
    public CompletableFuture<Void> deleteRetryContext(String workflowId, String stepId) {
        String entryKey = compositeKey(workflowId, stepId);
        retryContexts.remove(entryKey);
        log.debug("Deleted retry context for {}", entryKey);
        return CompletableFuture.completedFuture(null);
    }

    /** Stores (or replaces) the circuit breaker snapshot for the given step. */
    @Override
    public CompletableFuture<Void> saveCircuitBreakerState(String workflowId, String stepId,
                                                           CircuitBreaker.CircuitStateSnapshot state) {
        String entryKey = compositeKey(workflowId, stepId);
        circuitBreakerStates.put(entryKey, state);
        log.debug("Saved circuit breaker state for {}: {}", entryKey, state.state());
        return CompletableFuture.completedFuture(null);
    }

    /** Looks up the circuit breaker snapshot for the given step, if present. */
    @Override
    public CompletableFuture<Optional<CircuitBreaker.CircuitStateSnapshot>> loadCircuitBreakerState(String workflowId,
                                                                                                    String stepId) {
        String entryKey = compositeKey(workflowId, stepId);
        CircuitBreaker.CircuitStateSnapshot snapshot = circuitBreakerStates.get(entryKey);
        log.debug("Loaded circuit breaker state for {}: {}", entryKey,
            snapshot != null ? snapshot.state() : "not found");
        return CompletableFuture.completedFuture(Optional.ofNullable(snapshot));
    }

    /** Drops every stored entry (retry contexts and breaker states) belonging to the workflow. */
    @Override
    public CompletableFuture<Void> deleteWorkflowState(String workflowId) {
        // Remove all entries whose key carries this workflow's prefix.
        // NOTE(review): a workflowId containing ':' could collide with another
        // workflow's prefix — acceptable for an in-memory dev/test store.
        String prefix = workflowId + ":";
        retryContexts.keySet().removeIf(entryKey -> entryKey.startsWith(prefix));
        circuitBreakerStates.keySet().removeIf(entryKey -> entryKey.startsWith(prefix));
        log.debug("Deleted all retry state for workflow {}", workflowId);
        return CompletableFuture.completedFuture(null);
    }

    // Builds the "workflowId:stepId" map key shared by both maps.
    private static String compositeKey(String workflowId, String stepId) {
        return workflowId + ":" + stepId;
    }

    /**
     * Clear all stored state (useful for testing).
     */
    public void clearAll() {
        retryContexts.clear();
        circuitBreakerStates.clear();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/PersistentCircuitBreaker.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.core.CircuitBreaker;
import lombok.extern.slf4j.Slf4j;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
/**
* Circuit breaker wrapper that adds persistence capabilities.
* Automatically saves and loads circuit breaker state to/from a RetryStateStore.
*/
@Slf4j
public class PersistentCircuitBreaker extends CircuitBreaker {
    private final RetryStateStore stateStore;
    private final String workflowId;
    // Steps whose persisted state has already been loaded (or load was attempted).
    private final Set<String> loadedSteps = ConcurrentHashMap.newKeySet();
    private final boolean asyncPersistence;
    private final long persistenceTimeoutMs;
    /**
     * Creates a persistent circuit breaker with default configuration
     * (async persistence, 5s timeout).
     *
     * @param stateStore The state store for persistence
     * @param workflowId The workflow instance ID
     */
    public PersistentCircuitBreaker(RetryStateStore stateStore, String workflowId) {
        this(CircuitBreakerConfig.defaultConfig(), stateStore, workflowId, true, 5000);
    }
    /**
     * Creates a persistent circuit breaker with custom configuration.
     *
     * @param config Circuit breaker configuration
     * @param stateStore The state store for persistence
     * @param workflowId The workflow instance ID
     * @param asyncPersistence Whether to persist state asynchronously
     * @param persistenceTimeoutMs Timeout for persistence operations
     */
    public PersistentCircuitBreaker(CircuitBreakerConfig config,
                                  RetryStateStore stateStore,
                                  String workflowId,
                                  boolean asyncPersistence,
                                  long persistenceTimeoutMs) {
        super(config);
        this.stateStore = stateStore;
        this.workflowId = workflowId;
        this.asyncPersistence = asyncPersistence;
        this.persistenceTimeoutMs = persistenceTimeoutMs;
    }
    @Override
    public boolean allowExecution(String stepId) {
        // Load state on first access so a recovered workflow resumes with the
        // breaker in its last persisted state.
        ensureStateLoaded(stepId);
        boolean allowed = super.allowExecution(stepId);
        // Persist state changes (allowExecution may transition OPEN -> HALF_OPEN)
        persistState(stepId);
        return allowed;
    }
    @Override
    public void recordSuccess(String stepId) {
        ensureStateLoaded(stepId);
        super.recordSuccess(stepId);
        persistState(stepId);
    }
    @Override
    public void recordFailure(String stepId, Exception exception) {
        ensureStateLoaded(stepId);
        super.recordFailure(stepId, exception);
        persistState(stepId);
    }
    @Override
    public void reset(String stepId) {
        super.reset(stepId);
        loadedSteps.remove(stepId);
        // Delete persisted state.
        // NOTE(review): this deletes the *retry context* entry, while persistState()
        // writes via saveCircuitBreakerState(); the persisted breaker snapshot may
        // therefore survive a reset and be reloaded later. Verify whether
        // RetryStateStore offers a matching delete for circuit breaker state —
        // TODO confirm and switch if so.
        CompletableFuture<Void> future = stateStore.deleteRetryContext(workflowId, stepId);
        if (!asyncPersistence) {
            try {
                future.get(persistenceTimeoutMs, TimeUnit.MILLISECONDS);
            } catch (Exception e) {
                log.error("Failed to delete persisted state for step {}", stepId, e);
            }
        } else {
            // Fix: previously async delete failures were silently dropped;
            // log them for parity with the synchronous path.
            future.exceptionally(e -> {
                log.error("Failed to delete persisted state for step {}", stepId, e);
                return null;
            });
        }
    }
    @Override
    public void resetAll() {
        super.resetAll();
        loadedSteps.clear();
        // Delete all persisted state for this workflow
        CompletableFuture<Void> future = stateStore.deleteWorkflowState(workflowId);
        if (!asyncPersistence) {
            try {
                future.get(persistenceTimeoutMs, TimeUnit.MILLISECONDS);
            } catch (Exception e) {
                log.error("Failed to delete all persisted state for workflow {}", workflowId, e);
            }
        } else {
            // Fix: previously async delete failures were silently dropped;
            // log them for parity with the synchronous path.
            future.exceptionally(e -> {
                log.error("Failed to delete all persisted state for workflow {}", workflowId, e);
                return null;
            });
        }
    }
    /**
     * Loads the persisted snapshot for a step exactly once. Load failures are
     * logged but do not block execution; the step is marked loaded either way
     * so a broken store cannot stall the breaker.
     */
    private void ensureStateLoaded(String stepId) {
        if (loadedSteps.contains(stepId)) {
            return;
        }
        try {
            stateStore.loadCircuitBreakerState(workflowId, stepId)
                .get(persistenceTimeoutMs, TimeUnit.MILLISECONDS)
                .ifPresent(state -> {
                    importState(stepId, state);
                    log.info("Loaded circuit breaker state for step {}: {}", stepId, state.state());
                });
            loadedSteps.add(stepId);
        } catch (Exception e) {
            log.error("Failed to load circuit breaker state for step {}", stepId, e);
            // Continue without loaded state
            loadedSteps.add(stepId);
        }
    }
    /**
     * Persists the current snapshot for a step. In sync mode the call blocks up
     * to persistenceTimeoutMs; in async mode failures are logged via a
     * completion handler. Persistence failures never fail the caller.
     */
    private void persistState(String stepId) {
        CircuitStateSnapshot snapshot = exportState(stepId);
        if (snapshot == null) {
            return;
        }
        CompletableFuture<Void> future = stateStore.saveCircuitBreakerState(workflowId, stepId, snapshot);
        if (!asyncPersistence) {
            try {
                future.get(persistenceTimeoutMs, TimeUnit.MILLISECONDS);
                log.debug("Persisted circuit breaker state for step {}: {}", stepId, snapshot.state());
            } catch (Exception e) {
                log.error("Failed to persist circuit breaker state for step {}", stepId, e);
            }
        } else {
            future.thenRun(() ->
                log.debug("Async persisted circuit breaker state for step {}: {}", stepId, snapshot.state())
            ).exceptionally(e -> {
                log.error("Failed to async persist circuit breaker state for step {}", stepId, e);
                return null;
            });
        }
    }
    /**
     * Loads all persisted states for this workflow.
     * Useful for pre-loading state on workflow recovery.
     *
     * @return Future that completes when loading is done
     */
    public CompletableFuture<Void> loadAllStates() {
        log.info("Loading all circuit breaker states for workflow {}", workflowId);
        // In a full implementation, this would iterate through all persisted states
        // For now, states are loaded on-demand
        return CompletableFuture.completedFuture(null);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/PersistentWorkflowEngineFactory.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.core.*;
import ai.driftkit.workflow.engine.domain.WorkflowEngineConfig;
import lombok.Builder;
import lombok.Getter;
import java.util.ArrayList;
import java.util.List;
/**
* Factory for creating WorkflowEngine instances with persistence capabilities.
* This factory configures the engine with persistent retry executors and circuit breakers.
*/
public class PersistentWorkflowEngineFactory {
    /**
     * Configuration for persistent workflow engine.
     * All settings default to sensible development values: an in-memory store,
     * async persistence with a 5s timeout, and both persistence features enabled.
     */
    @Getter
    @Builder
    public static class PersistenceConfig {
        @Builder.Default
        private RetryStateStore stateStore = new InMemoryRetryStateStore();
        @Builder.Default
        private boolean asyncPersistence = true;
        @Builder.Default
        private long persistenceTimeoutMs = 5000;
        @Builder.Default
        private boolean enableCircuitBreakerPersistence = true;
        @Builder.Default
        private boolean enableRetryContextPersistence = true;
        @Builder.Default
        private CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig =
                CircuitBreaker.CircuitBreakerConfig.defaultConfig();
    }

    /**
     * Creates a WorkflowEngineConfig with persistence enabled.
     * Wires a (possibly persistent) circuit breaker and a retry-state
     * persistence listener into a RetryExecutor.
     *
     * @param workflowId The workflow instance ID
     * @param persistenceConfig The persistence configuration
     * @return The configured WorkflowEngineConfig
     */
    public static WorkflowEngineConfig createPersistentConfig(String workflowId,
                                                              PersistenceConfig persistenceConfig) {
        // Persistent breaker when enabled, plain in-memory breaker otherwise.
        CircuitBreaker breaker = persistenceConfig.enableCircuitBreakerPersistence
                ? new PersistentCircuitBreaker(
                        persistenceConfig.circuitBreakerConfig,
                        persistenceConfig.stateStore,
                        workflowId,
                        persistenceConfig.asyncPersistence,
                        persistenceConfig.persistenceTimeoutMs)
                : new CircuitBreaker(persistenceConfig.circuitBreakerConfig);

        // Retry listeners: optionally persist retry contexts between attempts.
        List<RetryListener> retryListeners = new ArrayList<>();
        if (persistenceConfig.enableRetryContextPersistence) {
            retryListeners.add(new RetryStatePersistenceListener(
                    persistenceConfig.stateStore,
                    workflowId,
                    persistenceConfig.asyncPersistence,
                    persistenceConfig.persistenceTimeoutMs));
        }

        RetryExecutor executor = new RetryExecutor(
                new ConditionalRetryStrategy(),
                breaker,
                new RetryMetrics(),
                retryListeners);

        return WorkflowEngineConfig.builder()
                .retryExecutor(executor)
                .build();
    }

    /**
     * Creates a WorkflowEngine with persistence enabled using default settings.
     *
     * @param workflowId The workflow instance ID
     * @return The configured WorkflowEngine
     */
    public static WorkflowEngine createPersistentEngine(String workflowId) {
        return createPersistentEngine(workflowId, PersistenceConfig.builder().build());
    }

    /**
     * Creates a WorkflowEngine with persistence enabled.
     *
     * @param workflowId The workflow instance ID
     * @param persistenceConfig The persistence configuration
     * @return The configured WorkflowEngine
     */
    public static WorkflowEngine createPersistentEngine(String workflowId,
                                                        PersistenceConfig persistenceConfig) {
        return new WorkflowEngine(createPersistentConfig(workflowId, persistenceConfig));
    }

    /**
     * Recovers retry state for a workflow from persistence.
     *
     * @param workflowId The workflow instance ID
     * @param persistenceConfig The persistence configuration
     * @return The RetryStatePersistenceListener that can be used to load persisted contexts
     */
    public static RetryStatePersistenceListener recoverRetryState(String workflowId,
                                                                  PersistenceConfig persistenceConfig) {
        return new RetryStatePersistenceListener(
                persistenceConfig.stateStore,
                workflowId,
                persistenceConfig.asyncPersistence,
                persistenceConfig.persistenceTimeoutMs);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/RetryStatePersistenceListener.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.annotations.RetryPolicy;
import ai.driftkit.workflow.engine.core.RetryListener;
import ai.driftkit.workflow.engine.domain.RetryContext;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
/**
* Retry listener that persists retry state to enable recovery after failures.
* This listener integrates with the RetryExecutor to save state at key points.
*/
@Slf4j
@RequiredArgsConstructor
public class RetryStatePersistenceListener implements RetryListener {
    // Field order is significant: @RequiredArgsConstructor generates the
    // constructor parameters in declaration order.
    private final RetryStateStore stateStore;
    private final String workflowId;
    private final boolean asyncPersistence;
    private final long persistenceTimeoutMs;

    /**
     * Creates a persistence listener with default settings
     * (async persistence, 5s timeout).
     *
     * @param stateStore The state store for persistence
     * @param workflowId The workflow instance ID
     */
    public RetryStatePersistenceListener(RetryStateStore stateStore, String workflowId) {
        this(stateStore, workflowId, true, 5000);
    }

    /** Saves the retry context before every attempt so a crash can resume mid-retry. */
    @Override
    public void beforeRetry(String stepId, RetryContext retryContext, RetryPolicy retryPolicy) {
        persistRetryContext(stepId, retryContext);
    }

    /** Removes the persisted context once the step finally succeeds. */
    @Override
    public void onRetrySuccess(String stepId, RetryContext retryContext, Object result) {
        cleanupPersistedState(stepId);
    }

    /**
     * On failure: refresh the persisted context while more attempts remain,
     * otherwise drop it since no retry will pick it up.
     */
    @Override
    public void onRetryFailure(String stepId, RetryContext retryContext, Exception exception, boolean willRetry) {
        if (willRetry) {
            persistRetryContext(stepId, retryContext);
        } else {
            cleanupPersistedState(stepId);
        }
    }

    /** Removes the persisted context when all retries have been exhausted. */
    @Override
    public void onRetryExhausted(String stepId, RetryContext retryContext, Exception lastException) {
        cleanupPersistedState(stepId);
    }

    /** Removes the persisted context when the retry loop is aborted. */
    @Override
    public void onRetryAborted(String stepId, RetryContext retryContext, Exception exception) {
        cleanupPersistedState(stepId);
    }

    // Writes the retry context to the store. Sync mode blocks up to
    // persistenceTimeoutMs; async mode logs completion/failure via callbacks.
    // Persistence failures never propagate to the retry loop.
    private void persistRetryContext(String stepId, RetryContext context) {
        CompletableFuture<Void> pending = stateStore.saveRetryContext(workflowId, stepId, context);
        if (asyncPersistence) {
            pending.thenRun(() ->
                log.debug("Async persisted retry context for step {} at attempt {}",
                    stepId, context.getAttemptNumber())
            ).exceptionally(e -> {
                log.error("Failed to async persist retry context for step {}", stepId, e);
                return null;
            });
            return;
        }
        try {
            pending.get(persistenceTimeoutMs, TimeUnit.MILLISECONDS);
            log.debug("Persisted retry context for step {} at attempt {}",
                stepId, context.getAttemptNumber());
        } catch (Exception e) {
            log.error("Failed to persist retry context for step {}", stepId, e);
            // Continue execution even if persistence fails
        }
    }

    // Deletes the persisted context for a step, using the same sync/async
    // discipline as persistRetryContext.
    private void cleanupPersistedState(String stepId) {
        CompletableFuture<Void> pending = stateStore.deleteRetryContext(workflowId, stepId);
        if (asyncPersistence) {
            pending.thenRun(() ->
                log.debug("Async cleaned up persisted state for step {}", stepId)
            ).exceptionally(e -> {
                log.error("Failed to async cleanup persisted state for step {}", stepId, e);
                return null;
            });
            return;
        }
        try {
            pending.get(persistenceTimeoutMs, TimeUnit.MILLISECONDS);
            log.debug("Cleaned up persisted state for step {}", stepId);
        } catch (Exception e) {
            log.error("Failed to cleanup persisted state for step {}", stepId, e);
        }
    }

    /**
     * Loads persisted retry context for a step.
     *
     * @param stepId The step ID
     * @return The persisted retry context, or null if none exists
     */
    public RetryContext loadPersistedContext(String stepId) {
        try {
            return stateStore.loadRetryContext(workflowId, stepId)
                .get(persistenceTimeoutMs, TimeUnit.MILLISECONDS)
                .orElse(null);
        } catch (Exception e) {
            log.error("Failed to load persisted retry context for step {}", stepId, e);
            return null;
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/RetryStateStore.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.domain.RetryContext;
import ai.driftkit.workflow.engine.core.CircuitBreaker;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
/**
 * Interface for persisting retry state to enable recovery after failures.
 * Implementations can use various backends like databases, caches, or files.
 *
 * <p>All operations are asynchronous and return {@link CompletableFuture}s;
 * each implementation decides on which executor the work actually runs.
 * Save operations for an existing (workflowId, stepId) pair are expected to
 * overwrite the previous value.</p>
 */
public interface RetryStateStore {
    /**
     * Saves retry context for a workflow step.
     *
     * @param workflowId The workflow instance ID
     * @param stepId The step ID
     * @param context The retry context to save
     * @return Future that completes when save is done (exceptionally on failure)
     */
    CompletableFuture<Void> saveRetryContext(String workflowId, String stepId, RetryContext context);
    /**
     * Loads retry context for a workflow step.
     *
     * @param workflowId The workflow instance ID
     * @param stepId The step ID
     * @return Optional containing the retry context if found, empty otherwise
     */
    CompletableFuture<Optional<RetryContext>> loadRetryContext(String workflowId, String stepId);
    /**
     * Deletes retry context for a workflow step.
     * Deleting a non-existent entry should complete normally (idempotent).
     *
     * @param workflowId The workflow instance ID
     * @param stepId The step ID
     * @return Future that completes when delete is done
     */
    CompletableFuture<Void> deleteRetryContext(String workflowId, String stepId);
    /**
     * Saves circuit breaker state for a workflow step.
     *
     * @param workflowId The workflow instance ID
     * @param stepId The step ID
     * @param state The circuit breaker state snapshot
     * @return Future that completes when save is done
     */
    CompletableFuture<Void> saveCircuitBreakerState(String workflowId, String stepId,
                                                     CircuitBreaker.CircuitStateSnapshot state);
    /**
     * Loads circuit breaker state for a workflow step.
     *
     * @param workflowId The workflow instance ID
     * @param stepId The step ID
     * @return Optional containing the circuit breaker state if found
     */
    CompletableFuture<Optional<CircuitBreaker.CircuitStateSnapshot>> loadCircuitBreakerState(String workflowId,
                                                                                              String stepId);
    /**
     * Deletes all retry state (retry contexts and circuit breaker snapshots)
     * for a workflow. Intended for cleanup when a workflow terminates.
     *
     * @param workflowId The workflow instance ID
     * @return Future that completes when delete is done
     */
    CompletableFuture<Void> deleteWorkflowState(String workflowId);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/SuspensionDataRepository.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.domain.SuspensionData;
import java.util.Optional;
/**
 * Repository for managing workflow suspension data separately from WorkflowInstance.
 * This allows workflows to be suspended without polluting the main instance state.
 *
 * <p>Implementations maintain two lookups: by workflow instance ID and by the
 * message ID associated with the suspension.</p>
 */
public interface SuspensionDataRepository {
    /**
     * Save suspension data for a workflow instance.
     * Saving again for the same instance replaces the previous entry.
     *
     * @param instanceId The workflow instance ID
     * @param suspensionData The suspension data to save
     */
    void save(String instanceId, SuspensionData suspensionData);
    /**
     * Find suspension data by workflow instance ID.
     *
     * @param instanceId The workflow instance ID
     * @return Optional containing the suspension data if found
     */
    Optional<SuspensionData> findByInstanceId(String instanceId);
    /**
     * Find suspension data by message ID.
     *
     * @param messageId The message ID associated with the suspension
     * @return Optional containing the suspension data if found
     */
    Optional<SuspensionData> findByMessageId(String messageId);
    /**
     * Delete suspension data for a workflow instance.
     * Deleting a non-existent entry is a no-op.
     *
     * @param instanceId The workflow instance ID
     */
    void deleteByInstanceId(String instanceId);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/WorkflowContextRepository.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import java.util.Optional;
/**
 * Repository interface for persisting and retrieving workflow contexts.
 * Implementations should handle the storage of workflow execution contexts
 * in a thread-safe manner.
 */
public interface WorkflowContextRepository {
    /**
     * Saves a workflow context. Saving again for the same instance ID
     * replaces the stored context.
     *
     * @param context The workflow context to save
     * @return The saved workflow context (implementations may return a defensive copy)
     */
    WorkflowContext save(WorkflowContext context);
    /**
     * Finds a workflow context by instance ID.
     *
     * @param instanceId The instance ID to search for
     * @return Optional containing the context if found, empty otherwise
     */
    Optional<WorkflowContext> findByInstanceId(String instanceId);
    /**
     * Deletes a workflow context by instance ID.
     *
     * @param instanceId The instance ID of the context to delete
     * @return true if deleted, false if not found
     */
    boolean deleteByInstanceId(String instanceId);
    /**
     * Checks if a context exists for the given instance ID.
     *
     * @param instanceId The instance ID to check
     * @return true if exists, false otherwise
     */
    boolean existsByInstanceId(String instanceId);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/WorkflowInstance.java
|
package ai.driftkit.workflow.engine.persistence;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.graph.WorkflowGraph;
import ai.driftkit.workflow.engine.chat.ChatContextHelper;
import ai.driftkit.common.domain.chat.ChatRequest;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Mutable representation of a running workflow instance.
 * This class maintains the current state of execution, including context,
 * status, and execution history.
 *
 * <p>While WorkflowGraph is the immutable blueprint, WorkflowInstance
 * represents the runtime state that can be persisted and resumed.</p>
 *
 * <p>NOTE(review): this class is not thread-safe except for {@code metadata}
 * (a ConcurrentHashMap); callers are presumed to confine an instance to one
 * thread or synchronize externally — confirm against the engine's usage.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class WorkflowInstance {

    /** Maximum stack frames rendered per throwable in error stack traces. */
    private static final int MAX_STACK_FRAMES = 10;

    /** Maximum cause-chain depth rendered, guarding against circular causes. */
    private static final int MAX_CAUSE_DEPTH = 10;

    /**
     * Unique identifier for this workflow instance (same as runId in context).
     */
    private String instanceId;
    /**
     * Reference to the workflow definition.
     */
    private String workflowId;
    /**
     * Version of the workflow being executed.
     */
    private String workflowVersion;
    /**
     * Chat ID associated with this workflow instance (optional).
     * Used for chat-based workflows to group multiple executions within the same conversation.
     */
    private String chatId;
    /**
     * Current execution context containing step outputs and state.
     */
    private WorkflowContext context;
    /**
     * Current status of the workflow execution.
     */
    private WorkflowStatus status;
    /**
     * ID of the current step being executed or waiting.
     */
    private String currentStepId;
    /**
     * ID of the next step to be executed (if determined).
     */
    private String nextStepId;
    /**
     * When the workflow instance was created (epoch millis).
     */
    private long createdAt;
    /**
     * When the workflow instance was last updated (epoch millis).
     */
    private long updatedAt;
    /**
     * When the workflow instance reached a terminal state (epoch millis, 0 if still running).
     */
    private long completedAt;
    /**
     * Execution history for debugging and tracing.
     */
    @Builder.Default
    private List<StepExecutionRecord> executionHistory = new ArrayList<>();
    /**
     * Metadata associated with this instance.
     */
    @Builder.Default
    private Map<String, Object> metadata = new ConcurrentHashMap<>();
    /**
     * Error information if the workflow failed.
     */
    private ErrorInfo errorInfo;

    /**
     * Creates a new workflow instance for a fresh run with a random instance ID.
     */
    public static WorkflowInstance newInstance(WorkflowGraph<?, ?> graph, Object triggerData) {
        String instanceId = UUID.randomUUID().toString();
        return newInstance(graph, triggerData, instanceId);
    }

    /**
     * Creates a new workflow instance with a specific instance ID and no chat association.
     */
    public static WorkflowInstance newInstance(WorkflowGraph<?, ?> graph, Object triggerData, String instanceId) {
        return newInstance(graph, triggerData, instanceId, null);
    }

    /**
     * Creates a new workflow instance with a specific instance ID and chat ID.
     * The instance starts in RUNNING state at the graph's initial step.
     */
    public static WorkflowInstance newInstance(WorkflowGraph<?, ?> graph, Object triggerData, String instanceId, String chatId) {
        WorkflowContext context = WorkflowContext.newRun(triggerData, instanceId);
        // Auto-initialize chat identifiers in the context for chat-triggered runs.
        if (triggerData instanceof ChatRequest chatRequest) {
            ChatContextHelper.setChatId(context, chatRequest.getChatId());
            ChatContextHelper.setUserId(context, chatRequest.getUserId());
        }
        long now = System.currentTimeMillis();
        return WorkflowInstance.builder()
            .instanceId(instanceId)
            .workflowId(graph.id())
            .workflowVersion(graph.version())
            .chatId(chatId)
            .context(context)
            .status(WorkflowStatus.RUNNING)
            .currentStepId(graph.initialStepId())
            .createdAt(now)
            .updatedAt(now)
            .build();
    }

    /**
     * Records the execution of a step in the history and touches {@code updatedAt}.
     * Note: the record's errorMessage is not populated by this method.
     */
    public void recordStepExecution(String stepId, Object input, Object output,
                                    long durationMs, boolean success) {
        StepExecutionRecord record = StepExecutionRecord.builder()
            .stepId(stepId)
            .input(input)
            .output(output)
            .executedAt(System.currentTimeMillis())
            .durationMs(durationMs)
            .success(success)
            .build();
        executionHistory.add(record);
        updatedAt = System.currentTimeMillis();
    }

    /**
     * Updates the workflow status, stamping {@code completedAt} for every
     * terminal state.
     */
    public void updateStatus(WorkflowStatus newStatus) {
        this.status = newStatus;
        long now = System.currentTimeMillis();
        this.updatedAt = now;
        // Fix: previously CANCELLED was skipped here even though isTerminal()
        // treats it as terminal, so cancelled instances never got a completedAt
        // and getTotalDurationMs() kept growing indefinitely.
        if (newStatus == WorkflowStatus.COMPLETED
                || newStatus == WorkflowStatus.FAILED
                || newStatus == WorkflowStatus.CANCELLED) {
            this.completedAt = now;
        }
    }

    /**
     * Suspends the workflow.
     * Note: SuspensionData is now stored separately in SuspensionDataRepository
     */
    public void suspend() {
        this.status = WorkflowStatus.SUSPENDED;
        this.updatedAt = System.currentTimeMillis();
    }

    /**
     * Resumes the workflow from suspension.
     *
     * @throws IllegalStateException if the workflow is not currently SUSPENDED
     */
    public void resume() {
        if (status != WorkflowStatus.SUSPENDED) {
            throw new IllegalStateException("Cannot resume workflow that is not suspended");
        }
        this.status = WorkflowStatus.RUNNING;
        this.updatedAt = System.currentTimeMillis();
    }

    /**
     * Marks the workflow as failed with error information.
     * Uses a single timestamp for errorInfo, updatedAt and completedAt so the
     * three fields are always consistent.
     */
    public void fail(Throwable error, String stepId) {
        long now = System.currentTimeMillis();
        this.status = WorkflowStatus.FAILED;
        this.errorInfo = new ErrorInfo(
            error.getClass().getName(),
            error.getMessage(),
            stepId,
            now,
            getStackTrace(error)
        );
        this.updatedAt = now;
        this.completedAt = now;
    }

    /**
     * Updates the context with a new step output.
     */
    public void updateContext(String stepId, Object output) {
        this.context.setStepOutput(stepId, output);
        this.updatedAt = System.currentTimeMillis();
    }

    /**
     * Sets a custom value in the workflow context.
     */
    public void setContextValue(String key, Object value) {
        this.context.setContextValue(key, value);
        this.updatedAt = System.currentTimeMillis();
    }

    /**
     * Gets the total execution duration in milliseconds. For non-terminal
     * instances this is measured against the current time.
     */
    public long getTotalDurationMs() {
        if (createdAt == 0) {
            return 0;
        }
        long endTime = completedAt != 0 ? completedAt : System.currentTimeMillis();
        return endTime - createdAt;
    }

    /**
     * Checks if the workflow is in a terminal state (COMPLETED, FAILED or CANCELLED).
     */
    public boolean isTerminal() {
        return status == WorkflowStatus.COMPLETED ||
               status == WorkflowStatus.FAILED ||
               status == WorkflowStatus.CANCELLED;
    }

    /**
     * Renders a truncated stack trace (frames and cause chain both capped)
     * for persisting in {@link ErrorInfo}.
     */
    private static String getStackTrace(Throwable error) {
        return getStackTrace(error, 0);
    }

    /**
     * Recursive helper; {@code causeDepth} caps cause-chain traversal so a
     * circular cause chain cannot cause unbounded recursion.
     */
    private static String getStackTrace(Throwable error, int causeDepth) {
        if (error == null) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        sb.append(error).append("\n");
        StackTraceElement[] stackTrace = error.getStackTrace();
        int limit = Math.min(stackTrace.length, MAX_STACK_FRAMES); // Limit stack trace depth
        for (int i = 0; i < limit; i++) {
            sb.append("\tat ").append(stackTrace[i]).append("\n");
        }
        if (stackTrace.length > limit) {
            sb.append("\t... ").append(stackTrace.length - limit).append(" more\n");
        }
        Throwable cause = error.getCause();
        if (cause != null && causeDepth < MAX_CAUSE_DEPTH) {
            sb.append("Caused by: ").append(getStackTrace(cause, causeDepth + 1));
        }
        return sb.toString();
    }

    /**
     * Workflow execution status.
     */
    public enum WorkflowStatus {
        /**
         * Workflow is actively executing.
         */
        RUNNING,
        /**
         * Workflow is suspended waiting for external input.
         */
        SUSPENDED,
        /**
         * Workflow completed successfully.
         */
        COMPLETED,
        /**
         * Workflow failed with an error.
         */
        FAILED,
        /**
         * Workflow was cancelled by user or system.
         */
        CANCELLED
    }

    /**
     * Record of a single step execution.
     */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class StepExecutionRecord {
        private String stepId;
        private Object input;
        private Object output;
        private long executedAt;
        private long durationMs;
        private boolean success;
        private String errorMessage;
    }

    /**
     * Information about workflow error.
     */
    public record ErrorInfo(
        String errorType,
        String errorMessage,
        String stepId,
        long occurredAt,
        String stackTrace
    ) {}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/WorkflowStateRepository.java
|
package ai.driftkit.workflow.engine.persistence;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
/**
 * Repository interface for persisting and retrieving workflow instance state.
 * Implementations can provide various storage backends (in-memory, database, etc.).
 *
 * <p>This interface supports both synchronous and asynchronous operations
 * to accommodate different storage technologies and performance requirements.</p>
 */
public interface WorkflowStateRepository {
    /**
     * Saves or updates a workflow instance.
     *
     * @param instance The workflow instance to save
     * @throws PersistenceException if the save operation fails
     */
    void save(WorkflowInstance instance);
    /**
     * Asynchronously saves or updates a workflow instance.
     *
     * <p>The default implementation delegates to {@link #save(WorkflowInstance)}
     * on the common ForkJoinPool; override to supply a dedicated executor.</p>
     *
     * @param instance The workflow instance to save
     * @return A future that completes when the save is done
     */
    default CompletableFuture<Void> saveAsync(WorkflowInstance instance) {
        return CompletableFuture.runAsync(() -> save(instance));
    }
    /**
     * Loads a workflow instance by its ID.
     *
     * @param instanceId The unique instance ID
     * @return The workflow instance if found, empty otherwise
     */
    Optional<WorkflowInstance> load(String instanceId);
    /**
     * Asynchronously loads a workflow instance by its ID.
     *
     * <p>The default implementation delegates to {@link #load(String)} on the
     * common ForkJoinPool; override to supply a dedicated executor.</p>
     *
     * @param instanceId The unique instance ID
     * @return A future containing the workflow instance if found
     */
    default CompletableFuture<Optional<WorkflowInstance>> loadAsync(String instanceId) {
        return CompletableFuture.supplyAsync(() -> load(instanceId));
    }
    /**
     * Deletes a workflow instance.
     *
     * @param instanceId The instance ID to delete
     * @return true if the instance was deleted, false if it didn't exist
     */
    boolean delete(String instanceId);
    /**
     * Finds all workflow instances with the given status.
     *
     * @param status The workflow status to filter by
     * @return List of matching workflow instances
     */
    List<WorkflowInstance> findByStatus(WorkflowInstance.WorkflowStatus status);
    /**
     * Finds all workflow instances for a specific workflow definition.
     *
     * @param workflowId The workflow definition ID
     * @return List of matching workflow instances
     */
    List<WorkflowInstance> findByWorkflowId(String workflowId);
    /**
     * Finds workflow instances by workflow ID and status.
     *
     * @param workflowId The workflow definition ID
     * @param status The workflow status
     * @return List of matching workflow instances
     */
    List<WorkflowInstance> findByWorkflowIdAndStatus(String workflowId,
                                                      WorkflowInstance.WorkflowStatus status);
    /**
     * Counts workflow instances by status.
     *
     * @param status The workflow status
     * @return The count of instances with the given status
     */
    long countByStatus(WorkflowInstance.WorkflowStatus status);
    /**
     * Deletes all completed workflow instances older than the specified age.
     * Useful for cleanup operations.
     *
     * @param ageInDays Age threshold in days
     * @return The number of instances deleted
     */
    int deleteCompletedOlderThan(int ageInDays);
    /**
     * Checks if a workflow instance exists.
     *
     * <p>The default implementation performs a full {@link #load(String)};
     * backends with a cheaper existence check should override it.</p>
     *
     * @param instanceId The instance ID to check
     * @return true if the instance exists, false otherwise
     */
    default boolean exists(String instanceId) {
        return load(instanceId).isPresent();
    }
    /**
     * Finds the most recent workflow instance for a given chat ID.
     * This is useful for chat-based workflows where multiple executions
     * can occur within the same conversation.
     *
     * @param chatId The chat ID to search for
     * @return The most recent workflow instance for the chat, if any
     */
    Optional<WorkflowInstance> findLatestByChatId(String chatId);
    /**
     * Finds the most recent suspended workflow instance for a given chat ID.
     * This is useful for resuming chat-based workflows.
     *
     * @param chatId The chat ID to search for
     * @return The most recent suspended workflow instance for the chat, if any
     */
    Optional<WorkflowInstance> findLatestSuspendedByChatId(String chatId);
    /**
     * Exception thrown when persistence operations fail.
     */
    class PersistenceException extends RuntimeException {
        public PersistenceException(String message) {
            super(message);
        }
        public PersistenceException(String message, Throwable cause) {
            super(message, cause);
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/inmemory/InMemoryAsyncStepStateRepository.java
|
package ai.driftkit.workflow.engine.persistence.inmemory;
import ai.driftkit.workflow.engine.domain.AsyncStepState;
import ai.driftkit.workflow.engine.persistence.AsyncStepStateRepository;
import lombok.extern.slf4j.Slf4j;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
/**
* In-memory implementation of AsyncStepStateRepository.
* Suitable for single-node deployments and testing.
*/
/**
 * In-memory implementation of AsyncStepStateRepository.
 * Suitable for single-node deployments and testing.
 *
 * <p>Thread-safe: all state lives in a single ConcurrentHashMap keyed by messageId.</p>
 */
@Slf4j
public class InMemoryAsyncStepStateRepository implements AsyncStepStateRepository {

    private final Map<String, AsyncStepState> storage = new ConcurrentHashMap<>();

    /**
     * Saves (or replaces) the state for its messageId.
     *
     * @throws IllegalArgumentException if state or its messageId is null
     */
    @Override
    public AsyncStepState save(AsyncStepState state) {
        if (state == null || state.getMessageId() == null) {
            throw new IllegalArgumentException("State and messageId cannot be null");
        }
        storage.put(state.getMessageId(), state);
        log.debug("Saved async step state for messageId: {}", state.getMessageId());
        return state;
    }

    /** Looks up state by message ID; empty for null or unknown IDs. */
    @Override
    public Optional<AsyncStepState> findByMessageId(String messageId) {
        if (messageId == null) {
            return Optional.empty();
        }
        return Optional.ofNullable(storage.get(messageId));
    }

    /** Removes the state for the given message ID; no-op for null/unknown IDs. */
    @Override
    public void deleteByMessageId(String messageId) {
        if (messageId != null) {
            storage.remove(messageId);
            log.debug("Deleted async step state for messageId: {}", messageId);
        }
    }

    @Override
    public boolean existsByMessageId(String messageId) {
        return messageId != null && storage.containsKey(messageId);
    }

    /**
     * Deletes all states whose startTime is strictly before the given timestamp.
     *
     * @return the number of states actually removed
     */
    @Override
    public int deleteOlderThan(long timestampMillis) {
        int deletedCount = 0;
        for (Map.Entry<String, AsyncStepState> entry : storage.entrySet()) {
            AsyncStepState state = entry.getValue();
            // Fix: use the two-arg remove(key, value) so we only delete the exact
            // state we inspected. The previous unconditional remove(key) could
            // race with a concurrent save() and delete a newer replacement state.
            if (state.getStartTime() < timestampMillis && storage.remove(entry.getKey(), state)) {
                deletedCount++;
            }
        }
        log.debug("Deleted {} async step states older than {}", deletedCount, timestampMillis);
        return deletedCount;
    }

    /**
     * Updates progress on an existing state.
     *
     * @return true if the state existed and was updated, false otherwise
     */
    @Override
    public boolean updateProgress(String messageId, int percentComplete, String statusMessage) {
        if (messageId == null) {
            return false;
        }
        AsyncStepState state = storage.get(messageId);
        if (state == null) {
            return false;
        }
        state.updateProgress(percentComplete, statusMessage);
        log.debug("Updated progress for messageId: {} to {}% - {}", messageId, percentComplete, statusMessage);
        return true;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/inmemory/InMemoryChatSessionRepository.java
|
package ai.driftkit.workflow.engine.persistence.inmemory;
import ai.driftkit.workflow.engine.domain.ChatSession;
import ai.driftkit.workflow.engine.domain.PageRequest;
import ai.driftkit.workflow.engine.domain.PageResult;
import ai.driftkit.workflow.engine.persistence.ChatSessionRepository;
import lombok.extern.slf4j.Slf4j;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* In-memory implementation of ChatSessionRepository.
* Suitable for development and testing, not for production use.
*/
/**
 * In-memory implementation of ChatSessionRepository.
 * Suitable for development and testing, not for production use.
 *
 * <p>Sessions are held in a ConcurrentHashMap keyed by chatId; queries sort and
 * page a snapshot of the values.</p>
 */
@Slf4j
public class InMemoryChatSessionRepository implements ChatSessionRepository {

    private final Map<String, ChatSession> sessions = new ConcurrentHashMap<>();

    /** Saves (or replaces) a session keyed by its chatId. */
    @Override
    public ChatSession save(ChatSession session) {
        sessions.put(session.getChatId(), session);
        log.debug("Saved chat session: {}", session.getChatId());
        return session;
    }

    @Override
    public Optional<ChatSession> findById(String chatId) {
        return Optional.ofNullable(sessions.get(chatId));
    }

    /** Returns a page of all sessions (archived included) belonging to the user. */
    @Override
    public PageResult<ChatSession> findByUserId(String userId, PageRequest pageRequest) {
        List<ChatSession> userSessions = sessions.values().stream()
            .filter(session -> userId.equals(session.getUserId()))
            .sorted(getSortComparator(pageRequest))
            .collect(Collectors.toList());
        return createPageResult(userSessions, pageRequest);
    }

    /** Returns a page of the user's non-archived sessions. */
    @Override
    public PageResult<ChatSession> findActiveByUserId(String userId, PageRequest pageRequest) {
        List<ChatSession> activeSessions = sessions.values().stream()
            .filter(session -> userId.equals(session.getUserId()))
            .filter(session -> !session.isArchived())
            .sorted(getSortComparator(pageRequest))
            .collect(Collectors.toList());
        return createPageResult(activeSessions, pageRequest);
    }

    @Override
    public void deleteById(String chatId) {
        sessions.remove(chatId);
        log.debug("Deleted chat session: {}", chatId);
    }

    @Override
    public boolean existsById(String chatId) {
        return sessions.containsKey(chatId);
    }

    /**
     * Builds the comparator requested by the page request, falling back to
     * chatId ordering for unknown sort keys.
     */
    private Comparator<ChatSession> getSortComparator(PageRequest pageRequest) {
        // Fix: switching on a null String throws NPE; map null to the default branch.
        String sortBy = pageRequest.getSortBy() == null ? "" : pageRequest.getSortBy();
        Comparator<ChatSession> comparator = switch (sortBy) {
            case "lastMessageTime" -> Comparator.comparing(ChatSession::getLastMessageTime);
            case "createdAt" -> Comparator.comparing(ChatSession::getCreatedAt);
            // Fix: nullsLast so sessions without a name cannot NPE the sort.
            case "name" -> Comparator.comparing(ChatSession::getName,
                    Comparator.nullsLast(String.CASE_INSENSITIVE_ORDER));
            default -> Comparator.comparing(ChatSession::getChatId);
        };
        return pageRequest.getSortDirection() == PageRequest.SortDirection.DESC
            ? comparator.reversed()
            : comparator;
    }

    /** Slices the sorted list into the requested page. */
    private PageResult<ChatSession> createPageResult(List<ChatSession> sessions, PageRequest pageRequest) {
        int start = (int) pageRequest.getOffset();
        int end = Math.min((start + pageRequest.getPageSize()), sessions.size());
        if (start >= sessions.size()) {
            return PageResult.empty(pageRequest.getPageNumber(), pageRequest.getPageSize());
        }
        List<ChatSession> pageContent = sessions.subList(start, end);
        return PageResult.<ChatSession>builder()
            .content(pageContent)
            .pageNumber(pageRequest.getPageNumber())
            .pageSize(pageRequest.getPageSize())
            .totalElements(sessions.size())
            .build();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/inmemory/InMemorySuspensionDataRepository.java
|
package ai.driftkit.workflow.engine.persistence.inmemory;
import ai.driftkit.workflow.engine.domain.SuspensionData;
import ai.driftkit.workflow.engine.persistence.SuspensionDataRepository;
import lombok.extern.slf4j.Slf4j;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
/**
* In-memory implementation of SuspensionDataRepository.
*/
/**
 * In-memory implementation of SuspensionDataRepository.
 *
 * <p>Maintains the primary map keyed by instance ID plus a secondary index
 * from message ID to instance ID for reverse lookups. Both are kept in sync
 * on save and delete.</p>
 */
@Slf4j
public class InMemorySuspensionDataRepository implements SuspensionDataRepository {

    private final Map<String, SuspensionData> suspensionDataByInstanceId = new ConcurrentHashMap<>();
    private final Map<String, String> instanceIdByMessageId = new ConcurrentHashMap<>();

    /**
     * Saves suspension data for an instance, replacing any previous entry and
     * keeping the message-id index consistent.
     *
     * @throws IllegalArgumentException if instanceId or suspensionData is null
     */
    @Override
    public void save(String instanceId, SuspensionData suspensionData) {
        if (instanceId == null || suspensionData == null) {
            throw new IllegalArgumentException("Instance ID and suspension data cannot be null");
        }
        SuspensionData previous = suspensionDataByInstanceId.put(instanceId, suspensionData);
        // Fix: when an instance is re-suspended with a different message ID, the
        // old index entry used to remain, leaving a stale messageId -> instanceId
        // mapping. Remove it (conditionally, so we never clobber another instance's entry).
        if (previous != null && previous.messageId() != null
                && !previous.messageId().equals(suspensionData.messageId())) {
            instanceIdByMessageId.remove(previous.messageId(), instanceId);
        }
        // Also index by message ID for quick lookup
        if (suspensionData.messageId() != null) {
            instanceIdByMessageId.put(suspensionData.messageId(), instanceId);
        }
        log.debug("Saved suspension data for instance: {}", instanceId);
    }

    @Override
    public Optional<SuspensionData> findByInstanceId(String instanceId) {
        if (instanceId == null) {
            return Optional.empty();
        }
        return Optional.ofNullable(suspensionDataByInstanceId.get(instanceId));
    }

    /** Resolves the message ID through the secondary index, then loads by instance ID. */
    @Override
    public Optional<SuspensionData> findByMessageId(String messageId) {
        if (messageId == null) {
            return Optional.empty();
        }
        String instanceId = instanceIdByMessageId.get(messageId);
        if (instanceId == null) {
            return Optional.empty();
        }
        return findByInstanceId(instanceId);
    }

    /**
     * Get instance ID by message ID for reverse lookup.
     */
    public Optional<String> getInstanceIdByMessageId(String messageId) {
        if (messageId == null) {
            return Optional.empty();
        }
        return Optional.ofNullable(instanceIdByMessageId.get(messageId));
    }

    /** Removes the entry and its message-id index; no-op for null/unknown IDs. */
    @Override
    public void deleteByInstanceId(String instanceId) {
        if (instanceId == null) {
            return;
        }
        SuspensionData removed = suspensionDataByInstanceId.remove(instanceId);
        if (removed == null) {
            return;
        }
        if (removed.messageId() != null) {
            instanceIdByMessageId.remove(removed.messageId());
        }
        // Fix: previously the deletion was only logged when the removed entry
        // had a message ID; log every actual removal.
        log.debug("Deleted suspension data for instance: {}", instanceId);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/inmemory/InMemoryWorkflowContextRepository.java
|
package ai.driftkit.workflow.engine.persistence.inmemory;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.persistence.WorkflowContextRepository;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
/**
* In-memory implementation of WorkflowContextRepository.
* This implementation stores workflow contexts in memory using a ConcurrentHashMap.
* Suitable for single-instance deployments and testing.
*/
/**
 * In-memory implementation of WorkflowContextRepository.
 * This implementation stores workflow contexts in memory using a ConcurrentHashMap.
 * Suitable for single-instance deployments and testing.
 *
 * <p>Defensive copies are made on both save and load so callers can never
 * mutate the stored context.</p>
 */
@Slf4j
public class InMemoryWorkflowContextRepository implements WorkflowContextRepository {

    private final Map<String, WorkflowContext> contexts = new ConcurrentHashMap<>();

    /**
     * Saves a defensive copy of the context, keyed by its instance ID.
     *
     * @throws IllegalArgumentException if context is null or its instance ID is blank
     */
    @Override
    public WorkflowContext save(WorkflowContext context) {
        if (context == null) {
            throw new IllegalArgumentException("Context cannot be null");
        }
        String instanceId = context.getInstanceId();
        if (StringUtils.isBlank(instanceId)) {
            throw new IllegalArgumentException("Instance ID cannot be null or blank");
        }
        // Create a copy to prevent external modifications
        WorkflowContext copy = copyOf(context);
        contexts.put(instanceId, copy);
        log.debug("Saved workflow context for instance: {}", instanceId);
        return copy;
    }

    /** Returns a defensive copy of the stored context, or empty if not found. */
    @Override
    public Optional<WorkflowContext> findByInstanceId(String instanceId) {
        if (StringUtils.isBlank(instanceId)) {
            return Optional.empty();
        }
        WorkflowContext context = contexts.get(instanceId);
        if (context == null) {
            return Optional.empty();
        }
        // Return a copy to prevent external modifications
        return Optional.of(copyOf(context));
    }

    @Override
    public boolean deleteByInstanceId(String instanceId) {
        if (StringUtils.isBlank(instanceId)) {
            return false;
        }
        WorkflowContext removed = contexts.remove(instanceId);
        if (removed != null) {
            log.debug("Deleted workflow context for instance: {}", instanceId);
            return true;
        }
        return false;
    }

    @Override
    public boolean existsByInstanceId(String instanceId) {
        if (StringUtils.isBlank(instanceId)) {
            return false;
        }
        return contexts.containsKey(instanceId);
    }

    /**
     * Single place for the defensive-copy logic previously duplicated in
     * save() and findByInstanceId().
     */
    private static WorkflowContext copyOf(WorkflowContext context) {
        return WorkflowContext.fromExisting(
            context.getRunId(),
            context.getTriggerData(),
            context.getStepOutputs(),
            context.getCustomData(),
            context.getInstanceId()
        );
    }

    /**
     * Gets the current number of stored contexts.
     * Useful for monitoring and testing.
     *
     * @return The number of contexts in memory
     */
    public int size() {
        return contexts.size();
    }

    /**
     * Clears all stored contexts.
     * Useful for testing.
     */
    public void clear() {
        contexts.clear();
        log.debug("Cleared all workflow contexts");
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/persistence/inmemory/InMemoryWorkflowStateRepository.java
|
package ai.driftkit.workflow.engine.persistence.inmemory;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.persistence.WorkflowStateRepository;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.TimeUnit;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
/**
* In-memory implementation of WorkflowStateRepository.
* Suitable for testing and development, or for production use cases
* where persistence across restarts is not required.
*
* <p>This implementation is thread-safe and uses concurrent data structures
* with read-write locks for optimal performance.</p>
*/
@Slf4j
public class InMemoryWorkflowStateRepository implements WorkflowStateRepository {
private final Map<String, WorkflowInstance> instances = new ConcurrentHashMap<>();
private final ReadWriteLock lock = new ReentrantReadWriteLock();
/**
 * Maximum number of instances to keep in memory.
 * Older completed instances will be evicted when this limit is reached.
 */
private final int maxInstances;
/**
 * Creates a repository with default settings (max 10,000 instances).
 */
public InMemoryWorkflowStateRepository() {
    this(10_000);
}
/**
 * Creates a repository with a specified maximum capacity.
 *
 * @param maxInstances Maximum number of instances to keep in memory; must be positive
 * @throws IllegalArgumentException if maxInstances is not positive
 */
public InMemoryWorkflowStateRepository(int maxInstances) {
    if (maxInstances <= 0) {
        throw new IllegalArgumentException("Max instances must be positive");
    }
    this.maxInstances = maxInstances;
    log.info("Created in-memory workflow repository with max capacity: {}", maxInstances);
}
@Override
public void save(WorkflowInstance instance) {
    if (instance == null) {
        throw new IllegalArgumentException("Instance cannot be null");
    }
    if (instance.getInstanceId() == null || instance.getInstanceId().isBlank()) {
        throw new IllegalArgumentException("Instance ID cannot be null or blank");
    }
    // The write lock makes the capacity check + eviction + put atomic; the map
    // itself is concurrent, but eviction must not race with other writers.
    lock.writeLock().lock();
    try {
        // Check capacity and evict if necessary. Updates to an existing ID are
        // exempt since they do not grow the map.
        if (instances.size() >= maxInstances && !instances.containsKey(instance.getInstanceId())) {
            evictOldestCompleted();
        }
        // Clone the instance to prevent external modifications
        WorkflowInstance cloned = cloneInstance(instance);
        instances.put(instance.getInstanceId(), cloned);
        log.debug("Saved workflow instance: {} (status: {})",
            instance.getInstanceId(), instance.getStatus());
    } finally {
        lock.writeLock().unlock();
    }
}
/**
 * Loads an instance by ID, returning a defensive clone so callers cannot
 * mutate the stored copy. Blank/null IDs short-circuit to empty.
 */
@Override
public Optional<WorkflowInstance> load(String instanceId) {
    if (instanceId == null || instanceId.isBlank()) {
        return Optional.empty();
    }
    lock.readLock().lock();
    try {
        WorkflowInstance stored = instances.get(instanceId);
        if (stored == null) {
            return Optional.empty();
        }
        // Return a clone to prevent external modifications
        return Optional.of(cloneInstance(stored));
    } finally {
        lock.readLock().unlock();
    }
}
@Override
public boolean delete(String instanceId) {
if (instanceId == null || instanceId.isBlank()) {
return false;
}
lock.writeLock().lock();
try {
WorkflowInstance removed = instances.remove(instanceId);
if (removed != null) {
log.debug("Deleted workflow instance: {}", instanceId);
return true;
}
return false;
} finally {
lock.writeLock().unlock();
}
}
@Override
public List<WorkflowInstance> findByStatus(WorkflowInstance.WorkflowStatus status) {
if (status == null) {
throw new IllegalArgumentException("Status cannot be null");
}
lock.readLock().lock();
try {
return instances.values().stream()
.filter(instance -> instance.getStatus() == status)
.map(this::cloneInstance)
.sorted(Comparator.comparing(WorkflowInstance::getCreatedAt).reversed())
.collect(Collectors.toList());
} finally {
lock.readLock().unlock();
}
}
@Override
public List<WorkflowInstance> findByWorkflowId(String workflowId) {
if (workflowId == null || workflowId.isBlank()) {
return Collections.emptyList();
}
lock.readLock().lock();
try {
return instances.values().stream()
.filter(instance -> workflowId.equals(instance.getWorkflowId()))
.map(this::cloneInstance)
.sorted(Comparator.comparing(WorkflowInstance::getCreatedAt).reversed())
.collect(Collectors.toList());
} finally {
lock.readLock().unlock();
}
}
@Override
public List<WorkflowInstance> findByWorkflowIdAndStatus(String workflowId,
WorkflowInstance.WorkflowStatus status) {
if (workflowId == null || workflowId.isBlank() || status == null) {
return Collections.emptyList();
}
lock.readLock().lock();
try {
return instances.values().stream()
.filter(instance -> workflowId.equals(instance.getWorkflowId()) &&
instance.getStatus() == status)
.map(this::cloneInstance)
.sorted(Comparator.comparing(WorkflowInstance::getCreatedAt).reversed())
.collect(Collectors.toList());
} finally {
lock.readLock().unlock();
}
}
@Override
public long countByStatus(WorkflowInstance.WorkflowStatus status) {
if (status == null) {
return 0;
}
lock.readLock().lock();
try {
return instances.values().stream()
.filter(instance -> instance.getStatus() == status)
.count();
} finally {
lock.readLock().unlock();
}
}
@Override
public int deleteCompletedOlderThan(int ageInDays) {
if (ageInDays < 0) {
throw new IllegalArgumentException("Age in days must be non-negative");
}
long cutoffTime = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(ageInDays);
lock.writeLock().lock();
try {
List<String> toDelete = instances.entrySet().stream()
.filter(entry -> {
WorkflowInstance instance = entry.getValue();
return instance.isTerminal() &&
instance.getCompletedAt() != 0 &&
instance.getCompletedAt() < cutoffTime;
})
.map(Map.Entry::getKey)
.collect(Collectors.toList());
toDelete.forEach(instances::remove);
if (!toDelete.isEmpty()) {
log.info("Deleted {} completed workflow instances older than {} days",
toDelete.size(), ageInDays);
}
return toDelete.size();
} finally {
lock.writeLock().unlock();
}
}
/**
* Gets the current number of stored instances.
*
* @return The number of instances in memory
*/
public int size() {
lock.readLock().lock();
try {
return instances.size();
} finally {
lock.readLock().unlock();
}
}
/**
* Clears all instances from memory.
*/
public void clear() {
lock.writeLock().lock();
try {
int count = instances.size();
instances.clear();
log.info("Cleared {} workflow instances from memory", count);
} finally {
lock.writeLock().unlock();
}
}
/**
* Evicts the oldest completed instance to make room for new ones.
* Called internally when capacity is reached.
*/
private void evictOldestCompleted() {
Optional<Map.Entry<String, WorkflowInstance>> oldest = instances.entrySet().stream()
.filter(entry -> entry.getValue().isTerminal())
.min(Comparator.comparing(entry -> entry.getValue().getCompletedAt()));
if (oldest.isPresent()) {
instances.remove(oldest.get().getKey());
log.debug("Evicted oldest completed instance: {} to maintain capacity",
oldest.get().getKey());
} else {
// No completed instances to evict, remove oldest running/suspended
Optional<Map.Entry<String, WorkflowInstance>> oldestAny = instances.entrySet().stream()
.min(Comparator.comparing(entry -> entry.getValue().getCreatedAt()));
oldestAny.ifPresent(entry -> {
instances.remove(entry.getKey());
log.warn("Evicted non-completed instance: {} to maintain capacity", entry.getKey());
});
}
}
/**
* Creates a deep clone of a workflow instance to prevent external modifications.
*
* @param instance The instance to clone
* @return A deep copy of the instance
*/
private WorkflowInstance cloneInstance(WorkflowInstance instance) {
// For a production implementation, consider using a proper deep cloning library
// or implement proper clone methods on all nested objects
return WorkflowInstance.builder()
.instanceId(instance.getInstanceId())
.workflowId(instance.getWorkflowId())
.workflowVersion(instance.getWorkflowVersion())
.chatId(instance.getChatId())
.context(instance.getContext()) // Note: WorkflowContext is immutable
.status(instance.getStatus())
.currentStepId(instance.getCurrentStepId())
.nextStepId(instance.getNextStepId())
.createdAt(instance.getCreatedAt())
.updatedAt(instance.getUpdatedAt())
.completedAt(instance.getCompletedAt())
.executionHistory(new ArrayList<>(instance.getExecutionHistory()))
.metadata(new ConcurrentHashMap<>(instance.getMetadata()))
.errorInfo(instance.getErrorInfo())
.build();
}
@Override
public Optional<WorkflowInstance> findLatestByChatId(String chatId) {
if (chatId == null) {
return Optional.empty();
}
lock.readLock().lock();
try {
return instances.values().stream()
.filter(instance -> chatId.equals(instance.getChatId()))
.max(Comparator.comparing(WorkflowInstance::getCreatedAt))
.map(this::cloneInstance);
} finally {
lock.readLock().unlock();
}
}
@Override
public Optional<WorkflowInstance> findLatestSuspendedByChatId(String chatId) {
if (chatId == null) {
return Optional.empty();
}
lock.readLock().lock();
try {
return instances.values().stream()
.filter(instance -> chatId.equals(instance.getChatId()))
.filter(instance -> instance.getStatus() == WorkflowInstance.WorkflowStatus.SUSPENDED)
.max(Comparator.comparing(WorkflowInstance::getCreatedAt))
.map(this::cloneInstance);
} finally {
lock.readLock().unlock();
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/AIFunctionSchema.java
|
package ai.driftkit.workflow.engine.schema;
import ai.driftkit.common.domain.chat.ChatMessage.PropertyType;
import ai.driftkit.workflow.engine.utils.ReflectionUtils;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.Serializable;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Reflection-generated schema describing a Java class for AI function calling.
 * A schema is a named list of {@link AIFunctionProperty} entries derived from
 * the class's fields, honoring the {@code @Schema*} annotations declared below.
 * Generated schemas are cached per class; cyclic references are broken with an
 * empty placeholder schema.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class AIFunctionSchema implements Serializable {
    // Cache of fully generated schemas, keyed by source class.
    private static final Map<Class<?>, AIFunctionSchema> schemaCache = new ConcurrentHashMap<>();
    // Classes currently being generated on this thread; used to detect cycles.
    // NOTE(review): entries are removed per class but the ThreadLocal itself is
    // never cleaned up -- verify acceptable for container redeploy scenarios.
    private static final ThreadLocal<Set<Class<?>>> processingClasses = ThreadLocal.withInitial(HashSet::new);
    private static final ObjectMapper MAPPER = new ObjectMapper();
    String schemaName;
    String description;
    List<AIFunctionProperty> properties;
    boolean isArray;
    boolean composable;
    boolean system;
    // Source class this schema was generated from; excluded from JSON output.
    @JsonIgnore
    Class<?> targetClass;
    public AIFunctionSchema(List<AIFunctionProperty> properties) {
        this.properties = properties;
    }
    public AIFunctionSchema(String schemaName, List<AIFunctionProperty> properties) {
        this(properties);
        this.schemaName = schemaName;
    }
    /**
     * Generates (or returns the cached) schema for a class using the default
     * RECURSIVE strategy.
     */
    public static AIFunctionSchema fromClass(Class<?> clazz) {
        return fromClass(clazz, SchemaGenerationStrategy.RECURSIVE);
    }
    /**
     * Generates (or returns the cached) schema for a class with an explicit
     * strategy. Both strategies currently share the same recursive generator;
     * JACKSON only adds exception wrapping (see fromClassUsingJackson).
     */
    public static AIFunctionSchema fromClass(Class<?> clazz, SchemaGenerationStrategy strategy) {
        if (strategy == SchemaGenerationStrategy.JACKSON) {
            return fromClassUsingJackson(clazz);
        } else {
            return fromClassRecursive(clazz, null);
        }
    }
    /**
     * JACKSON-strategy entry point. Despite the name, it delegates to the
     * recursive generator and wraps failures in a RuntimeException; no Jackson
     * introspection is performed here.
     */
    private static AIFunctionSchema fromClassUsingJackson(Class<?> clazz) {
        if (schemaCache.containsKey(clazz)) {
            return schemaCache.get(clazz);
        }
        try {
            AIFunctionSchema schema = fromClassRecursive(clazz, null);
            schemaCache.put(clazz, schema);
            return schema;
        } catch (Exception e) {
            throw new RuntimeException("Failed to generate schema for class: " + clazz.getName(), e);
        }
    }
    /**
     * Core generator: walks the class's accessible fields (including inherited
     * ones), builds a property per field, and caches the result. A class seen
     * again while still being processed on this thread (a cycle) yields an
     * empty schema placeholder instead of infinite recursion.
     */
    private static AIFunctionSchema fromClassRecursive(Class<?> clazz, Type genericType) {
        if (schemaCache.containsKey(clazz)) {
            return schemaCache.get(clazz);
        }
        Set<Class<?>> currentlyProcessing = processingClasses.get();
        if (currentlyProcessing.contains(clazz)) {
            return new AIFunctionSchema(clazz.getSimpleName(), new ArrayList<>());
        }
        currentlyProcessing.add(clazz);
        try {
            List<AIFunctionProperty> properties = new ArrayList<>();
            String schemaName = clazz.getSimpleName();
            SchemaName schemaNameAnnotation = clazz.getAnnotation(SchemaName.class);
            if (schemaNameAnnotation != null && StringUtils.isNotBlank(schemaNameAnnotation.value())) {
                schemaName = schemaNameAnnotation.value();
            }
            String description = null;
            SchemaDescription descriptionAnnotation = clazz.getAnnotation(SchemaDescription.class);
            if (descriptionAnnotation != null && StringUtils.isNotBlank(descriptionAnnotation.value())) {
                description = descriptionAnnotation.value();
            }
            // Get all accessible fields from the class and its superclasses
            List<Field> accessibleFields = ReflectionUtils.getAccessibleFields(clazz, true);
            for (Field field : accessibleFields) {
                // Skip ignored fields
                if (field.isAnnotationPresent(JsonIgnore.class) ||
                        field.isAnnotationPresent(SchemaIgnore.class)) {
                    continue;
                }
                AIFunctionProperty property = createPropertyFromField(field);
                if (property != null) {
                    properties.add(property);
                }
            }
            AIFunctionSchema schema = new AIFunctionSchema(schemaName, properties);
            schema.setTargetClass(clazz);
            if (description != null) {
                schema.setDescription(description);
            }
            SchemaArray schemaArrayAnnotation = clazz.getAnnotation(SchemaArray.class);
            if (schemaArrayAnnotation != null) {
                schema.setArray(true);
            }
            // Check for @SchemaSystem annotation
            SchemaSystem schemaSystemAnnotation = clazz.getAnnotation(SchemaSystem.class);
            if (schemaSystemAnnotation != null) {
                schema.setSystem(schemaSystemAnnotation.value());
            }
            schemaCache.put(clazz, schema);
            return schema;
        } finally {
            currentlyProcessing.remove(clazz);
        }
    }
    /**
     * Builds a schema property for a single field, honoring @SchemaProperty
     * metadata and recursing into nested object types.
     */
    private static AIFunctionProperty createPropertyFromField(Field field) {
        String name = field.getName();
        Class<?> type = field.getType();
        Type genericType = field.getGenericType();
        SchemaProperty propertyAnnotation = field.getAnnotation(SchemaProperty.class);
        AIFunctionProperty.AIFunctionPropertyBuilder builder = AIFunctionProperty.builder()
                .name(name)
                .nameId(name);
        if (propertyAnnotation != null) {
            if (StringUtils.isNotBlank(propertyAnnotation.nameId())) {
                builder.nameId(propertyAnnotation.nameId());
            }
            if (StringUtils.isNotBlank(propertyAnnotation.dataNameId())) {
                builder.dataNameId(propertyAnnotation.dataNameId());
            }
            if (StringUtils.isNotBlank(propertyAnnotation.description())) {
                builder.description(propertyAnnotation.description());
            } else {
                builder.description("Property " + name);
            }
            if (StringUtils.isNotBlank(propertyAnnotation.defaultValue())) {
                builder.defaultValue(propertyAnnotation.defaultValue());
            }
            // MIN_VALUE / MAX_VALUE are the annotation defaults meaning "unset".
            if (propertyAnnotation.minValue() != Integer.MIN_VALUE) {
                builder.minValue(propertyAnnotation.minValue());
            }
            if (propertyAnnotation.maxValue() != Integer.MAX_VALUE) {
                builder.maxValue(propertyAnnotation.maxValue());
            }
            if (propertyAnnotation.minLength() > 0) {
                builder.minLength(propertyAnnotation.minLength());
            }
            if (propertyAnnotation.maxLength() > 0) {
                builder.maxLength(propertyAnnotation.maxLength());
            }
            builder.isRequired(propertyAnnotation.required());
            builder.isMultiSelect(propertyAnnotation.multiSelect());
            builder.isArray(propertyAnnotation.array());
            builder.valueAsNameId(propertyAnnotation.valueAsNameId());
            if (propertyAnnotation.values() != null && propertyAnnotation.values().length > 0) {
                builder.type(PropertyType.ENUM);
                builder.values(List.of(propertyAnnotation.values()));
            } else if (propertyAnnotation.type() != Void.class) {
                if (propertyAnnotation.type().isEnum()) {
                    setEnumValues(propertyAnnotation.type(), builder);
                }
            }
        } else {
            builder.description("Property " + name);
        }
        // Enum fields without annotation-provided values pick up their constants here.
        if (type.isEnum() && CollectionUtils.isEmpty(builder.values)) {
            setEnumValues(type, builder);
        }
        // Resolve the property type from the field's declared type. The branches
        // below are mutually exclusive; in particular, arrays are terminal.
        // (Bug fix: the array branch previously stood outside the else-if chain,
        // so array fields fell through to the trailing else and had their
        // resolved type clobbered with OBJECT plus a bogus nested schema for the
        // array class itself. An always-false "genericType.getClass().isEnum()"
        // guard was also removed: a java.lang.reflect.Type implementation's
        // runtime class is never an enum.)
        if (type.isArray()) {
            builder.isArray(true);
            Class<?> componentType = type.getComponentType();
            if (isSimpleType(componentType)) {
                builder.type(mapClassToPropertyType(componentType));
            } else {
                builder.type(PropertyType.OBJECT);
                builder.nestedSchema(fromClassRecursive(componentType, null));
            }
        } else if (Collection.class.isAssignableFrom(type)) {
            builder.isArray(true);
            if (genericType instanceof ParameterizedType paramType) {
                Type actualTypeArg = paramType.getActualTypeArguments()[0];
                if (actualTypeArg instanceof Class<?> genericClass) {
                    if (isSimpleType(genericClass)) {
                        builder.type(mapClassToPropertyType(genericClass));
                    } else {
                        builder.type(PropertyType.OBJECT);
                        builder.nestedSchema(fromClassRecursive(genericClass, null));
                    }
                } else if (actualTypeArg instanceof ParameterizedType nestedParamType) {
                    // Collection of collections (e.g. List<List<String>>).
                    Class<?> rawType = (Class<?>) nestedParamType.getRawType();
                    if (Collection.class.isAssignableFrom(rawType)) {
                        builder.type(PropertyType.ARRAY);
                        Type nestedTypeArg = nestedParamType.getActualTypeArguments()[0];
                        if (nestedTypeArg instanceof Class<?> nestedClass) {
                            AIFunctionSchema nestedSchema = new AIFunctionSchema();
                            nestedSchema.setArray(true);
                            if (isSimpleType(nestedClass)) {
                                nestedSchema.setSchemaName(nestedClass.getSimpleName() + "Array");
                                AIFunctionProperty itemProp = AIFunctionProperty.builder()
                                        .name("item")
                                        .type(mapClassToPropertyType(nestedClass))
                                        .build();
                                nestedSchema.setProperties(List.of(itemProp));
                            } else {
                                nestedSchema.setSchemaName(nestedClass.getSimpleName() + "Array");
                                nestedSchema.setProperties(fromClassRecursive(nestedClass, null).getProperties());
                            }
                            builder.nestedSchema(nestedSchema);
                        }
                    } else {
                        builder.type(PropertyType.OBJECT);
                        builder.nestedSchema(new AIFunctionSchema(rawType.getSimpleName(), new ArrayList<>()));
                    }
                }
            } else {
                // Raw (non-generic) collection: element type unknown.
                builder.type(PropertyType.OBJECT);
            }
        } else if (Map.class.isAssignableFrom(type)) {
            builder.type(PropertyType.MAP);
            if (genericType instanceof ParameterizedType paramType) {
                Type keyType = paramType.getActualTypeArguments()[0];
                Type valueType = paramType.getActualTypeArguments()[1];
                if (valueType instanceof Class<?> valueClass && !isSimpleType(valueClass)) {
                    AIFunctionSchema valueSchema = fromClassRecursive(valueClass, null);
                    builder.nestedSchema(valueSchema);
                }
                Map<String, Object> additionalProps = new HashMap<>();
                additionalProps.put("keyType", keyType.getTypeName());
                additionalProps.put("valueType", valueType.getTypeName());
                builder.additionalProperties(additionalProps);
            }
        } else if (isSimpleType(type)) {
            if (builder.type == null) {
                builder.type(mapClassToPropertyType(type));
                // NOTE(review): for enum fields builder.type was already set by the
                // earlier setEnumValues call, so this inner enum block (including
                // the @SchemaEnumValues override) appears unreachable -- confirm
                // before relying on @SchemaEnumValues here.
                if (type.isEnum()) {
                    List<String> enumValues = new ArrayList<>();
                    for (Object enumConstant : type.getEnumConstants()) {
                        enumValues.add(enumConstant.toString());
                    }
                    builder.values(enumValues);
                    SchemaEnumValues enumValuesAnnotation = field.getAnnotation(SchemaEnumValues.class);
                    if (enumValuesAnnotation != null && enumValuesAnnotation.value().length > 0) {
                        builder.values(Arrays.asList(enumValuesAnnotation.value()));
                    }
                }
            }
        } else {
            // Arbitrary object: recurse into its fields.
            builder.type(PropertyType.OBJECT);
            builder.nestedSchema(fromClassRecursive(type, genericType));
        }
        return builder.build();
    }
    /**
     * Marks the builder as ENUM-typed and fills values with the enum constants'
     * string forms.
     */
    private static void setEnumValues(Class<?> type, AIFunctionProperty.AIFunctionPropertyBuilder builder) {
        builder.type(PropertyType.ENUM);
        Object[] enumConstants = type.getEnumConstants();
        List<String> enumValues = new ArrayList<>();
        for (Object constant : enumConstants) {
            enumValues.add(constant.toString());
        }
        builder.values(enumValues);
    }
    /**
     * True for types mapped directly to a scalar PropertyType (primitives,
     * common wrappers, String, Date, UUID, enums) -- no recursion needed.
     */
    private static boolean isSimpleType(Class<?> type) {
        return type.isPrimitive() ||
                type.equals(String.class) ||
                type.equals(Integer.class) ||
                type.equals(Double.class) ||
                type.equals(Float.class) ||
                type.equals(Boolean.class) ||
                type.equals(Long.class) ||
                type.equals(Date.class) ||
                type.equals(UUID.class) ||
                type.isEnum();
    }
    /**
     * Maps a Java class to the coarse PropertyType used in schemas.
     */
    private static PropertyType mapClassToPropertyType(Class<?> clazz) {
        if (clazz.equals(String.class) || clazz.equals(UUID.class) || clazz.equals(Date.class)) {
            return PropertyType.STRING;
        } else if (clazz.equals(Integer.class) || clazz.equals(int.class) ||
                clazz.equals(Long.class) || clazz.equals(long.class) ||
                clazz.equals(Short.class) || clazz.equals(short.class) ||
                clazz.equals(Byte.class) || clazz.equals(byte.class)) {
            return PropertyType.INTEGER;
        } else if (clazz.equals(Double.class) || clazz.equals(double.class) ||
                clazz.equals(Float.class) || clazz.equals(float.class)) {
            return PropertyType.DOUBLE;
        } else if (clazz.equals(Boolean.class) || clazz.equals(boolean.class)) {
            return PropertyType.BOOLEAN;
        } else if (clazz.isEnum()) {
            return PropertyType.ENUM;
        } else if (Collection.class.isAssignableFrom(clazz)) {
            return PropertyType.ARRAY;
        } else if (Map.class.isAssignableFrom(clazz)) {
            return PropertyType.MAP;
        } else {
            return PropertyType.OBJECT;
        }
    }
    /**
     * Drops all cached schemas (e.g. between tests or after class reloads).
     */
    public static void clearCache() {
        schemaCache.clear();
    }
    /** How {@link #fromClass(Class, SchemaGenerationStrategy)} builds schemas. */
    public enum SchemaGenerationStrategy {
        RECURSIVE,
        JACKSON
    }
    /**
     * A single property (field) of a schema: name, type, constraints, and an
     * optional nested schema for object/collection values.
     */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class AIFunctionProperty implements Serializable {
        public static AIFunctionPropertyBuilder builder() {
            return new AIFunctionPropertyBuilder();
        }
        /**
         * Hand-written builder (takes precedence over Lombok's). Fields are
         * package-visible to the enclosing class, which reads builder.type and
         * builder.values directly during schema generation.
         */
        public static class AIFunctionPropertyBuilder {
            private String name;
            private String nameId;
            private List<String> nameIds;
            private String description;
            private PropertyType type;
            private String dataNameId;
            private String defaultValue;
            private Integer maxValue;
            private Integer minValue;
            private boolean isRequired;
            private List<String> values;
            private Integer minLength;
            private Integer maxLength;
            private boolean isArray;
            private boolean valueAsNameId;
            private boolean isMultiSelect;
            private AIFunctionSchema nestedSchema;
            private Map<String, Object> additionalProperties;
            public AIFunctionPropertyBuilder name(String name) {
                this.name = name;
                return this;
            }
            public AIFunctionPropertyBuilder nameId(String nameId) {
                this.nameId = nameId;
                return this;
            }
            public AIFunctionPropertyBuilder nameIds(List<String> nameIds) {
                this.nameIds = nameIds;
                return this;
            }
            public AIFunctionPropertyBuilder description(String description) {
                this.description = description;
                return this;
            }
            public AIFunctionPropertyBuilder type(PropertyType type) {
                this.type = type;
                return this;
            }
            public AIFunctionPropertyBuilder dataNameId(String dataNameId) {
                this.dataNameId = dataNameId;
                return this;
            }
            public AIFunctionPropertyBuilder defaultValue(String defaultValue) {
                this.defaultValue = defaultValue;
                return this;
            }
            public AIFunctionPropertyBuilder maxValue(Integer maxValue) {
                this.maxValue = maxValue;
                return this;
            }
            public AIFunctionPropertyBuilder minValue(Integer minValue) {
                this.minValue = minValue;
                return this;
            }
            public AIFunctionPropertyBuilder isRequired(boolean isRequired) {
                this.isRequired = isRequired;
                return this;
            }
            public AIFunctionPropertyBuilder values(List<String> values) {
                this.values = values;
                return this;
            }
            public AIFunctionPropertyBuilder minLength(Integer minLength) {
                this.minLength = minLength;
                return this;
            }
            public AIFunctionPropertyBuilder maxLength(Integer maxLength) {
                this.maxLength = maxLength;
                return this;
            }
            public AIFunctionPropertyBuilder isArray(boolean isArray) {
                this.isArray = isArray;
                return this;
            }
            public AIFunctionPropertyBuilder valueAsNameId(boolean valueAsNameId) {
                this.valueAsNameId = valueAsNameId;
                return this;
            }
            public AIFunctionPropertyBuilder isMultiSelect(boolean isMultiSelect) {
                this.isMultiSelect = isMultiSelect;
                return this;
            }
            public AIFunctionPropertyBuilder nestedSchema(AIFunctionSchema nestedSchema) {
                this.nestedSchema = nestedSchema;
                return this;
            }
            public AIFunctionPropertyBuilder additionalProperties(Map<String, Object> additionalProperties) {
                this.additionalProperties = additionalProperties;
                return this;
            }
            public AIFunctionProperty build() {
                AIFunctionProperty property = new AIFunctionProperty();
                property.name = this.name;
                property.nameId = this.nameId;
                property.nameIds = this.nameIds;
                property.description = this.description;
                property.type = this.type;
                property.dataNameId = this.dataNameId;
                property.defaultValue = this.defaultValue;
                property.maxValue = this.maxValue;
                property.minValue = this.minValue;
                property.isRequired = this.isRequired;
                property.values = this.values;
                property.minLength = this.minLength;
                property.maxLength = this.maxLength;
                property.isArray = this.isArray;
                property.valueAsNameId = this.valueAsNameId;
                property.isMultiSelect = this.isMultiSelect;
                property.nestedSchema = this.nestedSchema;
                property.additionalProperties = this.additionalProperties;
                return property;
            }
        }
        private String name;
        private String nameId;
        private List<String> nameIds;
        private String description;
        private PropertyType type;
        private String dataNameId;
        private String defaultValue;
        private Integer maxValue;
        private Integer minValue;
        private boolean isRequired;
        private List<String> values;
        private Integer minLength;
        private Integer maxLength;
        private boolean isArray;
        private boolean valueAsNameId;
        private boolean isMultiSelect;
        private AIFunctionSchema nestedSchema;
        private Map<String, Object> additionalProperties;
        /**
         * Serialized type: reported as ENUM whenever explicit values exist,
         * regardless of the stored type field.
         */
        @JsonGetter
        public PropertyType getType() {
            if (CollectionUtils.isNotEmpty(values)) {
                return PropertyType.ENUM;
            }
            return type;
        }
        /** Adds a free-form extra attribute, lazily creating the map. */
        public void addAdditionalProperty(String key, Object value) {
            if (additionalProperties == null) {
                additionalProperties = new HashMap<>();
            }
            additionalProperties.put(key, value);
        }
    }
    /** Marks a class whose schema represents an array of itself. */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.TYPE)
    public @interface SchemaArray {
    }
    /** Excludes a field from schema generation. */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    public @interface SchemaIgnore {
    }
    /** Per-field schema metadata (naming, constraints, enum values). */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    public @interface SchemaProperty {
        String nameId() default "";
        String description() default "";
        String defaultValue() default "";
        String dataNameId() default "";
        int minValue() default Integer.MIN_VALUE;
        int maxValue() default Integer.MAX_VALUE;
        int minLength() default 0;
        int maxLength() default 0;
        boolean required() default false;
        boolean multiSelect() default false;
        boolean array() default false;
        boolean valueAsNameId() default false;
        Class<?> type() default Void.class;
        String[] values() default {};
    }
    /** Overrides the value list for an enum-typed field. */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    public @interface SchemaEnumValues {
        String[] value();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/InMemorySchemaRegistry.java
|
package ai.driftkit.workflow.engine.schema;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import lombok.extern.slf4j.Slf4j;
/**
 * In-memory implementation of SchemaRegistry using concurrent data structures.
 * <p>Registrations and clears are synchronized so the two directional maps
 * (name -&gt; class and class -&gt; name) are always updated atomically and can
 * never be observed in a mutually inconsistent state; lookups remain
 * lock-free reads on the concurrent maps.</p>
 */
@Slf4j
public class InMemorySchemaRegistry implements SchemaRegistry {
    private final Map<String, Class<?>> nameToClass = new ConcurrentHashMap<>();
    private final Map<Class<?>, String> classToName = new ConcurrentHashMap<>();
    /**
     * Registers a bidirectional schemaName &lt;-&gt; schemaClass mapping, replacing
     * (and logging) any conflicting prior registration in either direction.
     * Synchronized: the method performs check-then-act across two maps, which
     * ConcurrentHashMap alone does not make atomic. Fix: the stale
     * classToName entry for a class displaced from a name is now removed,
     * which previously left getSchemaName() returning a name that no longer
     * mapped back to that class.
     *
     * @throws IllegalArgumentException if either argument is null
     */
    @Override
    public synchronized void registerSchema(String schemaName, Class<?> schemaClass) {
        if (schemaName == null || schemaClass == null) {
            throw new IllegalArgumentException("Schema name and class cannot be null");
        }
        // Check for conflicts
        Class<?> existingClass = nameToClass.get(schemaName);
        if (existingClass != null && !existingClass.equals(schemaClass)) {
            log.warn("Overwriting schema registration for name '{}': {} -> {}",
                schemaName, existingClass.getName(), schemaClass.getName());
            // Drop the displaced class's reverse mapping if it still points at
            // this name; otherwise getSchemaName(existingClass) would keep
            // returning a name now bound to a different class.
            classToName.remove(existingClass, schemaName);
        }
        String existingName = classToName.get(schemaClass);
        if (existingName != null && !existingName.equals(schemaName)) {
            log.warn("Overwriting schema registration for class {}: '{}' -> '{}'",
                schemaClass.getName(), existingName, schemaName);
            // Remove old mapping
            nameToClass.remove(existingName);
        }
        nameToClass.put(schemaName, schemaClass);
        classToName.put(schemaClass, schemaName);
        log.debug("Registered schema '{}' -> {}", schemaName, schemaClass.getName());
    }
    /** Looks up the class registered under the given name, if any. */
    @Override
    public Optional<Class<?>> getSchemaClass(String schemaName) {
        return Optional.ofNullable(nameToClass.get(schemaName));
    }
    /** Looks up the name the given class was registered under, if any. */
    @Override
    public Optional<String> getSchemaName(Class<?> schemaClass) {
        return Optional.ofNullable(classToName.get(schemaClass));
    }
    /** Removes all registrations; synchronized so both maps clear atomically. */
    @Override
    public synchronized void clear() {
        nameToClass.clear();
        classToName.clear();
        log.debug("Cleared all schema registrations");
    }
    /**
     * Get the number of registered schemas.
     *
     * @return The number of registered schemas
     */
    public int size() {
        return nameToClass.size();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/SchemaDescription.java
|
package ai.driftkit.workflow.engine.schema;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation to provide a description for a class or field in the AI function schema.
 * This description helps AI models understand the purpose and expected content.
 * Read reflectively at schema-generation time, hence runtime retention.
 */
@Target({ElementType.TYPE, ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface SchemaDescription {
    /**
     * The description text for the schema element.
     *
     * @return The description
     */
    String value();
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/SchemaName.java
|
package ai.driftkit.workflow.engine.schema;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation to specify a custom name for a class or field in the AI function schema.
 * This name will be used instead of the default class/field name in the generated schema.
 * Blank values are ignored by the schema generator (the default name is kept).
 */
@Target({ElementType.TYPE, ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface SchemaName {
    /**
     * The custom name to use in the schema.
     *
     * @return The schema name
     */
    String value();
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/SchemaProperty.java
|
package ai.driftkit.workflow.engine.schema;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation to provide detailed metadata for a property in the AI function schema.
 * This helps AI models understand the constraints and expectations for each field.
 * <p>NOTE(review): a differently-shaped nested annotation
 * {@code AIFunctionSchema.SchemaProperty} exists in this package (with int
 * min/max values, type(), array(), multiSelect(), etc.). Unqualified references
 * inside {@code AIFunctionSchema} resolve to the nested one, so this top-level
 * annotation may be unused by the generator -- confirm which one field owners
 * are expected to apply.</p>
 */
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SchemaProperty {
    /**
     * Whether this property is required.
     *
     * @return true if the property is required
     */
    boolean required() default false;
    /**
     * Description of the property for AI understanding.
     *
     * @return The property description
     */
    String description() default "";
    /**
     * Optional name ID for localization or reference.
     *
     * @return The name ID
     */
    String nameId() default "";
    /**
     * Allowed values for this property (for enums or constrained fields).
     *
     * @return Array of allowed values
     */
    String[] values() default {};
    /**
     * Minimum value (for numeric types).
     *
     * @return The minimum value as a string
     */
    String min() default "";
    /**
     * Maximum value (for numeric types).
     *
     * @return The maximum value as a string
     */
    String max() default "";
    /**
     * Pattern for string validation (regex).
     *
     * @return The validation pattern
     */
    String pattern() default "";
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/SchemaRegistry.java
|
package ai.driftkit.workflow.engine.schema;
import java.util.Optional;
/**
 * Registry for mapping schema names to their corresponding Java classes.
 * This eliminates the need to look up suspension data just to get schema information.
 * The mapping is bidirectional: a name resolves to a class and a class back to
 * its name.
 */
public interface SchemaRegistry {
    /**
     * Register a schema class with a given name.
     * Re-registering an existing name or class replaces the prior mapping.
     *
     * @param schemaName The name of the schema
     * @param schemaClass The Java class representing the schema
     */
    void registerSchema(String schemaName, Class<?> schemaClass);
    /**
     * Get the Java class for a given schema name.
     *
     * @param schemaName The name of the schema
     * @return The Java class if registered, empty otherwise
     */
    Optional<Class<?>> getSchemaClass(String schemaName);
    /**
     * Get the schema name for a given Java class.
     *
     * @param schemaClass The Java class
     * @return The schema name if registered, empty otherwise
     */
    Optional<String> getSchemaName(Class<?> schemaClass);
    /**
     * Check if a schema name is registered.
     * Default implementation delegates to {@link #getSchemaClass(String)}.
     *
     * @param schemaName The name of the schema
     * @return true if registered, false otherwise
     */
    default boolean hasSchema(String schemaName) {
        return getSchemaClass(schemaName).isPresent();
    }
    /**
     * Clear all registered schemas.
     */
    void clear();
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/SchemaSystem.java
|
package ai.driftkit.workflow.engine.schema;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation to mark a schema object as a system message.
 * When applied to a class, the generated ChatMessageTask will have the system flag set to true.
 * The schema generator copies {@link #value()} onto the schema's system flag.
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface SchemaSystem {
    /**
     * Whether this schema represents a system message.
     * Default is true when the annotation is present.
     *
     * @return true if this is a system message, false otherwise
     */
    boolean value() default true;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/SchemaUtils.java
|
package ai.driftkit.workflow.engine.schema;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema.AIFunctionProperty;
import ai.driftkit.workflow.engine.schema.annotations.SchemaClass;
import ai.driftkit.workflow.engine.utils.ReflectionUtils;
import ai.driftkit.common.utils.JsonUtils;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import java.lang.reflect.Field;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
/**
 * Utility class for schema management and operations.
 * Ported from driftkit-chat-assistant-framework with adaptations for workflow engine.
 *
 * <p>All state is static. The caches use concurrent collections, and composable
 * schema creation is performed atomically via {@code computeIfAbsent}, so the
 * class is safe for concurrent use.
 */
@Slf4j
public class SchemaUtils {

    /** Cache of per-field schemas for classes marked composable, keyed by class. */
    private static final Map<Class<?>, List<AIFunctionSchema>> composableSchemaCache = new ConcurrentHashMap<>();

    /** Schemas registered explicitly through {@link #addSchema(AIFunctionSchema)}. */
    private static final List<AIFunctionSchema> schemasList = new CopyOnWriteArrayList<>();

    /** Shared mapper for serializing/deserializing complex field values. */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Schema registry for name->class mappings
    private static final SchemaRegistry SCHEMA_REGISTRY = new InMemorySchemaRegistry();

    private SchemaUtils() {
        // Static utility class; not meant to be instantiated.
    }

    /**
     * Gets all registered schemas: explicitly added ones plus all cached
     * composable schemas.
     *
     * @return a new list containing every known schema (never null)
     */
    public static List<AIFunctionSchema> getSchemas() {
        List<AIFunctionSchema> allSchemas = new ArrayList<>();
        allSchemas.addAll(schemasList);
        composableSchemaCache.values().forEach(allSchemas::addAll);
        return allSchemas;
    }

    /**
     * Adds a schema to the registry.
     *
     * @param schema the schema to register
     */
    public static void addSchema(AIFunctionSchema schema) {
        schemasList.add(schema);
    }

    /**
     * Gets all schemas from a class, handling composable schemas.
     * Alias for {@link #getAllSchemasFromClass(Class)} to match the
     * SchemaProvider interface.
     *
     * @param schemaClass the class to derive schemas from
     * @return the derived schemas, possibly empty
     */
    public static List<AIFunctionSchema> generateComposableSchemas(Class<?> schemaClass) {
        return getAllSchemasFromClass(schemaClass);
    }

    /**
     * Gets all schemas from a class, handling composable schemas.
     *
     * <p>For a class annotated {@code @SchemaClass(composable = true)} the result
     * is one schema per field, cached per class. Otherwise a single schema is
     * generated for the whole class.
     *
     * @param schemaClass the class to derive schemas from; {@code null}/{@code void} yields an empty list
     * @return the derived schemas, possibly empty (never null)
     */
    public static List<AIFunctionSchema> getAllSchemasFromClass(Class<?> schemaClass) {
        if (schemaClass == null || schemaClass == void.class) {
            return List.of();
        }
        SchemaClass schemaAnnotation = schemaClass.getAnnotation(SchemaClass.class);
        if (schemaAnnotation != null && schemaAnnotation.composable()) {
            // computeIfAbsent is atomic on ConcurrentHashMap; the previous
            // containsKey/get/put sequence could build the schemas twice under
            // concurrent access.
            return composableSchemaCache.computeIfAbsent(
                    schemaClass,
                    cls -> createComposableSchemas(cls, schemaAnnotation));
        }
        AIFunctionSchema schema = getSchemaFromClass(schemaClass);
        return schema != null ? List.of(schema) : List.of();
    }

    /**
     * Creates composable schemas by splitting a class into individual field schemas.
     * Each resulting schema is named {@code <baseId>_<fieldName>} and carries a
     * single property.
     *
     * @param schemaClass the composable class
     * @param annotation  its {@code @SchemaClass} annotation (used for the base id)
     * @return one schema per property of the base schema
     */
    private static List<AIFunctionSchema> createComposableSchemas(Class<?> schemaClass, SchemaClass annotation) {
        List<AIFunctionSchema> schemas = new ArrayList<>();
        AIFunctionSchema baseSchema = AIFunctionSchema.fromClass(schemaClass);
        String baseSchemaId = !annotation.id().isEmpty() ? annotation.id() : schemaClass.getSimpleName();
        for (AIFunctionProperty property : baseSchema.getProperties()) {
            AIFunctionSchema fieldSchema = new AIFunctionSchema(
                    baseSchemaId + "_" + property.getName(),
                    List.of(property)
            );
            fieldSchema.setDescription(property.getDescription());
            fieldSchema.setComposable(true);
            schemas.add(fieldSchema);
        }
        return schemas;
    }

    /**
     * Generates a schema from a class (alias for {@link #getSchemaFromClass(Class)}).
     * Matches the SchemaProvider interface.
     *
     * @param schemaClass the class to generate a schema for
     * @return the generated schema, or {@code null} for {@code null}/{@code void}
     */
    public static AIFunctionSchema generateSchema(Class<?> schemaClass) {
        return getSchemaFromClass(schemaClass);
    }

    /**
     * Gets a schema from a class and registers the name-to-class mapping in the
     * schema registry as a side effect.
     *
     * @param schemaClass the class to generate a schema for
     * @return the generated schema, or {@code null} for {@code null}/{@code void}
     */
    public static AIFunctionSchema getSchemaFromClass(Class<?> schemaClass) {
        if (schemaClass == null || schemaClass == void.class) {
            return null;
        }
        AIFunctionSchema schema = AIFunctionSchema.fromClass(schemaClass);
        // Additional processing for SchemaClass annotation (composable, etc.)
        SchemaClass annotation = schemaClass.getAnnotation(SchemaClass.class);
        if (annotation != null && annotation.composable()) {
            schema.setComposable(true);
            log.debug("Schema {} is marked as composable", schemaClass.getName());
        }
        // Register in schema registry so the class can later be resolved by name.
        String schemaName = getSchemaId(schemaClass);
        if (schemaName != null) {
            SCHEMA_REGISTRY.registerSchema(schemaName, schemaClass);
            log.debug("Registered schema in registry: {} -> {}", schemaName, schemaClass.getName());
        }
        return schema;
    }

    /**
     * Gets the schema ID for a class. Resolution order: {@code @SchemaName},
     * then {@code @SchemaClass(id = ...)}, then the simple class name.
     *
     * @param schemaClass the class to identify
     * @return the schema id, or {@code null} for {@code null}/{@code void}
     */
    public static String getSchemaId(Class<?> schemaClass) {
        if (schemaClass == null || schemaClass == void.class) {
            return null;
        }
        SchemaName schemaNameAnnotation = schemaClass.getAnnotation(SchemaName.class);
        if (schemaNameAnnotation != null) {
            return schemaNameAnnotation.value();
        }
        SchemaClass annotation = schemaClass.getAnnotation(SchemaClass.class);
        if (annotation != null && !annotation.id().isEmpty()) {
            return annotation.id();
        }
        return schemaClass.getSimpleName();
    }

    /**
     * Clears the composable-schema cache, the underlying schema cache, and the
     * name-to-class registry.
     */
    public static void clearCache() {
        composableSchemaCache.clear();
        AIFunctionSchema.clearCache();
        SCHEMA_REGISTRY.clear();
    }

    /**
     * Gets the Java class for a given schema name.
     *
     * @param schemaName The name of the schema
     * @return The Java class if registered, null otherwise
     */
    public static Class<?> getSchemaClass(String schemaName) {
        if (schemaName == null) {
            return null;
        }
        return SCHEMA_REGISTRY.getSchemaClass(schemaName).orElse(null);
    }

    /**
     * Converts from map to object (alias for {@link #createInstance(Class, Map)}).
     * Matches the SchemaProvider interface.
     *
     * @param properties  property name to string-value map
     * @param schemaClass target class
     * @return a populated instance, or {@code null} on failure
     */
    public static <T> T convertFromMap(Map<String, String> properties, Class<T> schemaClass) {
        return createInstance(schemaClass, properties);
    }

    /**
     * Creates an instance of a schema class from a map of properties.
     *
     * <p>The class must expose a no-arg constructor. Field names are matched
     * directly first; {@code @JsonAlias} values are consulted as fallbacks.
     * Errors are logged and {@code null} is returned rather than thrown.
     *
     * @param schemaClass target class; {@code null}/{@code void} yields {@code null}
     * @param properties  property values keyed by field name or alias; may be null/empty
     * @return the populated instance, or {@code null} if construction fails
     */
    public static <T> T createInstance(Class<T> schemaClass, Map<String, String> properties) {
        if (schemaClass == null || schemaClass == void.class) {
            return null;
        }
        try {
            T instance = schemaClass.getDeclaredConstructor().newInstance();
            if (properties != null && !properties.isEmpty()) {
                // Get all accessible fields from the class including inherited fields
                List<Field> accessibleFields = ReflectionUtils.getAccessibleFields(schemaClass, true);
                for (Field field : accessibleFields) {
                    String propertyName = field.getName();
                    // The exact field name wins over any @JsonAlias value.
                    JsonAlias jsonAlias = field.getAnnotation(JsonAlias.class);
                    if (jsonAlias != null) {
                        String matchedProperty = null;
                        String matchedValue = null;
                        if (properties.containsKey(propertyName)) {
                            matchedProperty = propertyName;
                            matchedValue = properties.get(propertyName);
                        } else {
                            for (String alias : jsonAlias.value()) {
                                if (properties.containsKey(alias)) {
                                    matchedProperty = alias;
                                    matchedValue = properties.get(alias);
                                    log.debug("Found property via JsonAlias: {} -> {} for field {}",
                                            alias, matchedValue, propertyName);
                                    break;
                                }
                            }
                        }
                        if (matchedProperty != null) {
                            setFieldValue(field, instance, matchedValue);
                        }
                    } else if (properties.containsKey(propertyName)) {
                        String propertyValue = properties.get(propertyName);
                        setFieldValue(field, instance, propertyValue);
                    }
                }
            }
            return instance;
        } catch (Exception e) {
            log.error("Error creating instance of {}: {}", schemaClass.getName(), e.getMessage(), e);
            return null;
        }
    }

    /**
     * Converts object to map (alias for {@link #extractProperties(Object)}).
     * Matches the SchemaProvider interface.
     *
     * @param object the source object
     * @return property name to string-value map (never null)
     */
    public static Map<String, String> convertToMap(Object object) {
        return extractProperties(object);
    }

    /**
     * Extracts properties from an object into a map.
     *
     * <p>Scalars (String/Number/Boolean/Enum) use {@code toString()};
     * collections, maps, arrays and other complex values are JSON-serialized,
     * falling back to {@code toString()} on serialization failure. Null-valued
     * fields are omitted.
     *
     * @param object the source object; {@code null} yields an empty map
     * @return property name to string-value map (never null)
     */
    public static Map<String, String> extractProperties(Object object) {
        if (object == null) {
            return Map.of();
        }
        Map<String, String> properties = new HashMap<>();
        try {
            // Get all accessible fields from the class including inherited fields
            List<Field> accessibleFields = ReflectionUtils.getAccessibleFields(object.getClass(), true);
            for (Field field : accessibleFields) {
                String propertyName = field.getName();
                Object value = ReflectionUtils.getFieldValue(field, object);
                if (value != null) {
                    if (value instanceof String || value instanceof Number || value instanceof Boolean || value instanceof Enum) {
                        properties.put(propertyName, value.toString());
                    } else if (value instanceof Collection || value instanceof Map || value.getClass().isArray()) {
                        try {
                            properties.put(propertyName, OBJECT_MAPPER.writeValueAsString(value));
                        } catch (Exception e) {
                            log.warn("Error serializing collection field {}: {}", propertyName, e.getMessage());
                            properties.put(propertyName, value.toString());
                        }
                    } else {
                        try {
                            properties.put(propertyName, OBJECT_MAPPER.writeValueAsString(value));
                        } catch (Exception e) {
                            log.warn("Error serializing complex field {}: {}", propertyName, e.getMessage());
                            properties.put(propertyName, value.toString());
                        }
                    }
                }
            }
        } catch (Exception e) {
            log.error("Error extracting properties from {}: {}",
                    object.getClass().getName(), e.getMessage(), e);
        }
        return properties;
    }

    /**
     * Combines composable schema data, checking if all required fields are present.
     *
     * @param schemaClass        the composable schema class
     * @param existingProperties previously collected values (may be null)
     * @param newProperties      newly submitted values (may be null)
     * @param schemaId           id used for logging only
     * @return the merged properties when all required fields are present;
     *         {@code null} when the data is still incomplete; {@code newProperties}
     *         unchanged when the class is not composable
     */
    public static Map<String, String> combineComposableSchemaData(
            Class<?> schemaClass,
            Map<String, String> existingProperties,
            Map<String, String> newProperties,
            String schemaId) {
        if (schemaClass == null) {
            return newProperties;
        }
        SchemaClass annotation = schemaClass.getAnnotation(SchemaClass.class);
        if (annotation == null || !annotation.composable()) {
            return newProperties;
        }
        // New values overwrite existing ones; LinkedHashMap keeps insertion order.
        Map<String, String> combinedProperties = existingProperties != null
                ? new LinkedHashMap<>(existingProperties)
                : new LinkedHashMap<>();
        if (newProperties != null) {
            combinedProperties.putAll(newProperties);
        }
        AIFunctionSchema schema = getSchemaFromClass(schemaClass);
        List<String> requiredFields = schema.getProperties().stream()
                .filter(AIFunctionProperty::isRequired)
                .map(AIFunctionProperty::getName)
                .collect(Collectors.toList());
        boolean isComplete = requiredFields.stream().allMatch(combinedProperties::containsKey);
        log.debug("Composable schema data for {}: combined={}, required={}, isComplete={}",
                schemaId, combinedProperties.keySet(), requiredFields, isComplete);
        return isComplete ? combinedProperties : null;
    }

    /**
     * Sets a field value with type conversion from its string representation.
     *
     * <p>Supports primitives and their wrappers, String, enums, List/Map
     * (JSON-parsed), and arbitrary types when the value is a JSON document.
     * Conversion failures are logged, never thrown.
     *
     * @param field    the target field
     * @param instance the object to mutate
     * @param value    the string value to convert and assign
     */
    private static void setFieldValue(Field field, Object instance, String value) {
        Class<?> fieldType = field.getType();
        Object convertedValue = null;
        try {
            if (fieldType == String.class) {
                convertedValue = value;
            } else if (fieldType == int.class || fieldType == Integer.class) {
                convertedValue = Integer.parseInt(value);
            } else if (fieldType == long.class || fieldType == Long.class) {
                convertedValue = Long.parseLong(value);
            } else if (fieldType == double.class || fieldType == Double.class) {
                convertedValue = Double.parseDouble(value);
            } else if (fieldType == float.class || fieldType == Float.class) {
                convertedValue = Float.parseFloat(value);
            } else if (fieldType == boolean.class || fieldType == Boolean.class) {
                convertedValue = Boolean.parseBoolean(value);
            } else if (fieldType.isEnum()) {
                @SuppressWarnings("unchecked")
                Enum<?> enumValue = Enum.valueOf(fieldType.asSubclass(Enum.class), value);
                convertedValue = enumValue;
            } else if (List.class.isAssignableFrom(fieldType)) {
                // Simple JSON array parsing
                if (value.startsWith("[") && value.endsWith("]")) {
                    try {
                        List<?> list = OBJECT_MAPPER.readValue(value, List.class);
                        convertedValue = list;
                    } catch (Exception e) {
                        // Fallback to simple string split
                        String trimmedValue = value.substring(1, value.length() - 1);
                        String[] items = trimmedValue.split(",");
                        List<String> list = new ArrayList<>();
                        for (String item : items) {
                            // Plain replace: the quote is a literal, not a regex.
                            list.add(item.trim().replace("\"", ""));
                        }
                        convertedValue = list;
                    }
                }
            } else if (Map.class.isAssignableFrom(fieldType)) {
                // Simple JSON object parsing
                if (value.startsWith("{") && value.endsWith("}")) {
                    try {
                        Map<?, ?> map = OBJECT_MAPPER.readValue(value, Map.class);
                        convertedValue = map;
                    } catch (Exception e) {
                        log.warn("Failed to parse map value for field {}: {}", field.getName(), e.getMessage());
                    }
                }
            } else if (JsonUtils.isJSON(value)) {
                // If value is JSON and field type is an Object (custom class), try to parse it
                try {
                    Object parsedValue = JsonUtils.fromJson(value, fieldType);
                    convertedValue = parsedValue;
                } catch (Exception e) {
                    log.warn("Failed to parse JSON value for field {} of type {}: {}",
                            field.getName(), fieldType.getName(), e.getMessage());
                }
            } else {
                log.warn("Unsupported field type: {} for field: {}", fieldType.getName(), field.getName());
                return;
            }
            // Use ReflectionUtils to safely set the field value.
            // NOTE(review): when parsing fails for a non-primitive type,
            // convertedValue is null and the field is overwritten with null —
            // presumably intentional (acts as a reset); confirm before changing.
            if (convertedValue != null || !fieldType.isPrimitive()) {
                if (!ReflectionUtils.setFieldValue(field, instance, convertedValue)) {
                    log.error("Failed to set field {} with converted value", field.getName());
                }
            }
        } catch (Exception e) {
            log.error("Error converting value '{}' for field {} of type {}: {}",
                    value, field.getName(), fieldType.getName(), e.getMessage(), e);
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/schema/annotations/SchemaClass.java
|
package ai.driftkit.workflow.engine.schema.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a class for schema generation with additional metadata.
 * Ported from driftkit-chat-assistant-framework.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface SchemaClass {
    /**
     * The unique identifier for this schema; empty means the schema id is
     * derived elsewhere (typically the simple class name).
     */
    String id() default "";

    /**
     * Human-readable description of the schema.
     */
    String description() default "";

    /**
     * Whether this schema is composable, i.e. may be split into one schema
     * per field instead of a single whole-class schema.
     */
    boolean composable() default false;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/service/DefaultWorkflowExecutionService.java
|
package ai.driftkit.workflow.engine.service;
import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.chat.ChatMessage;
import ai.driftkit.common.domain.chat.ChatMessage.DataProperty;
import ai.driftkit.common.domain.chat.ChatRequest;
import ai.driftkit.common.domain.chat.ChatResponse;
import ai.driftkit.common.service.ChatStore;
import ai.driftkit.workflow.engine.domain.PageRequest;
import ai.driftkit.workflow.engine.domain.PageResult;
import ai.driftkit.workflow.engine.chat.ChatContextHelper;
import ai.driftkit.workflow.engine.chat.ChatMessageTask;
import ai.driftkit.workflow.engine.chat.ChatResponseExtensions;
import ai.driftkit.workflow.engine.chat.converter.ChatMessageTaskConverter;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.core.WorkflowEngine.WorkflowExecution;
import ai.driftkit.workflow.engine.utils.WorkflowInputOutputHandler;
import ai.driftkit.workflow.engine.domain.ChatSession;
import ai.driftkit.workflow.engine.domain.AsyncStepState;
import ai.driftkit.workflow.engine.domain.StepMetadata;
import ai.driftkit.workflow.engine.domain.SuspensionData;
import ai.driftkit.workflow.engine.domain.WorkflowDetails;
import ai.driftkit.workflow.engine.domain.WorkflowMetadata;
import ai.driftkit.workflow.engine.graph.StepNode;
import ai.driftkit.workflow.engine.graph.WorkflowGraph;
import ai.driftkit.workflow.engine.persistence.AsyncStepStateRepository;
import ai.driftkit.workflow.engine.persistence.ChatSessionRepository;
import ai.driftkit.workflow.engine.persistence.SuspensionDataRepository;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.persistence.WorkflowStateRepository;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import ai.driftkit.workflow.engine.schema.SchemaUtils;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
* Default implementation of WorkflowExecutionService.
* Core business logic extracted from Spring-specific WorkflowService.
*/
@Slf4j
public class DefaultWorkflowExecutionService implements WorkflowExecutionService {
private static final int MAX_WAIT_ITERATIONS = 10000; // 100 seconds max wait
private static final long WAIT_INTERVAL_MS = 10;
private final WorkflowEngine engine;
private final ChatSessionRepository sessionRepository;
private final AsyncStepStateRepository asyncStepStateRepository;
private final SuspensionDataRepository suspensionDataRepository;
private final WorkflowStateRepository stateRepository;
private final ChatStore chatStore;
// Optional event publisher for WebSocket notifications
private WorkflowEventPublisher eventPublisher;
/**
 * Creates the service with all required collaborators.
 * No null-checks are performed here; a null dependency only fails at first use.
 */
public DefaultWorkflowExecutionService(WorkflowEngine engine,
ChatSessionRepository sessionRepository,
AsyncStepStateRepository asyncStepStateRepository,
SuspensionDataRepository suspensionDataRepository,
WorkflowStateRepository stateRepository,
ChatStore chatStore) {
this.engine = engine;
this.sessionRepository = sessionRepository;
this.asyncStepStateRepository = asyncStepStateRepository;
this.suspensionDataRepository = suspensionDataRepository;
this.stateRepository = stateRepository;
this.chatStore = chatStore;
}
/**
 * Sets the event publisher for WebSocket/event notifications.
 * This is optional and can be set by the Spring layer if needed; when unset,
 * workflow start/resume events are simply not published.
 *
 * @param eventPublisher the publisher to notify, or null to disable notifications
 */
public void setEventPublisher(WorkflowEventPublisher eventPublisher) {
this.eventPublisher = eventPublisher;
}
// ========== Chat Workflow Execution ==========
/**
 * Processes a chat request by starting a new workflow run or resuming a
 * suspended one for the same chat, then blocks (polling) until the run reaches
 * a terminal-for-response state and converts that state into a ChatResponse.
 * Both the request and the produced response are recorded in the chat store.
 * Any failure is converted into an error ChatResponse rather than thrown.
 */
@Override
public ChatResponse executeChat(ChatRequest request) {
try {
// Store the request in chat history
chatStore.add(request);
// Determine workflow to use
String workflowId = request.getWorkflowId();
if (workflowId == null) {
throw new IllegalArgumentException("No workflow specified in request");
}
String chatId = request.getChatId();
// Check if there's a suspended workflow for this chat
Optional<WorkflowInstance> suspendedInstance = stateRepository.findLatestSuspendedByChatId(chatId);
WorkflowExecution<?> execution;
if (suspendedInstance.isPresent()) {
// Resume the suspended workflow
log.info("Found suspended workflow {} for chat {}, resuming",
suspendedInstance.get().getInstanceId(), chatId);
execution = engine.resume(suspendedInstance.get().getInstanceId(), request);
} else {
// Generate unique instance ID for new execution
// (chatId + timestamp + random suffix to avoid collisions within the same millisecond)
String instanceId = chatId + "_" + System.currentTimeMillis() + "_" + UUID.randomUUID().toString().substring(0, 8);
// Execute new workflow instance with chatId
execution = engine.execute(workflowId, request, instanceId, chatId);
}
String runId = execution.getRunId();
// Notify event publisher if available
if (eventPublisher != null) {
eventPublisher.publishWorkflowStarted(runId, workflowId);
}
// Wait for workflow to reach a terminal state (blocking poll; see waitForTerminalState)
WorkflowInstance instance = waitForTerminalState(runId);
log.info("Workflow {} reached state: {}",
runId, instance.getStatus());
// Create ChatResponse based on workflow state
ChatResponse response = createChatResponseFromWorkflowState(
request.getChatId(),
request.getUserId(),
request.getLanguage(),
workflowId,
instance
);
// Update chat history with response
chatStore.add(response);
// Update session last message time
updateSessionLastMessageTime(response.getChatId(), response.getTimestamp());
return response;
} catch (Exception e) {
log.error("Error processing chat request", e);
// Return error response in proper ChatResponse format; the error is also
// recorded in chat history so the client sees a consistent transcript.
ChatResponse errorResponse = createErrorResponse(request, e);
chatStore.add(errorResponse);
updateSessionLastMessageTime(errorResponse.getChatId(), errorResponse.getTimestamp());
return errorResponse;
}
}
/**
 * Resumes a suspended workflow in response to user input referencing a prior
 * response message. The chat is located via the original response's chatId;
 * the resume payload is converted to the expected input class when the request
 * names a registered schema, otherwise the raw request is passed through.
 * Blocks until the workflow reaches its next terminal-for-response state.
 * Failures are converted into an error ChatResponse rather than thrown.
 */
@Override
public ChatResponse resumeChat(String messageId, ChatRequest request) {
try {
// Store the resume request in chat history
chatStore.add(request);
// Find the original message in chat history
ChatMessage originalMessage = chatStore.getById(messageId);
if (originalMessage == null || !(originalMessage instanceof ChatResponse)) {
throw new IllegalArgumentException("No chat response found for messageId: " + messageId);
}
ChatResponse originalResponse = (ChatResponse) originalMessage;
// Find the suspended workflow instance for this chat
String chatId = originalResponse.getChatId();
Optional<WorkflowInstance> instanceOpt = stateRepository.findLatestSuspendedByChatId(chatId);
if (!instanceOpt.isPresent()) {
throw new IllegalArgumentException("No suspended workflow instance found for chatId: " + chatId);
}
WorkflowInstance instance = instanceOpt.get();
if (instance.getStatus() != WorkflowInstance.WorkflowStatus.SUSPENDED) {
throw new IllegalStateException("Workflow is not in suspended state: " + instance.getStatus());
}
// Get expected input type from schema registry
Object resumeInput;
String schemaName = request.getRequestSchemaName();
if (schemaName != null) {
log.debug("Looking for schema in registry: {}", schemaName);
Class<?> expectedInputClass = SchemaUtils.getSchemaClass(schemaName);
if (expectedInputClass != null) {
log.debug("Found schema class: {}", expectedInputClass.getName());
// Convert properties map to expected type
resumeInput = SchemaUtils.createInstance(
expectedInputClass,
request.getPropertiesMap()
);
} else {
// Unknown schema name: fall back to passing the raw request through.
log.warn("Schema not found in registry: {}", schemaName);
resumeInput = request;
}
} else {
// No schema specified, use request as-is
resumeInput = request;
}
// Resume the workflow with converted input
var execution = engine.resume(instance.getInstanceId(), resumeInput);
String runId = execution.getRunId();
// Notify event publisher if available
if (eventPublisher != null) {
eventPublisher.publishWorkflowResumed(runId, instance.getWorkflowId());
}
// Wait for workflow to reach next terminal state
instance = waitForTerminalState(runId);
// Create ChatResponse based on new workflow state
ChatResponse response = createChatResponseFromWorkflowState(
request.getChatId(),
request.getUserId(),
request.getLanguage(),
instance.getWorkflowId(),
instance
);
// Update chat history with response
chatStore.add(response);
updateSessionLastMessageTime(response.getChatId(), response.getTimestamp());
return response;
} catch (Exception e) {
log.error("Error resuming chat for messageId: {}", messageId, e);
ChatResponse errorResponse = createErrorResponse(request, e);
chatStore.add(errorResponse);
updateSessionLastMessageTime(errorResponse.getChatId(), errorResponse.getTimestamp());
return errorResponse;
}
}
/**
 * Returns the current status of an asynchronous workflow step as a refreshed
 * ChatResponse for the given message, or empty when no async state or no
 * original ChatResponse exists for that messageId.
 *
 * <p>When the async step has completed, the response carries the step's final
 * result data; otherwise it carries the initial data together with the current
 * progress percentage.
 *
 * @param messageId the id of the original ChatResponse message
 * @return the updated response, or {@link Optional#empty()} if unknown
 */
@Override
public Optional<ChatResponse> getAsyncStatus(String messageId) {
    // Get async state
    Optional<AsyncStepState> asyncStateOpt = asyncStepStateRepository.findByMessageId(messageId);
    if (asyncStateOpt.isEmpty()) {
        return Optional.empty();
    }
    AsyncStepState asyncState = asyncStateOpt.get();
    // Pattern-matching instanceof subsumes the original's separate null check:
    // instanceof is false for null.
    if (!(chatStore.getById(messageId) instanceof ChatResponse original)) {
        return Optional.empty();
    }
    // Create updated response based on current async state
    ChatResponse response = new ChatResponse(
            messageId,
            original.getChatId(),
            original.getWorkflowId(),
            original.getLanguage(),
            asyncState.isCompleted(),
            asyncState.getPercentComplete(),
            original.getUserId(),
            new HashMap<>()
    );
    // Update properties based on state
    if (asyncState.isCompleted() && asyncState.getResultData() != null) {
        // Use final result data
        response.setPropertiesMap(WorkflowInputOutputHandler.extractPropertiesFromData(asyncState.getResultData()));
    } else {
        // Use initial data; progress is conveyed by percentComplete above
        response.setPropertiesMap(WorkflowInputOutputHandler.extractPropertiesFromData(asyncState.getInitialData()));
    }
    return Optional.of(response);
}
// ========== Session Management ==========
/**
 * Returns the existing chat session for the given chatId, or creates and
 * persists a new one. A new session is named from the initial message
 * (abbreviated to 50 chars) or "New Chat" when no message is given.
 */
@Override
public ChatSession getOrCreateSession(String chatId, String userId, String initialMessage) {
    return sessionRepository.findById(chatId).orElseGet(() -> {
        String sessionName = StringUtils.isEmpty(initialMessage)
                ? "New Chat"
                : abbreviate(initialMessage, 50);
        return sessionRepository.save(ChatSession.create(chatId, userId, sessionName));
    });
}
/**
 * Looks up a chat session by id; delegates directly to the session repository.
 */
@Override
public Optional<ChatSession> getChatSession(String chatId) {
return sessionRepository.findById(chatId);
}
/**
 * Creates and persists a brand-new chat session with a random UUID as chatId.
 */
@Override
public ChatSession createChatSession(String userId, String name) {
    ChatSession newSession = ChatSession.create(UUID.randomUUID().toString(), userId, name);
    return sessionRepository.save(newSession);
}
/**
 * Archives the session with the given chatId, persisting the archived copy.
 * A missing session is silently ignored.
 */
@Override
public void archiveChatSession(String chatId) {
    Optional<ChatSession> found = sessionRepository.findById(chatId);
    if (found.isPresent()) {
        sessionRepository.save(found.get().archive());
    }
}
/**
 * Lists the active (non-archived) chat sessions of a user, paged.
 * An empty/null userId yields an empty page rather than an error.
 */
@Override
public PageResult<ChatSession> listChatsForUser(String userId, PageRequest pageRequest) {
    if (StringUtils.isNotEmpty(userId)) {
        return sessionRepository.findActiveByUserId(userId, pageRequest);
    }
    return PageResult.empty(pageRequest.getPageNumber(), pageRequest.getPageSize());
}
// ========== Chat History ==========
/**
 * Returns one page of the chat's message history.
 *
 * <p>Pagination is applied in memory over the full message list from the chat
 * store; the page content is copied so the returned PageResult does not hold a
 * live {@code subList} view of the store's list (the original code returned a
 * view, which can break if the backing list is later mutated).
 *
 * <p>NOTE(review): the {@code includeContext} parameter is currently ignored —
 * confirm whether context filtering was ever intended here.
 *
 * @param chatId         the chat whose history to read
 * @param pageRequest    zero-based page number and page size
 * @param includeContext currently unused
 * @return the requested page (possibly empty, never null)
 */
@Override
public PageResult<ChatMessage> getChatHistory(String chatId, PageRequest pageRequest, boolean includeContext) {
    // For now, fetch all messages - pagination can be pushed into ChatStore later
    List<ChatMessage> messages = chatStore.getAll(chatId);
    int start = pageRequest.getPageNumber() * pageRequest.getPageSize();
    int end = Math.min(start + pageRequest.getPageSize(), messages.size());
    // Defensive copy: do not expose a subList view of the store's list.
    List<ChatMessage> pageContent = start < messages.size()
            ? new ArrayList<>(messages.subList(start, end))
            : Collections.emptyList();
    return new PageResult<>(
            pageContent,
            pageRequest.getPageNumber(),
            pageRequest.getPageSize(),
            messages.size()
    );
}
/**
 * Converts a chat message into its task representation; thin delegate to
 * ChatMessageTaskConverter.
 */
@Override
public List<ChatMessageTask> convertMessageToTasks(ChatMessage message) {
return ChatMessageTaskConverter.convert(message);
}
// ========== Workflow Management ==========
/**
 * Lists metadata for every workflow registered with the engine.
 * Workflows whose graph cannot be resolved are skipped.
 */
@Override
public List<WorkflowMetadata> listWorkflows() {
    List<WorkflowMetadata> result = new ArrayList<>();
    for (String workflowId : engine.getRegisteredWorkflows()) {
        var graph = engine.getWorkflowGraph(workflowId);
        if (graph != null) {
            result.add(createWorkflowMetadata(graph));
        }
    }
    // Unmodifiable, matching the semantics of Stream.toList() in callers' hands.
    return Collections.unmodifiableList(result);
}
/**
 * Builds the full details of a workflow: its metadata, per-step metadata,
 * initial step id, and the schema of the initial step's input.
 * Returns null when the workflow is unknown.
 */
@Override
public WorkflowDetails getWorkflowDetails(String workflowId) {
    var graph = engine.getWorkflowGraph(workflowId);
    if (graph == null) {
        return null;
    }
    // Convert every node of the graph into step metadata.
    List<StepMetadata> steps = new ArrayList<>();
    for (StepNode node : graph.nodes().values()) {
        steps.add(convertToStepMetadata(node));
    }
    return new WorkflowDetails(
            createWorkflowMetadata(graph),
            Collections.unmodifiableList(steps),
            graph.initialStepId(),
            getInitialSchema(workflowId)
    );
}
/**
 * Resolves the input schema of a workflow's initial step.
 * Returns null when the workflow, its initial step, the step's executor,
 * or a usable input type cannot be found.
 */
@Override
public AIFunctionSchema getInitialSchema(String workflowId) {
    var graph = engine.getWorkflowGraph(workflowId);
    if (graph == null) {
        return null;
    }
    String firstStepId = graph.initialStepId();
    if (firstStepId == null) {
        return null;
    }
    StepNode firstStep = graph.nodes().get(firstStepId);
    if (firstStep == null || firstStep.executor() == null) {
        return null;
    }
    Class<?> inputType = firstStep.executor().getInputType();
    boolean usable = inputType != null && inputType != void.class;
    return usable ? SchemaUtils.getSchemaFromClass(inputType) : null;
}
/**
 * Collects the distinct input and output schemas of every step in a workflow.
 *
 * <p>Fix: the original guarded only the input-type lookup with a null check on
 * {@code step.executor()} but dereferenced it unconditionally for the output
 * type, throwing (and logging) an NPE for executor-less steps. Both lookups are
 * now behind a single guard.
 *
 * @param workflowId the workflow to inspect
 * @return the unique schemas across all steps; empty when the workflow is unknown
 */
@Override
public List<AIFunctionSchema> getWorkflowSchemas(String workflowId) {
    var graph = engine.getWorkflowGraph(workflowId);
    if (graph == null) {
        log.warn("Workflow not found: {}", workflowId);
        return List.of();
    }
    Set<AIFunctionSchema> uniqueSchemas = new HashSet<>();
    // Process each step in the workflow
    for (StepNode step : graph.nodes().values()) {
        try {
            var executor = step.executor();
            if (executor == null) {
                // No executor: nothing to derive schemas from for this step.
                continue;
            }
            // Get input schema from executor
            Class<?> inputType = executor.getInputType();
            if (inputType != null && inputType != void.class && inputType != Void.class) {
                AIFunctionSchema inputSchema = SchemaUtils.getSchemaFromClass(inputType);
                if (inputSchema != null) {
                    uniqueSchemas.add(inputSchema);
                }
            }
            // Get output schema from executor
            Class<?> outputType = executor.getOutputType();
            if (outputType != null && outputType != void.class && outputType != Void.class) {
                AIFunctionSchema outputSchema = SchemaUtils.getSchemaFromClass(outputType);
                if (outputSchema != null) {
                    uniqueSchemas.add(outputSchema);
                }
            }
        } catch (Exception e) {
            log.error("Error generating schema for step {} in workflow {}", step.id(), workflowId, e);
        }
    }
    return new ArrayList<>(uniqueSchemas);
}
/**
 * Fetches the current workflow instance for the given runId; thin delegate to
 * the engine.
 */
@Override
public Optional<WorkflowInstance> getWorkflowState(String runId) {
return engine.getWorkflowInstance(runId);
}
// ========== Private Helper Methods ==========
/**
 * Polls the engine until the workflow reaches a state from which an initial
 * chat response can be produced: SUSPENDED, COMPLETED, FAILED, or RUNNING with
 * an async step already registered (async suspension).
 *
 * Polls every WAIT_INTERVAL_MS for at most MAX_WAIT_ITERATIONS iterations
 * (10 ms * 10000 = 100 s), then throws IllegalStateException on timeout.
 *
 * @param runId the run to wait on
 * @return the instance in its terminal-for-response state
 * @throws InterruptedException if the polling sleep is interrupted
 * @throws IllegalStateException if the instance disappears or the wait times out
 */
private WorkflowInstance waitForTerminalState(String runId) throws InterruptedException {
for (int i = 0; i < MAX_WAIT_ITERATIONS; i++) {
Optional<WorkflowInstance> instanceOpt = engine.getWorkflowInstance(runId);
if (!instanceOpt.isPresent()) {
throw new IllegalStateException("Workflow instance not found: " + runId);
}
WorkflowInstance instance = instanceOpt.get();
WorkflowInstance.WorkflowStatus status = instance.getStatus();
log.debug("Workflow {} status: {}, currentStep: {}", runId, status, instance.getCurrentStepId());
// Check for terminal states
if (status == WorkflowInstance.WorkflowStatus.SUSPENDED ||
status == WorkflowInstance.WorkflowStatus.COMPLETED ||
status == WorkflowInstance.WorkflowStatus.FAILED) {
return instance;
}
// For RUNNING status, check if we have suspension data with async flag
if (status == WorkflowInstance.WorkflowStatus.RUNNING) {
// Check if we have suspension data
Optional<SuspensionData> suspensionDataOpt = suspensionDataRepository.findByInstanceId(instance.getInstanceId());
if (suspensionDataOpt.isPresent()) {
SuspensionData suspensionData = suspensionDataOpt.get();
// Check if this is an async suspension by looking for async step state
Optional<AsyncStepState> asyncStateOpt = asyncStepStateRepository.findByMessageId(suspensionData.messageId());
if (asyncStateOpt.isPresent()) {
// Workflow has async steps - this is also a terminal state for initial response
return instance;
}
}
}
// Small sleep to avoid busy waiting
Thread.sleep(WAIT_INTERVAL_MS);
}
// Timeout reached
throw new IllegalStateException("Workflow execution timeout after " +
(MAX_WAIT_ITERATIONS * WAIT_INTERVAL_MS / 1000) + " seconds for runId: " + runId);
}
/**
 * Builds the metadata record for a workflow graph.
 * The description is deliberately null - there is no hardcoded description.
 */
private WorkflowMetadata createWorkflowMetadata(WorkflowGraph<?, ?> graph) {
return new WorkflowMetadata(
graph.id(),
graph.version(),
null, // No hardcoded description
graph.inputType(),
graph.outputType()
);
}
/**
 * Creates a chat response from workflow state.
 *
 * Mapping:
 * - SUSPENDED with async step state  -> in-progress async response
 * - SUSPENDED with regular suspension -> prompt-to-user response (with next input schema)
 * - COMPLETED                         -> response built from the last step's output
 * - FAILED                            -> error response with the instance's error message
 * - anything else (incl. SUSPENDED without suspension data, RUNNING)
 *                                     -> fallback "unexpected state" error response
 */
private ChatResponse createChatResponseFromWorkflowState(
String chatId,
String userId,
Language language,
String workflowId,
WorkflowInstance instance) {
switch (instance.getStatus()) {
case SUSPENDED:
SuspensionData suspensionData = suspensionDataRepository.findByInstanceId(instance.getInstanceId()).orElse(null);
if (suspensionData != null) {
// Check if this is an async suspension
Optional<AsyncStepState> asyncStateOpt = asyncStepStateRepository.findByMessageId(suspensionData.messageId());
if (asyncStateOpt.isPresent()) {
// Async suspension
AsyncStepState asyncState = asyncStateOpt.get();
return createAsyncResponse(
chatId, userId, language, workflowId,
asyncState.getInitialData(),
asyncState.getMessageId(),
asyncState.getPercentComplete(),
asyncState.getStatusMessage()
);
} else {
// Regular suspension
AIFunctionSchema nextSchema = null;
if (suspensionData.nextInputClass() != null) {
nextSchema = SchemaUtils.getSchemaFromClass(suspensionData.nextInputClass());
}
return createSuspendResponse(
chatId, userId, language, workflowId,
suspensionData.promptToUser(),
nextSchema,
suspensionData.messageId()
);
}
}
// No suspension data: fall through to the fallback error below.
break;
case COMPLETED:
// Get the last step result
List<WorkflowInstance.StepExecutionRecord> history = instance.getExecutionHistory();
Object finalResult = null;
if (!history.isEmpty()) {
WorkflowInstance.StepExecutionRecord lastStep = history.get(history.size() - 1);
finalResult = lastStep.getOutput();
}
return createCompletedResponse(chatId, userId, language, workflowId, finalResult);
case FAILED:
// Get error message
String errorMessage = "Unknown error";
WorkflowInstance.ErrorInfo errorInfo = instance.getErrorInfo();
if (errorInfo != null) {
errorMessage = errorInfo.errorMessage();
}
return createErrorResponse(chatId, userId, language, workflowId, errorMessage);
case RUNNING:
// Should not happen in normal flow (waitForTerminalState filters RUNNING)
log.warn("Workflow {} is still RUNNING when creating response", instance.getInstanceId());
break;
}
// Fallback response
return createErrorResponse(chatId, userId, language, workflowId,
"Unexpected workflow state: " + instance.getStatus());
}
/**
 * Builds the ChatResponse returned when a workflow suspends waiting for user
 * input. The response is marked complete (the UI treats the prompt as a
 * finished message); when a next-input schema is known it is attached so the
 * client can render the expected form.
 */
private ChatResponse createSuspendResponse(String chatId, String userId, Language language,
String workflowId, Object promptData,
AIFunctionSchema nextSchema, String messageId) {
    String responseId = (messageId != null) ? messageId : UUID.randomUUID().toString();
    Language effectiveLanguage = (language != null) ? language : Language.GENERAL;
    ChatResponse response = new ChatResponse(
            responseId,
            chatId,
            workflowId,
            effectiveLanguage,
            true, // Suspended responses are "completed" from UI perspective
            100,
            userId,
            WorkflowInputOutputHandler.extractPropertiesFromData(promptData)
    );
    if (nextSchema != null) {
        ChatResponseExtensions.setNextSchemaAsSchema(response, nextSchema);
    }
    return response;
}
/**
 * Builds the ChatResponse for a workflow suspended on an asynchronous step.
 *
 * <p>The response is flagged as NOT completed and carries progress metadata
 * ("status" and "progressPercent" properties) so clients can poll for updates.
 *
 * @param chatId          chat this response belongs to
 * @param userId          user the response is addressed to
 * @param language        response language; {@code Language.GENERAL} when null
 * @param workflowId      workflow that owns the async step
 * @param immediateData   data available immediately, flattened into properties
 * @param messageId       id to reuse; a random UUID when null
 * @param percentComplete current progress percentage
 * @param statusMessage   human-readable progress message; may be null
 * @return the in-progress response
 */
private ChatResponse createAsyncResponse(String chatId, String userId, Language language,
                                         String workflowId, Object immediateData,
                                         String messageId, int percentComplete,
                                         String statusMessage) {
    Map<String, String> properties = WorkflowInputOutputHandler.extractPropertiesFromData(immediateData);
    if (statusMessage != null) {
        properties.put("status", statusMessage);
    }
    properties.put("progressPercent", String.valueOf(percentComplete));

    String responseId = (messageId == null) ? UUID.randomUUID().toString() : messageId;
    Language effectiveLanguage = (language == null) ? Language.GENERAL : language;

    // "completed" is false: the async step is still in flight.
    return new ChatResponse(
            responseId,
            chatId,
            workflowId,
            effectiveLanguage,
            false,
            percentComplete,
            userId,
            properties
    );
}
/**
 * Builds the ChatResponse for a workflow that finished successfully.
 *
 * @param chatId     chat this response belongs to
 * @param userId     user the response is addressed to
 * @param language   response language; {@code Language.GENERAL} when null
 * @param workflowId workflow that completed
 * @param result     final workflow result, flattened into string properties
 * @return a completed (100%) response carrying the result properties
 */
private ChatResponse createCompletedResponse(String chatId, String userId, Language language,
                                             String workflowId, Object result) {
    Map<String, String> properties = WorkflowInputOutputHandler.extractPropertiesFromData(result);
    Language effectiveLanguage = (language == null) ? Language.GENERAL : language;
    return new ChatResponse(
            UUID.randomUUID().toString(),
            chatId,
            workflowId,
            effectiveLanguage,
            true,
            100,
            userId,
            properties
    );
}
/**
 * Builds the ChatResponse for a failed workflow run.
 *
 * <p>The failure text is exposed under the "error" property; a null
 * {@code errorMessage} is replaced with the literal "Unknown error".
 *
 * @param chatId       chat this response belongs to
 * @param userId       user the response is addressed to
 * @param language     response language; {@code Language.GENERAL} when null
 * @param workflowId   workflow that failed
 * @param errorMessage description of the failure; may be null
 * @return a completed (100%) response carrying the error property
 */
private ChatResponse createErrorResponse(String chatId, String userId, Language language,
                                         String workflowId, String errorMessage) {
    String message = (errorMessage == null) ? "Unknown error" : errorMessage;
    Map<String, String> properties = new HashMap<>();
    properties.put("error", message);

    return new ChatResponse(
            UUID.randomUUID().toString(),
            chatId,
            workflowId,
            language != null ? language : Language.GENERAL,
            true,
            100,
            userId,
            properties
    );
}
/**
 * Builds an error ChatResponse for a request that failed with an exception.
 *
 * @param request the original chat request (supplies chat/user/workflow ids and language)
 * @param e       the exception that aborted processing
 * @return a completed (100%) response whose "error" property describes the failure
 */
private ChatResponse createErrorResponse(ChatRequest request, Exception e) {
    Map<String, String> errorProps = new HashMap<>();
    // e.getMessage() can legitimately be null (e.g. NullPointerException);
    // fall back to a stable message so the "error" property is never null,
    // consistent with the String-based createErrorResponse overload.
    String message = e.getMessage();
    errorProps.put("error", message != null ? message : "Unknown error");
    return new ChatResponse(
            UUID.randomUUID().toString(),
            request.getChatId(),
            request.getWorkflowId(),
            request.getLanguage() != null ? request.getLanguage() : Language.GENERAL,
            true,
            100,
            request.getUserId(),
            errorProps
    );
}
/**
 * Converts a graph StepNode into API-level StepMetadata, deriving input and
 * output schemas from the step executor's declared types when available.
 * Void or missing types yield a null schema.
 *
 * @param step the graph node to describe
 * @return metadata describing the step's id, description, async flag and schemas
 */
private StepMetadata convertToStepMetadata(StepNode step) {
    AIFunctionSchema inputSchema = null;
    AIFunctionSchema outputSchema = null;

    var executor = step.executor();
    if (executor != null) {
        // Derive the input schema from the executor's declared input type.
        Class<?> inputType = executor.getInputType();
        if (inputType != null && inputType != void.class) {
            inputSchema = SchemaUtils.getSchemaFromClass(inputType);
        }
        // Derive the output schema from the executor's declared output type.
        Class<?> outputType = executor.getOutputType();
        if (outputType != null && outputType != void.class) {
            outputSchema = SchemaUtils.getSchemaFromClass(outputType);
        }
    }

    return new StepMetadata(
            step.id(),
            step.description(),
            step.isAsync(),
            inputSchema,
            outputSchema
    );
}
/**
 * Touches the session's last-message timestamp. A no-op when no session
 * exists for the given chat id.
 *
 * @param chatId    id of the chat session to update
 * @param timestamp new last-message time (epoch millis)
 */
private void updateSessionLastMessageTime(String chatId, long timestamp) {
    sessionRepository.findById(chatId)
            .map(session -> session.withLastMessageTime(timestamp))
            .ifPresent(sessionRepository::save);
}
/**
 * Truncates {@code str} to at most {@code maxLength} characters, appending
 * "..." when truncation occurs (mirrors Apache Commons Lang's
 * {@code StringUtils.abbreviate}).
 *
 * @param str       the string to shorten; may be null
 * @param maxLength maximum length of the returned string
 * @return {@code str} unchanged when it is null or already fits, otherwise a
 *         truncated copy ending in "..."
 */
private String abbreviate(String str, int maxLength) {
    if (str == null || str.length() <= maxLength) {
        return str;
    }
    // Guard: for maxLength < 3 the old "substring(0, maxLength - 3)" produced
    // a negative index and threw StringIndexOutOfBoundsException. There is no
    // room for the "..." suffix, so return a plain prefix instead.
    if (maxLength < 3) {
        return str.substring(0, Math.max(0, maxLength));
    }
    return str.substring(0, maxLength - 3) + "...";
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/service/WorkflowEventPublisher.java
|
package ai.driftkit.workflow.engine.service;

/**
 * Interface for publishing workflow lifecycle events.
 *
 * <p>This allows the core service to notify about workflow state changes
 * without depending on specific event infrastructure (e.g. WebSocket).
 * Implementations decide how events are transported; callers only report
 * state transitions.
 */
public interface WorkflowEventPublisher {

    /**
     * Publish workflow started event.
     *
     * @param runId The workflow run ID
     * @param workflowId The workflow ID
     */
    void publishWorkflowStarted(String runId, String workflowId);

    /**
     * Publish workflow resumed event (a previously suspended run continued).
     *
     * @param runId The workflow run ID
     * @param workflowId The workflow ID
     */
    void publishWorkflowResumed(String runId, String workflowId);

    /**
     * Publish workflow completed event.
     *
     * @param runId The workflow run ID
     * @param workflowId The workflow ID
     * @param result The workflow result
     */
    void publishWorkflowCompleted(String runId, String workflowId, Object result);

    /**
     * Publish workflow failed event.
     *
     * @param runId The workflow run ID
     * @param workflowId The workflow ID
     * @param error The error that caused the failure
     */
    void publishWorkflowFailed(String runId, String workflowId, Throwable error);

    /**
     * Publish workflow suspended event (run is waiting, e.g. for user input).
     *
     * @param runId The workflow run ID
     * @param workflowId The workflow ID
     * @param suspensionReason The reason for suspension
     */
    void publishWorkflowSuspended(String runId, String workflowId, String suspensionReason);

    /**
     * Publish async step progress event.
     *
     * @param runId The workflow run ID
     * @param stepId The step ID
     * @param percentComplete The completion percentage
     * @param message Progress message
     */
    void publishAsyncProgress(String runId, String stepId, int percentComplete, String message);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/service/WorkflowExecutionService.java
|
package ai.driftkit.workflow.engine.service;

import ai.driftkit.common.domain.Language;
import ai.driftkit.common.domain.chat.ChatMessage;
import ai.driftkit.common.domain.chat.ChatRequest;
import ai.driftkit.common.domain.chat.ChatResponse;
import ai.driftkit.workflow.engine.domain.PageRequest;
import ai.driftkit.workflow.engine.domain.PageResult;
import ai.driftkit.workflow.engine.chat.ChatMessageTask;
import ai.driftkit.workflow.engine.domain.ChatSession;
import ai.driftkit.workflow.engine.domain.StepMetadata;
import ai.driftkit.workflow.engine.domain.WorkflowDetails;
import ai.driftkit.workflow.engine.domain.WorkflowMetadata;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import java.util.List;
import java.util.Optional;

/**
 * Core workflow execution service interface.
 * Provides workflow execution and chat management capabilities:
 * chat-driven workflow runs, session/history management, and
 * workflow metadata/schema introspection.
 */
public interface WorkflowExecutionService {

    // ========== Chat Workflow Execution ==========

    /**
     * Execute a chat request through the workflow engine.
     * The workflowId is obtained from the request.
     *
     * @param request The chat request containing workflowId and other parameters
     * @return The chat response
     */
    ChatResponse executeChat(ChatRequest request);

    /**
     * Resume a suspended workflow with user input.
     *
     * @param messageId The message ID of the suspended workflow
     * @param request The chat request with user input
     * @return The chat response
     */
    ChatResponse resumeChat(String messageId, ChatRequest request);

    /**
     * Get the status of an async workflow execution.
     *
     * @param messageId The message ID to check
     * @return The current chat response status, or empty when unknown
     */
    Optional<ChatResponse> getAsyncStatus(String messageId);

    // ========== Session Management ==========

    /**
     * Get or create a chat session.
     *
     * @param chatId The chat ID
     * @param userId The user ID
     * @param initialMessage The initial message (optional)
     * @return The chat session
     */
    ChatSession getOrCreateSession(String chatId, String userId, String initialMessage);

    /**
     * Get a chat session by ID.
     *
     * @param chatId The chat ID
     * @return The chat session if found
     */
    Optional<ChatSession> getChatSession(String chatId);

    /**
     * Create a new chat session.
     *
     * @param userId The user ID
     * @param name The session name
     * @return The created chat session
     */
    ChatSession createChatSession(String userId, String name);

    /**
     * Archive a chat session.
     *
     * @param chatId The chat ID to archive
     */
    void archiveChatSession(String chatId);

    /**
     * List active chats for a user.
     *
     * @param userId The user ID
     * @param pageRequest Pagination parameters
     * @return Page of chat sessions
     */
    PageResult<ChatSession> listChatsForUser(String userId, PageRequest pageRequest);

    // ========== Chat History ==========

    /**
     * Get chat history with pagination.
     *
     * @param chatId The chat ID
     * @param pageRequest Pagination parameters
     * @param includeContext Whether to include context data
     * @return Page of chat messages
     */
    PageResult<ChatMessage> getChatHistory(String chatId, PageRequest pageRequest, boolean includeContext);

    /**
     * Convert a chat message to tasks for display.
     *
     * @param message The chat message
     * @return List of chat message tasks
     */
    List<ChatMessageTask> convertMessageToTasks(ChatMessage message);

    // ========== Workflow Management ==========

    /**
     * List all available workflows.
     *
     * @return List of workflow metadata
     */
    List<WorkflowMetadata> listWorkflows();

    /**
     * Get detailed information about a workflow.
     *
     * @param workflowId The workflow ID
     * @return The workflow details
     */
    WorkflowDetails getWorkflowDetails(String workflowId);

    /**
     * Get the initial input schema for a workflow.
     *
     * @param workflowId The workflow ID
     * @return The initial schema or null if not applicable
     */
    AIFunctionSchema getInitialSchema(String workflowId);

    /**
     * Get all schemas used by a workflow.
     *
     * @param workflowId The workflow ID
     * @return List of schemas
     */
    List<AIFunctionSchema> getWorkflowSchemas(String workflowId);

    /**
     * Get workflow instance state by run ID.
     *
     * @param runId The workflow run ID
     * @return The workflow instance if found
     */
    Optional<WorkflowInstance> getWorkflowState(String runId);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/utils/BranchStepExecutor.java
|
package ai.driftkit.workflow.engine.utils;

import ai.driftkit.workflow.engine.builder.StepDefinition;
import ai.driftkit.workflow.engine.core.InternalStepListener;
import ai.driftkit.workflow.engine.core.RetryExecutor;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.graph.StepNode;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.function.BiFunction;

/**
 * Utility class for executing steps within branches with proper retry and interception support.
 * Eliminates code duplication between TypedBranchStep and MultiBranchStep.
 *
 * <p>All methods are static and stateless; the class operates only on its arguments.
 */
@Slf4j
public final class BranchStepExecutor {

    private BranchStepExecutor() {
        // Utility class
    }

    /**
     * Executes a list of steps sequentially within a branch context.
     * Handles retry policies, interception, and proper error handling.
     *
     * <p>Each step's {@code Continue} payload becomes the next step's input.
     * Any non-Continue result (Fail, Finish, Suspend, Async, Branch)
     * short-circuits the remaining steps and is returned to the engine.
     *
     * @param steps List of steps to execute
     * @param initialInput Initial input for the first step
     * @param ctx Workflow context
     * @param branchName Name of the branch for logging
     * @return Result of the branch execution
     */
    public static StepResult<?> executeBranchSteps(
            List<StepDefinition> steps,
            Object initialInput,
            WorkflowContext ctx,
            String branchName) {

        Object currentInput = initialInput;
        Object lastResult = null;

        for (StepDefinition stepDef : steps) {
            try {
                StepResult<?> stepResult = executeSingleBranchStep(
                        stepDef, currentInput, ctx, branchName);

                // Process the result
                switch (stepResult) {
                    case StepResult.Continue<?> cont -> {
                        lastResult = cont.data();
                        currentInput = lastResult; // Pass output to next step
                    }
                    case StepResult.Fail<?> fail -> {
                        return StepResult.fail(fail.error());
                    }
                    case StepResult.Finish<?> finish -> {
                        return StepResult.finish(finish.result());
                    }
                    case StepResult.Suspend<?> suspend -> {
                        return stepResult; // Return suspension as-is
                    }
                    case StepResult.Async<?> async -> {
                        return stepResult; // Return async as-is for the engine to handle
                    }
                    case StepResult.Branch<?> branch -> {
                        return stepResult; // Handle branch events
                    }
                }
            } catch (Exception e) {
                log.error("Branch {} step {} failed", branchName, stepDef.getId(), e);

                // Notify listener about error
                InternalStepListener listener = ctx.getInternalStepListener();
                if (listener != null) {
                    listener.onInternalStepError(stepDef.getId(), e, ctx);
                }

                // Re-throw the exception so RetryExecutor can handle it
                if (e instanceof RuntimeException) {
                    throw (RuntimeException) e;
                } else {
                    throw new RuntimeException("Step execution failed", e);
                }
            }
        }

        // Return the result from the last step in the branch
        return StepResult.continueWith(lastResult != null ? lastResult : initialInput);
    }

    /**
     * Executes a single step within a branch with retry and interception support.
     *
     * <p>Order matters here: the context is notified first (test tracking),
     * then the listener may intercept the step entirely; either way,
     * {@code afterInternalStep} fires once the step has produced a result.
     */
    private static StepResult<?> executeSingleBranchStep(
            StepDefinition stepDef,
            Object input,
            WorkflowContext ctx,
            String branchName) throws Exception {

        // Notify context about internal step execution for test tracking
        ctx.notifyInternalStepExecution(stepDef.getId(), input);

        log.debug("Processing internal step {} in {}, has retry policy: {}",
                stepDef.getId(), branchName, stepDef.getRetryPolicy() != null);

        // Check if listener wants to intercept this step
        StepResult<?> stepResult;
        InternalStepListener listener = ctx.getInternalStepListener();
        if (listener != null) {
            var intercepted = listener.interceptInternalStep(stepDef.getId(), input, ctx);
            if (intercepted.isPresent()) {
                stepResult = handleInterceptedStep(stepDef, input, ctx, listener, intercepted.get());
            } else {
                stepResult = executeStepWithRetry(stepDef, input, ctx);
            }
        } else {
            stepResult = executeStepWithRetry(stepDef, input, ctx);
        }

        // Notify listener about completion
        if (listener != null) {
            listener.afterInternalStep(stepDef.getId(), stepResult, ctx);
        }

        return stepResult;
    }

    /**
     * Handles intercepted step execution with proper retry support.
     *
     * <p>A mocked failure on a step that declares a retry policy must still go
     * through the retry machinery; the interceptor is therefore re-consulted on
     * every retry attempt via a wrapped executor.
     */
    private static StepResult<?> handleInterceptedStep(
            StepDefinition stepDef,
            Object input,
            WorkflowContext ctx,
            InternalStepListener listener,
            StepResult<?> interceptedResult) throws Exception {

        // If the intercepted result is a failure and the step has a retry policy,
        // we need to handle it properly by letting executeStepWithRetry handle the retry
        if (interceptedResult instanceof StepResult.Fail && stepDef.getRetryPolicy() != null) {
            log.debug("Intercepted mock returned failure for step {} with retry policy, delegating to retry executor",
                    stepDef.getId());

            // Wrap the mock's behavior in the step executor
            @SuppressWarnings("unchecked")
            BiFunction<Object, WorkflowContext, StepResult<Object>> wrappedExecutor =
                    (Object input2, WorkflowContext ctx2) -> {
                        // Try to intercept again
                        var intercepted2 = listener.interceptInternalStep(stepDef.getId(), input2, ctx2);
                        if (intercepted2.isPresent()) {
                            return (StepResult<Object>) intercepted2.get();
                        } else {
                            // Fall back to original executor
                            try {
                                return (StepResult<Object>) stepDef.getExecutor().execute(input2, ctx2);
                            } catch (Exception e) {
                                if (e instanceof RuntimeException) {
                                    throw (RuntimeException) e;
                                }
                                throw new RuntimeException("Step execution failed", e);
                            }
                        }
                    };

            StepDefinition wrappedStep = StepDefinition.of(stepDef.getId(), wrappedExecutor)
                    .withRetryPolicy(stepDef.getRetryPolicy())
                    .withInvocationLimit(stepDef.getInvocationLimit())
                    .withOnInvocationsLimit(stepDef.getOnInvocationsLimit());

            return executeStepWithRetry(wrappedStep, input, ctx);
        }

        return interceptedResult;
    }

    /**
     * Executes a step with retry support if it has a retry policy.
     * This is used internally when executing steps within branches.
     *
     * <p>RetryExecutor operates on WorkflowInstance/StepNode pairs, so a
     * minimal synthetic instance and node are built around the step definition
     * purely to drive the retry loop.
     */
    private static StepResult<?> executeStepWithRetry(
            StepDefinition stepDef,
            Object input,
            WorkflowContext ctx) throws Exception {

        if (stepDef.getRetryPolicy() != null) {
            log.debug("Executing step {} with retry policy: maxAttempts={}, delay={}ms",
                    stepDef.getId(),
                    stepDef.getRetryPolicy().maxAttempts(),
                    stepDef.getRetryPolicy().delay());

            // Create a minimal RetryExecutor for this specific step
            RetryExecutor retryExecutor = new RetryExecutor();

            // Create a fake WorkflowInstance and StepNode just for retry execution
            WorkflowInstance fakeInstance = WorkflowInstance.builder()
                    .instanceId(ctx.getRunId())
                    .context(ctx)
                    .build();

            StepNode fakeStepNode = new StepNode(
                    stepDef.getId(),
                    stepDef.getDescription(),
                    new StepNode.StepExecutor() {
                        @Override
                        public Object execute(Object input, WorkflowContext context) throws Exception {
                            return stepDef.getExecutor().execute(input, context);
                        }

                        @Override
                        public Class<?> getInputType() {
                            return stepDef.getInputType();
                        }

                        @Override
                        public Class<?> getOutputType() {
                            return stepDef.getOutputType();
                        }

                        @Override
                        public boolean requiresContext() {
                            return true;
                        }
                    },
                    false,
                    false,
                    stepDef.getRetryPolicy(),
                    stepDef.getInvocationLimit(),
                    stepDef.getOnInvocationsLimit()
            );

            // Execute with retry
            return retryExecutor.executeWithRetry(fakeInstance, fakeStepNode,
                    (inst, stp) -> {
                        Object result = stepDef.getExecutor().execute(input, inst.getContext());
                        if (result instanceof StepResult<?>) {
                            return (StepResult<?>) result;
                        } else {
                            // The executor returns the raw result, wrap it in StepResult
                            return StepResult.continueWith(result);
                        }
                    });
        } else {
            // No retry policy, execute directly
            Object result = stepDef.getExecutor().execute(input, ctx);
            if (result instanceof StepResult<?>) {
                return (StepResult<?>) result;
            } else {
                // The executor returns the raw result, wrap it in StepResult
                return StepResult.continueWith(result);
            }
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/utils/ReflectionUtils.java
|
package ai.driftkit.workflow.engine.utils;

import ai.driftkit.workflow.engine.async.TaskProgressReporter;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import lombok.extern.slf4j.Slf4j;
import java.io.Serializable;
import java.lang.invoke.SerializedLambda;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.lang.reflect.Field;
import java.lang.reflect.InaccessibleObjectException;
import java.util.ArrayList;
import java.util.List;
import java.lang.reflect.Modifier;

/**
 * Utility class for common reflection operations used throughout the workflow engine.
 * Extracted from WorkflowBuilder, StepDefinition, and AsyncStepHandler to centralize reflection logic.
 */
@Slf4j
public final class ReflectionUtils {

    private static final AtomicInteger STEP_COUNTER = new AtomicInteger(0);

    private ReflectionUtils() {}

    /**
     * Extracts method name from a lambda expression or method reference.
     * This is the exact logic from WorkflowBuilder.extractLambdaMethodName().
     *
     * @param lambda The lambda or method reference
     * @return The extracted method name or generated ID
     */
    public static String extractLambdaMethodName(Object lambda) {
        if (lambda == null) {
            throw new IllegalArgumentException("Lambda cannot be null");
        }

        // Try SerializedLambda approach first (works when lambda is Serializable)
        if (lambda instanceof Serializable) {
            try {
                Method writeReplace = lambda.getClass().getDeclaredMethod("writeReplace");
                writeReplace.setAccessible(true);
                SerializedLambda serializedLambda = (SerializedLambda) writeReplace.invoke(lambda);

                String implMethodName = serializedLambda.getImplMethodName();

                // Check if it's a synthetic lambda (starts with "lambda$")
                if (implMethodName.startsWith("lambda$")) {
                    // Generate a more meaningful ID
                    return generateStepId();
                }

                // It's a method reference, return the method name
                log.debug("Extracted method name from SerializedLambda: {}", implMethodName);
                return implMethodName;
            } catch (Exception e) {
                log.debug("Could not extract method name from SerializedLambda: {}", e.getMessage());
            }
        }

        // Fallback: try to extract from class name.
        // JDK 8-14 name synthetic lambda classes "Outer$$Lambda$42"; since JDK 15
        // they are hidden classes named "Outer$$Lambda/0x..." (JDK-8254266).
        // Match on the shared "$$Lambda" marker so both naming schemes work.
        String className = lambda.getClass().getName();
        int lambdaIndex = className.indexOf("$$Lambda");
        if (lambdaIndex >= 0) {
            // Extract the base class name as a hint
            String baseClass = className.substring(0, lambdaIndex);
            int lastDot = baseClass.lastIndexOf('.');
            if (lastDot >= 0) {
                baseClass = baseClass.substring(lastDot + 1);
            }
            // Locale.ROOT keeps generated IDs stable regardless of default locale
            // (e.g. the Turkish dotless-i lowercasing rules).
            return baseClass.toLowerCase(Locale.ROOT) + "_" + generateStepId().substring(5); // Remove "step_" prefix
        }

        // Ultimate fallback
        return generateStepId();
    }

    /**
     * Generate a unique step ID.
     * This is the exact logic from WorkflowBuilder.generateStepId().
     *
     * @return A unique step ID of the form "step_xxxxxxxx"
     */
    public static String generateStepId() {
        return "step_" + UUID.randomUUID().toString().substring(0, 8);
    }

    /**
     * Extracts method information from a serializable lambda.
     * This is the exact logic from StepDefinition.extractMethodInfo().
     *
     * @param lambda The serializable lambda
     * @return MethodInfo containing method details
     * @throws IllegalArgumentException when the lambda cannot be introspected
     */
    public static MethodInfo extractMethodInfo(Serializable lambda) {
        try {
            Method writeReplace = lambda.getClass().getDeclaredMethod("writeReplace");
            writeReplace.setAccessible(true);
            SerializedLambda serializedLambda = (SerializedLambda) writeReplace.invoke(lambda);

            String implClass = serializedLambda.getImplClass().replace('/', '.');
            String implMethodName = serializedLambda.getImplMethodName();

            // Load the class and find the method
            Class<?> clazz = Class.forName(implClass);

            // Find the method - we need to search through all methods
            // since we don't know the exact parameter types
            Method targetMethod = null;
            for (Method method : clazz.getDeclaredMethods()) {
                if (method.getName().equals(implMethodName)) {
                    targetMethod = method;
                    break;
                }
            }

            if (targetMethod == null) {
                throw new IllegalStateException("Could not find method: " + implMethodName);
            }

            // Extract input type (first parameter that's not WorkflowContext)
            Class<?> inputType = null;
            for (Class<?> paramType : targetMethod.getParameterTypes()) {
                if (!WorkflowContext.class.isAssignableFrom(paramType)) {
                    inputType = paramType;
                    break;
                }
            }

            // Extract output type from StepResult<T>
            Class<?> outputType = extractStepResultType(targetMethod.getGenericReturnType());

            log.debug("Extracted method info: class={}, method={}, input={}, output={}",
                    implClass, implMethodName, inputType, outputType);

            return new MethodInfo(implMethodName, inputType, outputType);
        } catch (Exception e) {
            log.error("Failed to extract method info from lambda", e);
            throw new IllegalArgumentException(
                    "Cannot extract method name from lambda. Please use explicit ID.", e);
        }
    }

    /**
     * Builds method arguments for async step invocation.
     * This is the exact logic from AsyncStepHandler.buildAsyncMethodArgs().
     *
     * <p>Each expected argument kind (task args, context, progress reporter)
     * is matched at most once, in declaration order of the method parameters.
     *
     * @param method The method to invoke
     * @param asyncResult The async result object
     * @param context The workflow context
     * @param progressReporter The progress reporter
     * @return Array of method arguments in correct order
     * @throws IllegalArgumentException on unexpected parameter types or when the
     *         method does not accept a TaskProgressReporter
     */
    public static Object[] buildAsyncMethodArgs(Method method, Object asyncResult,
                                                WorkflowContext context, TaskProgressReporter progressReporter) {
        Class<?>[] paramTypes = method.getParameterTypes();
        Object[] args = new Object[paramTypes.length];

        boolean hasTaskArgs = false;
        boolean hasContext = false;
        boolean hasProgress = false;

        // Fill arguments based on parameter types
        for (int i = 0; i < paramTypes.length; i++) {
            Class<?> paramType = paramTypes[i];

            if (!hasTaskArgs && (Map.class.isAssignableFrom(paramType) || paramType.isInstance(asyncResult))) {
                args[i] = asyncResult;
                hasTaskArgs = true;
            } else if (!hasContext && WorkflowContext.class.isAssignableFrom(paramType)) {
                args[i] = context;
                hasContext = true;
            } else if (!hasProgress && TaskProgressReporter.class.isAssignableFrom(paramType)) {
                args[i] = progressReporter;
                hasProgress = true;
            } else {
                throw new IllegalArgumentException(
                        "Async method " + method.getName() + " has unexpected parameter type at position " + i +
                                ": " + paramType.getName()
                );
            }
        }

        // Validate that all required parameters are present
        if (!hasProgress) {
            throw new IllegalArgumentException(
                    "Async method " + method.getName() + " must accept TaskProgressReporter parameter"
            );
        }

        return args;
    }

    /**
     * Extracts the generic type parameter from StepResult<T>.
     * This is the exact logic from StepDefinition.extractStepResultType().
     *
     * @param returnType The return type to analyze
     * @return The extracted type or Object.class if not found
     */
    public static Class<?> extractStepResultType(Type returnType) {
        if (returnType instanceof ParameterizedType) {
            ParameterizedType paramType = (ParameterizedType) returnType;

            // Check if it's StepResult<T>
            if (paramType.getRawType() == StepResult.class) {
                Type[] typeArgs = paramType.getActualTypeArguments();
                if (typeArgs.length > 0 && typeArgs[0] instanceof Class) {
                    return (Class<?>) typeArgs[0];
                }
            }
        }

        // Default to Object if we can't determine the type
        return Object.class;
    }

    /**
     * Container for method reflection information.
     * This matches the MethodInfo class from StepDefinition.
     */
    public static class MethodInfo {
        private final String methodName;
        private final Class<?> inputType;
        private final Class<?> outputType;

        public MethodInfo(String methodName, Class<?> inputType, Class<?> outputType) {
            this.methodName = methodName;
            this.inputType = inputType;
            this.outputType = outputType;
        }

        public String getMethodName() { return methodName; }
        public Class<?> getInputType() { return inputType; }
        public Class<?> getOutputType() { return outputType; }

        @Override
        public String toString() {
            return methodName + "(" +
                    (inputType != null ? inputType.getSimpleName() : "?") +
                    ") -> " +
                    (outputType != null ? outputType.getSimpleName() : "?");
        }
    }

    /**
     * Makes a field accessible, handling InaccessibleObjectException for Java module system.
     * Skips fields from java.* packages that cannot be accessed due to module restrictions.
     *
     * @param field The field to make accessible
     * @return true if the field was made accessible, false if it should be skipped
     */
    public static boolean trySetAccessible(Field field) {
        // Skip fields from java.* packages to avoid InaccessibleObjectException
        if (field.getDeclaringClass().getName().startsWith("java.")) {
            log.trace("Skipping field from java.* package: {}.{}",
                    field.getDeclaringClass().getName(), field.getName());
            return false;
        }

        try {
            field.setAccessible(true);
            return true;
        } catch (InaccessibleObjectException e) {
            log.debug("Cannot make field accessible: {}.{} - {}",
                    field.getDeclaringClass().getName(), field.getName(), e.getMessage());
            return false;
        }
    }

    /**
     * Checks if a field should be included in schema generation.
     * Excludes static, transient, synthetic fields and fields from java.* packages.
     *
     * @param field The field to check
     * @return true if the field should be included, false otherwise
     */
    public static boolean shouldIncludeField(Field field) {
        // Skip static, transient, or synthetic fields
        if (Modifier.isStatic(field.getModifiers()) ||
                Modifier.isTransient(field.getModifiers()) ||
                field.isSynthetic()) {
            return false;
        }

        // Skip fields from java.* packages
        if (field.getDeclaringClass().getName().startsWith("java.")) {
            return false;
        }

        return true;
    }

    /**
     * Gets all fields from a class and its superclasses, filtering out inaccessible ones.
     *
     * @param clazz The class to get fields from
     * @param includeInherited Whether to include fields from superclasses
     * @return List of accessible fields
     */
    public static List<Field> getAccessibleFields(Class<?> clazz, boolean includeInherited) {
        List<Field> accessibleFields = new ArrayList<>();
        Class<?> currentClass = clazz;

        while (currentClass != null && currentClass != Object.class) {
            for (Field field : currentClass.getDeclaredFields()) {
                if (shouldIncludeField(field) && trySetAccessible(field)) {
                    accessibleFields.add(field);
                }
            }
            if (!includeInherited) {
                break;
            }
            currentClass = currentClass.getSuperclass();
        }

        return accessibleFields;
    }

    /**
     * Safely gets the value of a field from an object.
     *
     * @param field The field to get value from
     * @param instance The object instance
     * @return The field value, or null if it cannot be accessed
     */
    public static Object getFieldValue(Field field, Object instance) {
        if (!trySetAccessible(field)) {
            return null;
        }

        try {
            return field.get(instance);
        } catch (IllegalAccessException e) {
            log.debug("Cannot access field {}.{}: {}",
                    field.getDeclaringClass().getName(), field.getName(), e.getMessage());
            return null;
        }
    }

    /**
     * Safely sets the value of a field on an object.
     *
     * @param field The field to set
     * @param instance The object instance
     * @param value The value to set
     * @return true if the field was set successfully, false otherwise
     */
    public static boolean setFieldValue(Field field, Object instance, Object value) {
        if (!trySetAccessible(field)) {
            return false;
        }

        try {
            field.set(instance, value);
            return true;
        } catch (IllegalAccessException e) {
            log.debug("Cannot set field {}.{}: {}",
                    field.getDeclaringClass().getName(), field.getName(), e.getMessage());
            return false;
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine
|
java-sources/ai/driftkit/driftkit-workflow-engine-core/0.8.1/ai/driftkit/workflow/engine/utils/WorkflowInputOutputHandler.java
|
package ai.driftkit.workflow.engine.utils;
import ai.driftkit.common.domain.chat.ChatMessage.DataProperty;
import ai.driftkit.workflow.engine.builder.InternalRoutingMarker;
import ai.driftkit.workflow.engine.core.StepOutput;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.graph.StepNode;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.schema.SchemaUtils;
import ai.driftkit.workflow.engine.analyzer.TypeUtils;
import ai.driftkit.common.domain.chat.ChatRequest;
import lombok.extern.slf4j.Slf4j;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Utility class for handling user input and input resolution in workflow contexts.
* Centralizes logic for type deserialization, compatibility checking, and finding suitable inputs from history.
*/
@Slf4j
public final class WorkflowInputOutputHandler {
private WorkflowInputOutputHandler() {
// Utility class
}
/**
 * Retrieves pending user input for the given step, if present and type-compatible.
 * Input is one-shot: once accepted by a step it is removed from the context so
 * it cannot be delivered twice.
 *
 * @param instance The workflow instance
 * @param step The step expecting input
 * @return User input if available and compatible, null otherwise
 */
public static Object getUserInputForStep(WorkflowInstance instance, StepNode step) {
    WorkflowContext ctx = instance.getContext();
    Class<?> expectedInputType = step.executor().getInputType();

    // Nothing pending - nothing to deliver.
    if (!ctx.hasStepResult(WorkflowContext.Keys.USER_INPUT)) {
        return null;
    }

    Object userInput = deserializeUserInput(ctx, expectedInputType);
    if (userInput == null || !step.canAcceptInput(userInput.getClass())) {
        return null;
    }

    log.debug("Using user input of type {} for step {}",
            userInput.getClass().getSimpleName(), step.id());
    // Remove userInput and its type from context after use
    clearUserInput(instance);
    return userInput;
}
/**
 * Deserializes the stored user input, preferring the concrete type recorded
 * when the input was saved. Falls back to {@code Object.class} when no type
 * information exists, the recorded class cannot be loaded, or the recorded
 * type is not assignable to the step's expected type.
 *
 * @param ctx The workflow context
 * @param expectedInputType The expected input type for the step
 * @return Deserialized user input or null
 */
private static Object deserializeUserInput(WorkflowContext ctx, Class<?> expectedInputType) {
    String userInputTypeName = ctx.getStepResultOrDefault(
            WorkflowContext.Keys.USER_INPUT_TYPE, String.class, null);

    // No recorded type information available - deserialize generically.
    if (userInputTypeName == null || expectedInputType == null) {
        return ctx.getStepResult(WorkflowContext.Keys.USER_INPUT, Object.class);
    }

    try {
        Class<?> savedType = Class.forName(userInputTypeName);
        if (!expectedInputType.isAssignableFrom(savedType)) {
            // Type mismatch - fall back to Object.class
            log.warn("Saved type {} is not compatible with expected type {}",
                    savedType.getSimpleName(), expectedInputType.getSimpleName());
            return ctx.getStepResult(WorkflowContext.Keys.USER_INPUT, Object.class);
        }
        Object userInput = ctx.getStepResult(WorkflowContext.Keys.USER_INPUT, savedType);
        log.debug("Deserialized user input with saved type {}", savedType.getSimpleName());
        return userInput;
    } catch (ClassNotFoundException e) {
        log.warn("Could not load saved type class: {}, falling back to Object.class",
                userInputTypeName);
        return ctx.getStepResult(WorkflowContext.Keys.USER_INPUT, Object.class);
    }
}
/**
* Clears user input and type information from context.
* Should be called after user input has been consumed.
*
* @param instance The workflow instance
*/
public static void clearUserInput(WorkflowInstance instance) {
instance.updateContext(WorkflowContext.Keys.USER_INPUT, null);
instance.updateContext(WorkflowContext.Keys.USER_INPUT_TYPE, null);
}
/**
* Saves user input with type information to context.
* Used when workflow is resumed with user-provided data.
* Special handling for ChatRequest to convert to actual type.
*
* @param instance The workflow instance
* @param userInput The user input to save
*/
public static void saveUserInput(WorkflowInstance instance, Object userInput) {
if (userInput != null) {
Object actualInput = userInput;
// Special handling for ChatRequest - convert to actual type
if (userInput instanceof ChatRequest) {
ChatRequest chatRequest = (ChatRequest) userInput;
String schemaName = chatRequest.getRequestSchemaName();
if (schemaName != null) {
// Use TypeUtils to resolve the actual input type
Class<?> actualInputClass = TypeUtils.resolveInputType(chatRequest, null);
if (actualInputClass != null) {
// Convert using TypeUtils
Object convertedInput = TypeUtils.convertChatRequestToClass(chatRequest, actualInputClass);
if (convertedInput != null) {
log.debug("Converted ChatRequest to {} using schema name: {}",
actualInputClass.getSimpleName(), schemaName);
actualInput = convertedInput;
} else {
log.warn("Failed to convert ChatRequest to {}, using ChatRequest as-is",
actualInputClass.getName());
}
} else {
log.warn("Schema not found in registry: {}, using ChatRequest as-is", schemaName);
}
}
}
instance.updateContext(WorkflowContext.Keys.USER_INPUT, actualInput);
instance.updateContext(WorkflowContext.Keys.USER_INPUT_TYPE,
actualInput.getClass().getName());
}
}
/**
* Checks if there is user input available in the context.
*
* @param ctx The workflow context
* @return true if user input exists, false otherwise
*/
public static boolean hasUserInput(WorkflowContext ctx) {
return ctx.hasStepResult(WorkflowContext.Keys.USER_INPUT);
}
/**
* Finds the most recent compatible output from execution history.
* Prioritizes exact type matches over compatible types.
*
* @param instance The workflow instance
* @param targetStep The step expecting input
* @return Compatible output if found, null otherwise
*/
public static Object findCompatibleOutputFromHistory(WorkflowInstance instance, StepNode targetStep) {
WorkflowContext ctx = instance.getContext();
Class<?> expectedInputType = targetStep.executor().getInputType();
List<WorkflowInstance.StepExecutionRecord> history = instance.getExecutionHistory();
log.debug("Finding compatible output for step {} expecting type: {}",
targetStep.id(), expectedInputType != null ? expectedInputType.getSimpleName() : "any");
if (history.isEmpty()) {
return null;
}
// First pass: Look for exact type match
Object exactMatch = findExactTypeMatch(history, ctx, targetStep, expectedInputType);
if (exactMatch != null) {
return exactMatch;
}
// Second pass: Look for compatible type
return findCompatibleTypeMatch(history, ctx, targetStep);
}
/**
* Finds an exact type match from execution history.
*/
private static Object findExactTypeMatch(List<WorkflowInstance.StepExecutionRecord> history,
WorkflowContext ctx,
StepNode targetStep,
Class<?> expectedInputType) {
if (expectedInputType == null || expectedInputType == Object.class) {
return null;
}
// Traverse history from most recent to oldest
for (int i = history.size() - 1; i >= 0; i--) {
WorkflowInstance.StepExecutionRecord exec = history.get(i);
// Skip if it's the target step itself
if (exec.getStepId().equals(targetStep.id())) {
continue;
}
StepOutput output = getStepOutput(ctx, exec.getStepId());
if (output != null && !isRoutingMarker(output) && output.getActualClass().equals(expectedInputType)) {
Object result = output.getValue();
log.debug("Found exact type match from step {} (type: {}) for step {}",
exec.getStepId(), output.getActualClass().getSimpleName(), targetStep.id());
return result;
}
}
return null;
}
/**
* Finds a compatible type match from execution history.
*/
private static Object findCompatibleTypeMatch(List<WorkflowInstance.StepExecutionRecord> history,
WorkflowContext ctx,
StepNode targetStep) {
// Traverse history from most recent to oldest
for (int i = history.size() - 1; i >= 0; i--) {
WorkflowInstance.StepExecutionRecord exec = history.get(i);
// Skip if it's the target step itself
if (exec.getStepId().equals(targetStep.id())) {
continue;
}
StepOutput output = getStepOutput(ctx, exec.getStepId());
if (output != null && !isRoutingMarker(output) && targetStep.canAcceptInput(output.getActualClass())) {
Object result = output.getValue();
log.debug("Found compatible output from step {} (type: {}) for step {}",
exec.getStepId(), output.getActualClass().getSimpleName(), targetStep.id());
return result;
}
}
return null;
}
/**
* Safely retrieves step output from context.
*/
private static StepOutput getStepOutput(WorkflowContext ctx, String stepId) {
if (!ctx.hasStepResult(stepId)) {
return null;
}
return ctx.getStepOutputs().get(stepId);
}
/**
* Checks if the output is a routing marker (used for branch decisions).
* These should not be used as input for subsequent steps.
*
* @param output The step output to check
* @return true if this is a routing marker
*/
private static boolean isRoutingMarker(StepOutput output) {
if (output == null || !output.hasValue()) {
return false;
}
Object value = output.getValue();
if (value == null) {
return false;
}
// Check if it implements InternalRoutingMarker interface
return value instanceof InternalRoutingMarker;
}
/**
* Gets a human-readable type name for logging.
*
* @param type The class type
* @return Simple name or "any" for null/Object types
*/
public static String getTypeDisplayName(Class<?> type) {
if (type == null || type == Object.class) {
return "any";
}
return type.getSimpleName();
}
/**
* Extracts properties from workflow data objects.
* Handles various data formats including Maps with DataProperty lists,
* direct property maps, and domain objects.
*
* @param data The data to extract properties from
* @return Map of string key-value pairs
*/
public static Map<String, String> extractPropertiesFromData(Object data) {
if (data == null) {
return new HashMap<>();
}
// If data is already a properties map from a workflow response object
if (data instanceof Map) {
Map<?, ?> map = (Map<?, ?>) data;
// Check if this is a response object with properties
Object propsObj = map.get("properties");
if (propsObj instanceof List) {
// Handle DataProperty list
Map<String, String> properties = new HashMap<>();
List<?> propsList = (List<?>) propsObj;
for (Object prop : propsList) {
if (prop instanceof DataProperty dp) {
properties.put(dp.getName(), dp.getValue());
}
}
return properties;
} else if (propsObj instanceof Map) {
// Direct property map in "properties" field
Map<String, String> properties = new HashMap<>();
Map<?, ?> propsMap = (Map<?, ?>) propsObj;
propsMap.forEach((k, v) -> {
if (k != null && v != null) {
properties.put(k.toString(), v.toString());
}
});
return properties;
} else {
// No "properties" field, treat as plain map
Map<String, String> properties = new HashMap<>();
map.forEach((k, v) -> {
if (k != null && v != null) {
properties.put(k.toString(), v.toString());
}
});
return properties;
}
} else {
// Not a map, use schema utils to extract properties from domain object
return SchemaUtils.extractProperties(data);
}
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/adapter/PageResultAdapter.java
|
package ai.driftkit.workflow.engine.spring.adapter;
import ai.driftkit.workflow.engine.domain.PageRequest;
import ai.driftkit.workflow.engine.domain.PageResult;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
/**
 * Adapter to convert between Spring's Page/Pageable and core PageResult/PageRequest.
 */
public class PageResultAdapter {

    /**
     * Translates a Spring {@link Pageable} into the engine's own {@link PageRequest}.
     * When the pageable carries no sort, the result defaults to sorting by "id" ascending.
     */
    public static PageRequest toPageRequest(Pageable pageable) {
        String sortBy = "id";
        PageRequest.SortDirection direction = PageRequest.SortDirection.ASC;

        Sort sort = pageable.getSort();
        if (sort.isSorted()) {
            // Only the first sort order is carried over; PageRequest supports one key.
            Sort.Order firstOrder = sort.iterator().next();
            sortBy = firstOrder.getProperty();
            if (!firstOrder.isAscending()) {
                direction = PageRequest.SortDirection.DESC;
            }
        }

        return PageRequest.builder()
                .pageNumber(pageable.getPageNumber())
                .pageSize(pageable.getPageSize())
                .sortBy(sortBy)
                .sortDirection(direction)
                .build();
    }

    /**
     * Wraps a core {@link PageResult} in a Spring {@link Page} backed by the given pageable.
     */
    public static <T> Page<T> toPage(PageResult<T> pageResult, Pageable pageable) {
        return new PageImpl<>(pageResult.getContent(), pageable, pageResult.getTotalElements());
    }

    /**
     * Copies a Spring {@link Page} into the framework-agnostic {@link PageResult} form.
     */
    public static <T> PageResult<T> toPageResult(Page<T> page) {
        return PageResult.<T>builder()
                .content(page.getContent())
                .pageNumber(page.getNumber())
                .pageSize(page.getSize())
                .totalElements(page.getTotalElements())
                .build();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/autoconfigure/AsyncTaskConfiguration.java
|
package ai.driftkit.workflow.engine.spring.autoconfigure;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * Spring Boot auto-configuration for async task execution.
 * Provides thread pool configuration for async LLM operations.
 */
@Slf4j
@AutoConfiguration
@EnableAsync
@ConditionalOnProperty(
    prefix = "driftkit.workflow.async",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
@EnableConfigurationProperties(AsyncTaskConfiguration.AsyncTaskProperties.class)
public class AsyncTaskConfiguration {

    /**
     * Builds the shared async task executor unless the application already
     * defines a bean named {@code taskExecutor}.
     */
    @Bean(name = "taskExecutor")
    @ConditionalOnMissingBean(name = "taskExecutor")
    public Executor taskExecutor(AsyncTaskProperties properties) {
        log.info("Configuring async task executor with core pool size: {}, max pool size: {}",
                properties.getCorePoolSize(), properties.getMaxPoolSize());

        ThreadPoolTaskExecutor pool = new ThreadPoolTaskExecutor();

        // Pool sizing and queueing behavior, all driven by configuration properties.
        pool.setCorePoolSize(properties.getCorePoolSize());
        pool.setMaxPoolSize(properties.getMaxPoolSize());
        pool.setQueueCapacity(properties.getQueueCapacity());
        pool.setKeepAliveSeconds(properties.getKeepAliveSeconds());
        pool.setAllowCoreThreadTimeOut(properties.isAllowCoreThreadTimeOut());
        pool.setThreadNamePrefix("async-task-");

        // When saturated, run the task on the submitting thread rather than rejecting it.
        pool.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());

        // Graceful shutdown: let in-flight tasks finish, bounded by the configured wait.
        pool.setWaitForTasksToCompleteOnShutdown(true);
        pool.setAwaitTerminationSeconds(properties.getAwaitTerminationSeconds());

        pool.initialize();
        return pool;
    }

    /**
     * Configuration properties for async task execution
     */
    @Data
    @ConfigurationProperties(prefix = "driftkit.workflow.async")
    public static class AsyncTaskProperties {
        /**
         * Whether async task execution is enabled
         */
        private boolean enabled = true;
        /**
         * Core pool size for the thread pool
         */
        private int corePoolSize = 10;
        /**
         * Maximum pool size for the thread pool
         */
        private int maxPoolSize = 50;
        /**
         * Queue capacity for pending tasks
         */
        private int queueCapacity = 100;
        /**
         * Thread keep-alive time in seconds
         */
        private int keepAliveSeconds = 60;
        /**
         * Whether core threads are allowed to time out
         */
        private boolean allowCoreThreadTimeOut = false;
        /**
         * Time to wait for tasks to complete on shutdown (seconds)
         */
        private int awaitTerminationSeconds = 60;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/autoconfigure/WorkflowBeanPostProcessor.java
|
package ai.driftkit.workflow.engine.spring.autoconfigure;

import ai.driftkit.workflow.engine.annotations.Workflow;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeansException;
import org.springframework.beans.FatalBeanException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.core.annotation.AnnotationUtils;
/**
 * BeanPostProcessor that automatically registers beans annotated with @Workflow
 * in the WorkflowEngine.
 *
 * <p>This processor ensures that all workflow beans are automatically discovered
 * and registered without requiring manual registration.</p>
 */
@Slf4j
@RequiredArgsConstructor
public class WorkflowBeanPostProcessor implements BeanPostProcessor {

    private final WorkflowEngine workflowEngine;

    /**
     * Registers the bean with the workflow engine when its class carries the
     * {@code @Workflow} annotation (directly, via a superclass, or as a
     * meta-annotation — {@link AnnotationUtils#findAnnotation} resolves all three).
     *
     * @param bean     the fully initialized bean
     * @param beanName the bean's name in the application context
     * @return the bean unchanged
     * @throws BeansException if workflow registration fails (fail-fast)
     */
    @Override
    public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
        // Check if the bean class is annotated with @Workflow
        Workflow workflowAnnotation = AnnotationUtils.findAnnotation(bean.getClass(), Workflow.class);
        if (workflowAnnotation != null) {
            try {
                log.info("Auto-registering workflow bean: {} (id: {})",
                        beanName, workflowAnnotation.id());
                // Register the workflow with the engine
                workflowEngine.register(bean);
                log.debug("Successfully registered workflow: {} (version: {})",
                        workflowAnnotation.id(), workflowAnnotation.version());
            } catch (Exception e) {
                log.error("Failed to register workflow bean: {} (id: {})",
                        beanName, workflowAnnotation.id(), e);
                // Fail fast with FatalBeanException — the standard concrete
                // BeansException for unrecoverable bean setup errors — instead of
                // an anonymous BeansException subclass; the cause is preserved.
                throw new FatalBeanException("Failed to register workflow: " + workflowAnnotation.id(), e);
            }
        }
        return bean;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/autoconfigure/WorkflowEngineAutoConfiguration.java
|
package ai.driftkit.workflow.engine.spring.autoconfigure;
import ai.driftkit.workflow.engine.async.InMemoryProgressTracker;
import ai.driftkit.workflow.engine.async.ProgressTracker;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.core.WorkflowContextFactory;
import ai.driftkit.workflow.engine.domain.WorkflowEngineConfig;
import ai.driftkit.workflow.engine.persistence.*;
import ai.driftkit.workflow.engine.persistence.inmemory.*;
import ai.driftkit.common.service.ChatStore;
import ai.driftkit.common.service.TextTokenizer;
import ai.driftkit.common.service.impl.InMemoryChatStore;
import ai.driftkit.common.service.impl.SimpleTextTokenizer;
import ai.driftkit.workflow.engine.spring.context.SpringWorkflowContextFactory;
import ai.driftkit.workflow.engine.spring.service.WorkflowService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
/**
 * Spring Boot auto-configuration for DriftKit Workflow Engine.
 *
 * <p>This configuration automatically sets up the workflow engine
 * with Spring integration when included in a Spring Boot application.
 * Every bean below is guarded by {@code @ConditionalOnMissingBean}, so an
 * application can override any piece by declaring its own bean.</p>
 */
@Slf4j
@AutoConfiguration
@ConditionalOnClass(WorkflowEngine.class)
@EnableConfigurationProperties(WorkflowEngineProperties.class)
@ConditionalOnProperty(
    prefix = "driftkit.workflow.engine",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
@Import({WorkflowTracingAutoConfiguration.class})
public class WorkflowEngineAutoConfiguration {

    /** Default workflow state persistence: in-memory, non-durable across restarts. */
    @Bean
    @ConditionalOnMissingBean
    public WorkflowStateRepository workflowStateRepository() {
        log.info("Configuring in-memory WorkflowStateRepository");
        return new InMemoryWorkflowStateRepository();
    }

    /** Default progress tracking for async steps, held in memory. */
    @Bean
    @ConditionalOnMissingBean
    public ProgressTracker progressTracker() {
        log.info("Configuring in-memory ProgressTracker");
        return new InMemoryProgressTracker();
    }

    /**
     * Chat session storage for the workflow engine. Named explicitly so it does
     * not collide with other ChatSessionRepository beans the application may define.
     */
    @Bean(name = "workflowChatSessionRepository")
    @ConditionalOnMissingBean(name = "workflowChatSessionRepository")
    public ChatSessionRepository workflowChatSessionRepository() {
        log.info("Configuring in-memory ChatSessionRepository for workflow engine");
        return new InMemoryChatSessionRepository();
    }

    /** Tokenizer used by the chat store for sizing message history. */
    @Bean
    @ConditionalOnMissingBean
    public TextTokenizer textTokenizer() {
        log.info("Configuring SimpleTextTokenizer");
        return new SimpleTextTokenizer();
    }

    /** In-memory chat message store, backed by the tokenizer bean above. */
    @Bean
    @ConditionalOnMissingBean
    public ChatStore chatStore(TextTokenizer textTokenizer) {
        log.info("Configuring in-memory ChatStore");
        return new InMemoryChatStore(textTokenizer);
    }

    /** State of in-flight async steps, held in memory. */
    @Bean
    @ConditionalOnMissingBean
    public AsyncStepStateRepository asyncStepStateRepository() {
        log.info("Configuring in-memory AsyncStepStateRepository");
        return new InMemoryAsyncStepStateRepository();
    }

    /** Persistence for suspended-workflow data, held in memory. */
    @Bean
    @ConditionalOnMissingBean
    public SuspensionDataRepository suspensionDataRepository() {
        log.info("Configuring in-memory SuspensionDataRepository");
        return new InMemorySuspensionDataRepository();
    }

    /** Context factory that integrates workflow contexts with the Spring container. */
    @Bean
    @ConditionalOnMissingBean
    public WorkflowContextFactory workflowContextFactory() {
        log.info("Configuring SpringWorkflowContextFactory");
        return new SpringWorkflowContextFactory();
    }

    /**
     * Assembles the WorkflowEngine from configuration properties and the
     * repository/tracker beans above.
     *
     * <p>NOTE(review): {@code properties.getKeepAliveTime()} is not passed into
     * the builder here, so the {@code keep-alive-time} property appears to be
     * unused — confirm whether WorkflowEngineConfig should receive it.</p>
     */
    @Bean
    @ConditionalOnMissingBean
    public WorkflowEngine workflowEngine(
            WorkflowEngineProperties properties,
            WorkflowStateRepository stateRepository,
            ProgressTracker progressTracker,
            ChatSessionRepository workflowChatSessionRepository,
            ChatStore chatStore,
            AsyncStepStateRepository asyncStepStateRepository,
            SuspensionDataRepository suspensionDataRepository,
            WorkflowContextFactory contextFactory) {
        log.info("Configuring WorkflowEngine with properties: {}", properties);
        WorkflowEngineConfig config = WorkflowEngineConfig.builder()
                .coreThreads(properties.getCoreThreads())
                .maxThreads(properties.getMaxThreads())
                .scheduledThreads(properties.getScheduledThreads())
                .queueCapacity(properties.getQueueCapacity())
                .defaultStepTimeoutMs(properties.getDefaultStepTimeoutMs())
                .stateRepository(stateRepository)
                .progressTracker(progressTracker)
                .chatSessionRepository(workflowChatSessionRepository)
                .chatStore(chatStore)
                .asyncStepStateRepository(asyncStepStateRepository)
                .suspensionDataRepository(suspensionDataRepository)
                .contextFactory(contextFactory)
                .build();
        return new WorkflowEngine(config);
    }

    /** High-level service facade over the engine and its repositories. */
    @Bean
    @ConditionalOnMissingBean
    public WorkflowService workflowService(
            WorkflowEngine engine,
            ChatSessionRepository workflowChatSessionRepository,
            AsyncStepStateRepository asyncStepStateRepository,
            SuspensionDataRepository suspensionDataRepository,
            WorkflowStateRepository workflowStateRepository,
            ChatStore chatStore) {
        log.info("Configuring WorkflowService");
        return new WorkflowService(engine, workflowChatSessionRepository,
                asyncStepStateRepository, suspensionDataRepository, workflowStateRepository, chatStore);
    }

    /**
     * Registers the post-processor that auto-registers {@code @Workflow} beans.
     * Can be disabled with {@code driftkit.workflow.engine.auto-register=false}.
     */
    @Bean
    @ConditionalOnMissingBean
    @ConditionalOnProperty(
        prefix = "driftkit.workflow.engine",
        name = "auto-register",
        havingValue = "true",
        matchIfMissing = true
    )
    public WorkflowBeanPostProcessor workflowBeanPostProcessor(WorkflowEngine engine) {
        log.info("Configuring WorkflowBeanPostProcessor for automatic workflow registration");
        return new WorkflowBeanPostProcessor(engine);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/autoconfigure/WorkflowEngineProperties.java
|
package ai.driftkit.workflow.engine.spring.autoconfigure;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import java.time.Duration;
/**
* Configuration properties for DriftKit Workflow Engine.
*
* <p>These properties can be configured in application.yml/properties
* under the 'driftkit.workflow.engine' prefix.</p>
*/
@Data
@ConfigurationProperties(prefix = "driftkit.workflow.engine")
public class WorkflowEngineProperties {
    /**
     * Whether the workflow engine is enabled.
     */
    private boolean enabled = true;
    /**
     * Core number of threads in the workflow executor pool.
     */
    private int coreThreads = 10;
    /**
     * Maximum number of threads in the workflow executor pool.
     */
    private int maxThreads = 50;
    /**
     * Number of threads for scheduled tasks.
     */
    private int scheduledThreads = 5;
    /**
     * Keep-alive time for idle threads.
     *
     * <p>NOTE(review): this property is not forwarded to WorkflowEngineConfig
     * by the auto-configuration — confirm whether it is consumed anywhere.</p>
     */
    private Duration keepAliveTime = Duration.ofMinutes(1);
    /**
     * Queue capacity for workflow tasks.
     */
    private int queueCapacity = 1000;
    /**
     * Default timeout for workflow steps in milliseconds.
     */
    private long defaultStepTimeoutMs = 300_000; // 5 minutes
    /**
     * Controller configuration.
     */
    private ControllerProperties controller = new ControllerProperties();

    /**
     * Nested properties for the optional REST controller,
     * bound under {@code driftkit.workflow.engine.controller}.
     */
    @Data
    public static class ControllerProperties {
        /**
         * Whether the REST controller is enabled.
         */
        private boolean enabled = true;
        /**
         * Base path for workflow endpoints.
         */
        private String basePath = "/api/workflows";
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/autoconfigure/WorkflowMongoRepositoriesAutoConfiguration.java
|
package ai.driftkit.workflow.engine.spring.autoconfigure;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.data.mongo.MongoRepositoriesAutoConfiguration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
/**
* Auto-configuration for MongoDB repositories used by workflow engine.
* This configuration is separate to avoid conflicts when multiple modules try to enable repositories.
*/
@Slf4j
@AutoConfiguration(
    after = {MongoDataAutoConfiguration.class, MongoRepositoriesAutoConfiguration.class},
    before = WorkflowTracingAutoConfiguration.class
)
@ConditionalOnClass(MongoTemplate.class)
@ConditionalOnProperty(
    prefix = "driftkit.workflow.mongodb",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
@EnableMongoRepositories(basePackages = {
    "ai.driftkit.workflow.engine.spring.tracing.repository"
})
public class WorkflowMongoRepositoriesAutoConfiguration {

    /**
     * No wiring beyond the class-level annotations; the constructor only logs
     * that the tracing repositories are being enabled. Ordered before
     * {@link WorkflowTracingAutoConfiguration} so repository beans exist when
     * the tracing provider is conditioned on them.
     */
    public WorkflowMongoRepositoriesAutoConfiguration() {
        log.info("Enabling MongoDB repositories for workflow engine tracing");
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/autoconfigure/WorkflowTracingAutoConfiguration.java
|
package ai.driftkit.workflow.engine.spring.autoconfigure;
import ai.driftkit.workflow.engine.agent.NoOpRequestTracingProvider;
import ai.driftkit.workflow.engine.agent.RequestTracingProvider;
import ai.driftkit.workflow.engine.spring.tracing.SpringRequestTracingProvider;
import ai.driftkit.workflow.engine.spring.tracing.repository.CoreModelRequestTraceRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
/**
* Spring Boot auto-configuration for workflow tracing functionality.
* Provides automatic setup of request tracing with MongoDB persistence.
*/
@Slf4j
@AutoConfiguration(after = MongoDataAutoConfiguration.class)
@ConditionalOnClass({RequestTracingProvider.class, CoreModelRequestTraceRepository.class})
@ConditionalOnProperty(
    prefix = "driftkit.workflow.tracing",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
@EnableConfigurationProperties(WorkflowTracingProperties.class)
@ComponentScan(basePackages = "ai.driftkit.workflow.engine.spring.tracing")
public class WorkflowTracingAutoConfiguration {

    /**
     * Dedicated executor for asynchronous trace persistence; pool size is
     * taken from {@code driftkit.workflow.tracing.trace-threads}.
     */
    @Bean(name = "traceExecutor")
    @ConditionalOnMissingBean(name = "traceExecutor")
    public Executor traceExecutor(WorkflowTracingProperties properties) {
        log.info("Configuring trace executor with {} threads", properties.getTraceThreads());
        return Executors.newFixedThreadPool(properties.getTraceThreads());
    }

    /**
     * MongoDB-backed tracing provider, created only when the trace repository
     * bean is present and no other provider has been defined.
     */
    @Bean
    @ConditionalOnMissingBean(RequestTracingProvider.class)
    @ConditionalOnBean(CoreModelRequestTraceRepository.class)
    public RequestTracingProvider requestTracingProvider(
            CoreModelRequestTraceRepository repository,
            Executor traceExecutor) {
        log.info("Configuring SpringRequestTracingProvider for workflow tracing");
        return new SpringRequestTracingProvider(repository, traceExecutor);
    }

    /**
     * No-op fallback when no other RequestTracingProvider exists.
     *
     * <p>NOTE(review): both provider beans are conditioned on a missing
     * RequestTracingProvider; the fallback yielding to the Mongo-backed bean
     * relies on declaration-order condition evaluation within this class —
     * confirm this ordering is intentional and stable.</p>
     */
    @Bean
    @ConditionalOnMissingBean(RequestTracingProvider.class)
    public RequestTracingProvider noOpRequestTracingProvider() {
        log.info("MongoDB not available or tracing disabled - using NoOpRequestTracingProvider");
        return new NoOpRequestTracingProvider();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/autoconfigure/WorkflowTracingProperties.java
|
package ai.driftkit.workflow.engine.spring.autoconfigure;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* Configuration properties for workflow tracing.
*/
@Data
@ConfigurationProperties(prefix = "driftkit.workflow.tracing")
public class WorkflowTracingProperties {
/**
* Enable or disable workflow tracing
*/
private boolean enabled = true;
/**
* Number of threads for asynchronous trace saving
*/
private int traceThreads = 2;
/**
* MongoDB collection name for traces
*/
private String collection = "model_request_traces";
/**
* Enable trace logging
*/
private boolean logTraces = false;
/**
* Maximum trace age in days (for cleanup)
*/
private int maxTraceAgeDays = 30;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/AnalyticsClient.java
|
package ai.driftkit.workflow.engine.spring.client;
import ai.driftkit.workflow.engine.spring.dto.AnalyticsDtos.DailyMetricsResponse;
import ai.driftkit.workflow.engine.spring.dto.AnalyticsDtos.PromptMetricsResponse;
import ai.driftkit.workflow.engine.spring.dto.AnalyticsDtos.TaskVariables;
import ai.driftkit.workflow.engine.spring.tracing.domain.ModelRequestTrace;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.data.domain.Page;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.web.bind.annotation.*;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.List;
/**
* Feign client for AnalyticsController.
* Provides remote access to analytics and metrics endpoints.
*/
@FeignClient(name = "analytics-service", path = "/api/v1/analytics", configuration = WorkflowFeignConfiguration.class)
public interface AnalyticsClient {

    /**
     * Get model request traces within a time range.
     *
     * @param startTime      inclusive lower bound (ISO date-time), optional
     * @param endTime        inclusive upper bound (ISO date-time), optional
     * @param promptId       restrict to a single prompt, optional
     * @param excludePurpose purpose value to filter out, optional
     * @param page           zero-based page index (default 0)
     * @param size           page size (default 20)
     */
    @GetMapping("/traces")
    Page<ModelRequestTrace> getTraces(
            @RequestParam(value = "startTime", required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime startTime,
            @RequestParam(value = "endTime", required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime endTime,
            @RequestParam(value = "promptId", required = false) String promptId,
            @RequestParam(value = "excludePurpose", required = false) String excludePurpose,
            @RequestParam(value = "page", defaultValue = "0") int page,
            @RequestParam(value = "size", defaultValue = "20") int size
    );

    /**
     * Get traces by context ID.
     *
     * @param contextId the workflow/request context identifier
     */
    @GetMapping("/traces/context/{contextId}")
    List<ModelRequestTrace> getTracesByContextId(@PathVariable("contextId") String contextId);

    /**
     * Get message tasks by context IDs.
     *
     * @param contextIds context identifiers to look up, sent as the request body
     */
    @PostMapping("/tasks")
    List<TaskVariables> getMessageTasksByContextIds(@RequestBody List<String> contextIds);

    /**
     * Get available prompt methods for analytics.
     */
    @GetMapping("/prompts/methods")
    List<String> getAvailablePromptMethods();

    /**
     * Get daily metrics.
     *
     * @param startDate inclusive start date (ISO date), optional
     * @param endDate   inclusive end date (ISO date), optional
     */
    @GetMapping("/metrics/daily")
    DailyMetricsResponse getDailyMetrics(
            @RequestParam(value = "startDate", required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate startDate,
            @RequestParam(value = "endDate", required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate endDate
    );

    /**
     * Get metrics for a specific prompt method.
     *
     * @param startTime inclusive lower bound (ISO date-time), optional
     * @param endTime   inclusive upper bound (ISO date-time), optional
     * @param promptId  prompt to report on, optional
     */
    @GetMapping("/metrics/prompt")
    PromptMetricsResponse getPromptMetrics(
            @RequestParam(value = "startTime", required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime startTime,
            @RequestParam(value = "endTime", required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime endTime,
            @RequestParam(value = "promptId", required = false) String promptId
    );
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/AssistantClient.java
|
package ai.driftkit.workflow.engine.spring.client;
import ai.driftkit.common.domain.chat.ChatRequest;
import ai.driftkit.common.domain.chat.ChatResponse;
import ai.driftkit.workflow.engine.spring.dto.AssistantDtos.*;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;
import java.util.List;
/**
 * Feign client for AssistantController.
 * Provides remote access to AI assistant chat endpoints.
 *
 * <p>Most endpoints accept optional correlation headers (X-Session-Id,
 * X-User-Id, X-Workflow-Id); their exact semantics are defined by the
 * remote AssistantController.
 */
@FeignClient(name = "assistant-service", path = "/api/v3/assistant", configuration = WorkflowFeignConfiguration.class)
public interface AssistantClient {
    /**
     * Send a chat message synchronously.
     *
     * @param request    chat payload to process
     * @param sessionId  optional session correlation header
     * @param userId     optional user identity header
     * @param workflowId optional workflow routing header
     * @return the assistant's complete response
     */
    @PostMapping(value = "/chat", produces = MediaType.APPLICATION_JSON_VALUE)
    ResponseEntity<ChatResponse> chat(
            @RequestBody ChatRequest request,
            @RequestHeader(value = "X-Session-Id", required = false) String sessionId,
            @RequestHeader(value = "X-User-Id", required = false) String userId,
            @RequestHeader(value = "X-Workflow-Id", required = false) String workflowId
    );

    /**
     * Send a chat message with streaming response.
     * Note: Feign doesn't support streaming responses well.
     * Consider using WebClient for streaming endpoints.
     */
    @PostMapping(value = "/chat/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    Flux<StreamEvent> chatStream(
            @RequestBody ChatRequest request,
            @RequestHeader(value = "X-Session-Id", required = false) String sessionId,
            @RequestHeader(value = "X-User-Id", required = false) String userId,
            @RequestHeader(value = "X-Workflow-Id", required = false) String workflowId
    );

    /**
     * Start a new chat session.
     *
     * @param request optional session parameters (workflow, initial context)
     * @param userId  optional user identity header
     */
    @PostMapping("/sessions")
    ResponseEntity<SessionInfo> createSession(
            @RequestBody(required = false) SessionRequest request,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Get session info.
     */
    @GetMapping("/sessions/{sessionId}")
    ResponseEntity<SessionInfo> getSession(
            @PathVariable("sessionId") String sessionId,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Update session context.
     */
    @PutMapping("/sessions/{sessionId}/context")
    ResponseEntity<Void> updateSessionContext(
            @PathVariable("sessionId") String sessionId,
            @RequestBody SessionContext context,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Clear session memory (conversation state) while keeping the session itself alive.
     */
    @DeleteMapping("/sessions/{sessionId}/memory")
    ResponseEntity<Void> clearSessionMemory(
            @PathVariable("sessionId") String sessionId,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * End a chat session.
     */
    @DeleteMapping("/sessions/{sessionId}")
    ResponseEntity<Void> endSession(
            @PathVariable("sessionId") String sessionId,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Get chat history for a session.
     *
     * @param limit maximum number of messages to return (default 100)
     */
    @GetMapping("/sessions/{sessionId}/history")
    ResponseEntity<List<ChatMessage>> getChatHistory(
            @PathVariable("sessionId") String sessionId,
            @RequestParam(value = "limit", defaultValue = "100") int limit,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Get available workflows for assistant.
     */
    @GetMapping("/workflows")
    ResponseEntity<List<WorkflowInfo>> getAvailableWorkflows(
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Get assistant capabilities.
     */
    @GetMapping("/capabilities")
    ResponseEntity<AssistantCapabilities> getCapabilities();
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/AsyncModelRequestClient.java
|
package ai.driftkit.workflow.engine.spring.client;
import ai.driftkit.common.domain.PromptRequest;
import ai.driftkit.workflow.engine.spring.dto.ModelRequestDtos.AsyncTaskResponse;
import ai.driftkit.workflow.engine.spring.dto.ModelRequestDtos.TaskRating;
import ai.driftkit.workflow.engine.spring.dto.ModelRequestDtos.TextRequest;
import ai.driftkit.workflow.engine.spring.tracing.domain.AsyncTaskEntity;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
/**
 * Feign client for AsyncModelRequestController.
 * Provides remote access to asynchronous LLM model request endpoints.
 *
 * <p>Submission endpoints return a task ID immediately; callers then poll
 * status/result endpoints with that ID.
 */
@FeignClient(name = "async-model-request-service", path = "/api/v1/model/async", configuration = WorkflowFeignConfiguration.class)
public interface AsyncModelRequestClient {
    /**
     * Process a prompt request asynchronously - returns task ID immediately.
     */
    @PostMapping(value = "/prompt", produces = MediaType.APPLICATION_JSON_VALUE)
    AsyncTaskResponse processPromptRequestAsync(
            @RequestBody PromptRequest request,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Process a text request asynchronously - returns task ID immediately.
     */
    @PostMapping(value = "/text", produces = MediaType.APPLICATION_JSON_VALUE)
    AsyncTaskResponse processTextRequestAsync(
            @RequestBody TextRequest request,
            @RequestHeader(value = "X-User-Id", required = false) String userId
    );

    /**
     * Get task status.
     */
    @GetMapping("/task/{taskId}/status")
    ResponseEntity<AsyncTaskResponse> getTaskStatus(@PathVariable("taskId") String taskId);

    /**
     * Get task result. The payload type depends on the submitted request kind,
     * hence the wildcard response body.
     */
    @GetMapping("/task/{taskId}/result")
    ResponseEntity<?> getTaskResult(@PathVariable("taskId") String taskId);

    /**
     * Get full task details.
     */
    @GetMapping("/task/{taskId}")
    ResponseEntity<AsyncTaskEntity> getTask(@PathVariable("taskId") String taskId);

    /**
     * Cancel a task.
     */
    @DeleteMapping("/task/{taskId}")
    ResponseEntity<Void> cancelTask(@PathVariable("taskId") String taskId);

    /**
     * Rate a task result.
     */
    @PostMapping("/task/{taskId}/rate")
    ResponseEntity<AsyncTaskEntity> rateTask(
            @PathVariable("taskId") String taskId,
            @RequestBody TaskRating rating
    );
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/ModelRequestClient.java
|
package ai.driftkit.workflow.engine.spring.client;
import ai.driftkit.common.domain.PromptRequest;
import ai.driftkit.workflow.engine.spring.dto.ModelRequestDtos.TextRequest;
import ai.driftkit.workflow.engine.agent.AgentResponse;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
/**
 * Feign client for ModelRequestController.
 * Provides remote access to synchronous LLM model request endpoints.
 *
 * <p>Unlike {@code AsyncModelRequestClient}, these calls block until the
 * model produces a result.
 */
@FeignClient(name = "model-request-service", path = "/api/v1/model", configuration = WorkflowFeignConfiguration.class)
public interface ModelRequestClient {
    /**
     * Process a prompt request synchronously. The response payload type is
     * determined by the prompt's configured output, hence the wildcard.
     */
    @PostMapping(value = "/prompt", produces = MediaType.APPLICATION_JSON_VALUE)
    AgentResponse<?> processPromptRequest(@RequestBody PromptRequest request);

    /**
     * Process a text request synchronously, returning the generated text.
     */
    @PostMapping(value = "/text", produces = MediaType.APPLICATION_JSON_VALUE)
    AgentResponse<String> processTextRequest(@RequestBody TextRequest request);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/WorkflowAdminClient.java
|
package ai.driftkit.workflow.engine.spring.client;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
 * Feign client for WorkflowAdminController.
 * Provides remote access to workflow administration endpoints.
 */
@FeignClient(name = "workflow-admin-service", path = "/api/v1/workflow-admin", configuration = WorkflowFeignConfiguration.class)
public interface WorkflowAdminClient {
    /**
     * Get all workflow instances with optional filtering.
     *
     * @param workflowId optional filter by workflow definition ID
     * @param status     optional filter by instance status
     * @param limit      maximum number of instances to return (default 100)
     */
    @GetMapping("/instances")
    ResponseEntity<List<WorkflowInstance>> getAllInstances(
            @RequestParam(value = "workflowId", required = false) String workflowId,
            @RequestParam(value = "status", required = false) String status,
            @RequestParam(value = "limit", defaultValue = "100") int limit
    );

    /**
     * Get a specific workflow instance.
     */
    @GetMapping("/instances/{instanceId}")
    ResponseEntity<WorkflowInstance> getInstance(@PathVariable("instanceId") String instanceId);

    /**
     * Delete a workflow instance.
     */
    @DeleteMapping("/instances/{instanceId}")
    ResponseEntity<Void> deleteInstance(@PathVariable("instanceId") String instanceId);

    /**
     * Clean up old workflow instances.
     *
     * @param daysToKeep retain instances newer than this many days (default 30)
     * @param dryRun     when true, report what would be deleted without deleting
     */
    @PostMapping("/cleanup")
    ResponseEntity<Map<String, Object>> cleanup(
            @RequestParam(value = "daysToKeep", defaultValue = "30") int daysToKeep,
            @RequestParam(value = "dryRun", defaultValue = "false") boolean dryRun
    );

    /**
     * Get workflow statistics, optionally scoped to a single workflow.
     */
    @GetMapping("/stats")
    ResponseEntity<Map<String, Object>> getStatistics(
            @RequestParam(value = "workflowId", required = false) String workflowId
    );
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/WorkflowClient.java
|
package ai.driftkit.workflow.engine.spring.client;
import ai.driftkit.workflow.engine.spring.dto.WorkflowDtos.*;
import ai.driftkit.workflow.engine.domain.WorkflowDetails;
import ai.driftkit.workflow.engine.domain.WorkflowMetadata;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import ai.driftkit.workflow.engine.async.ProgressTracker.Progress;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
 * Feign client for WorkflowController.
 * Provides remote access to workflow execution and management endpoints.
 *
 * <p>A "run" is a single execution of a workflow; async work spawned by a run
 * is tracked separately via task IDs (see the {@code /tasks} endpoints).
 */
@FeignClient(name = "workflow-service", path = "/api/workflows", configuration = WorkflowFeignConfiguration.class)
public interface WorkflowClient {
    /**
     * Execute a workflow with the given input.
     *
     * @param workflowId workflow definition to run
     * @param request    execution input payload
     * @param sessionId  optional session correlation header
     */
    @PostMapping("/{workflowId}/execute")
    ResponseEntity<WorkflowResponse> execute(
            @PathVariable("workflowId") String workflowId,
            @RequestBody WorkflowExecutionRequest request,
            @RequestHeader(value = "X-Session-Id", required = false) String sessionId
    );

    /**
     * Get workflow status for a specific run.
     */
    @GetMapping("/{workflowId}/runs/{runId}/status")
    ResponseEntity<WorkflowStatusResponse> getStatus(
            @PathVariable("workflowId") String workflowId,
            @PathVariable("runId") String runId
    );

    /**
     * Resume a suspended workflow, supplying the input it is waiting for.
     */
    @PostMapping("/{workflowId}/runs/{runId}/resume")
    ResponseEntity<WorkflowResponse> resume(
            @PathVariable("workflowId") String workflowId,
            @PathVariable("runId") String runId,
            @RequestBody Map<String, Object> input
    );

    /**
     * Cancel a workflow execution.
     */
    @DeleteMapping("/{workflowId}/runs/{runId}")
    ResponseEntity<Void> cancel(
            @PathVariable("workflowId") String workflowId,
            @PathVariable("runId") String runId
    );

    /**
     * Get async task status.
     */
    @GetMapping("/tasks/{taskId}/status")
    ResponseEntity<Progress> getTaskStatus(@PathVariable("taskId") String taskId);

    /**
     * Wait for async task result.
     *
     * @param timeout maximum time to wait in milliseconds (default 30000)
     */
    @GetMapping("/tasks/{taskId}/result")
    ResponseEntity<WorkflowResponse> getTaskResult(
            @PathVariable("taskId") String taskId,
            @RequestParam(value = "timeout", defaultValue = "30000") long timeout
    );

    /**
     * Get the input schema for a workflow.
     */
    @GetMapping("/{workflowId}/schema/input")
    ResponseEntity<AIFunctionSchema> getInputSchema(@PathVariable("workflowId") String workflowId);

    /**
     * Get the output schema for a workflow.
     */
    @GetMapping("/{workflowId}/schema/output")
    ResponseEntity<AIFunctionSchema> getOutputSchema(@PathVariable("workflowId") String workflowId);

    /**
     * List all registered workflows.
     */
    @GetMapping
    ResponseEntity<List<WorkflowMetadata>> listWorkflows();

    /**
     * Get detailed information about a workflow.
     */
    @GetMapping("/{workflowId}")
    ResponseEntity<WorkflowDetails> getWorkflowDetails(@PathVariable("workflowId") String workflowId);

    /**
     * Get workflow instances, optionally filtered by status.
     */
    @GetMapping("/{workflowId}/instances")
    ResponseEntity<List<WorkflowInstance>> getWorkflowInstances(
            @PathVariable("workflowId") String workflowId,
            @RequestParam(value = "status", required = false) String status,
            @RequestParam(value = "limit", defaultValue = "100") int limit
    );

    /**
     * Get a specific workflow instance.
     */
    @GetMapping("/{workflowId}/instances/{instanceId}")
    ResponseEntity<WorkflowInstance> getWorkflowInstance(
            @PathVariable("workflowId") String workflowId,
            @PathVariable("instanceId") String instanceId
    );

    /**
     * Retry a failed workflow instance.
     *
     * @param fromStep optional step ID to restart from; when absent the
     *                 server chooses the restart point
     */
    @PostMapping("/{workflowId}/instances/{instanceId}/retry")
    ResponseEntity<WorkflowResponse> retryInstance(
            @PathVariable("workflowId") String workflowId,
            @PathVariable("instanceId") String instanceId,
            @RequestParam(value = "fromStep", required = false) String fromStep
    );

    /**
     * Get workflow execution history.
     *
     * @param startDate optional range start; format is defined by the server
     *                  (plain String here) — confirm expected format with the controller
     * @param endDate   optional range end, same format as startDate
     */
    @GetMapping("/{workflowId}/history")
    ResponseEntity<List<WorkflowHistoryEntry>> getHistory(
            @PathVariable("workflowId") String workflowId,
            @RequestParam(value = "startDate", required = false) String startDate,
            @RequestParam(value = "endDate", required = false) String endDate,
            @RequestParam(value = "limit", defaultValue = "100") int limit
    );
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/WorkflowClientsAutoConfiguration.java
|
package ai.driftkit.workflow.engine.spring.client;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.Import;
/**
 * Auto-configuration for workflow Feign clients.
 * Enables remote access to workflow services via Feign.
 *
 * <p>Activated only when Feign is on the classpath AND the property
 * {@code driftkit.workflow.client.enabled=true} is set (opt-in:
 * {@code matchIfMissing = false}). Scans this package for
 * {@code @FeignClient} interfaces via {@code basePackageClasses}.
 */
@Slf4j
@AutoConfiguration
@ConditionalOnClass(name = "org.springframework.cloud.openfeign.FeignClient")
@ConditionalOnProperty(
        prefix = "driftkit.workflow.client",
        name = "enabled",
        havingValue = "true",
        matchIfMissing = false
)
@EnableFeignClients(basePackageClasses = WorkflowClientsAutoConfiguration.class)
@Import(WorkflowFeignConfiguration.class)
public class WorkflowClientsAutoConfiguration {
    public WorkflowClientsAutoConfiguration() {
        log.info("Initializing Workflow Feign Clients");
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/client/WorkflowFeignConfiguration.java
|
package ai.driftkit.workflow.engine.spring.client;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import feign.Logger;
import feign.RequestInterceptor;
import feign.codec.Decoder;
import feign.codec.Encoder;
import feign.codec.ErrorDecoder;
import feign.jackson.JacksonDecoder;
import feign.jackson.JacksonEncoder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpStatus;
/**
 * Feign configuration for workflow service clients.
 * Provides custom encoding, decoding, and error handling.
 */
@Slf4j
@Configuration
public class WorkflowFeignConfiguration {
    @Value("${driftkit.workflow.client.log-level:BASIC}")
    private String logLevel;

    /**
     * Configure Feign logging level.
     *
     * <p>Falls back to {@code BASIC} instead of failing application startup
     * when the configured value is not a valid {@link Logger.Level} name.
     */
    @Bean
    public Logger.Level feignLoggerLevel() {
        try {
            return Logger.Level.valueOf(logLevel.trim());
        } catch (IllegalArgumentException e) {
            log.warn("Invalid Feign log level '{}', falling back to BASIC", logLevel);
            return Logger.Level.BASIC;
        }
    }

    /**
     * Configure Jackson encoder for Feign.
     */
    @Bean
    public Encoder feignEncoder() {
        return new JacksonEncoder(createObjectMapper());
    }

    /**
     * Configure Jackson decoder for Feign.
     */
    @Bean
    public Decoder feignDecoder() {
        return new JacksonDecoder(createObjectMapper());
    }

    /**
     * Build the shared mapper configuration used by both encoder and decoder,
     * so java.time types are handled consistently on both sides.
     */
    private static ObjectMapper createObjectMapper() {
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.registerModule(new JavaTimeModule());
        return objectMapper;
    }

    /**
     * Configure custom error decoder.
     */
    @Bean
    public ErrorDecoder errorDecoder() {
        return new WorkflowErrorDecoder();
    }

    /**
     * Add authentication interceptor if configured.
     * Sends the configured token as a Bearer credential in the configured header.
     */
    @Bean
    @ConditionalOnProperty(name = "driftkit.workflow.client.auth.enabled", havingValue = "true")
    public RequestInterceptor authenticationInterceptor(
            @Value("${driftkit.workflow.client.auth.token:}") String authToken,
            @Value("${driftkit.workflow.client.auth.header:Authorization}") String authHeader
    ) {
        return requestTemplate -> {
            if (authToken != null && !authToken.isEmpty()) {
                requestTemplate.header(authHeader, "Bearer " + authToken);
            }
        };
    }

    /**
     * Add custom headers interceptor identifying the calling service.
     */
    @Bean
    public RequestInterceptor customHeadersInterceptor(
            @Value("${driftkit.workflow.client.service-name:workflow-client}") String serviceName
    ) {
        return requestTemplate -> {
            requestTemplate.header("X-Client-Service", serviceName);
            requestTemplate.header("X-Client-Version", "1.0.0");
        };
    }

    /**
     * Custom error decoder for workflow service responses.
     * Maps 4xx to {@link WorkflowClientException} and 5xx to
     * {@link WorkflowServerException}; anything else is delegated to Feign's
     * default decoder.
     */
    public static class WorkflowErrorDecoder implements ErrorDecoder {
        private final ErrorDecoder defaultErrorDecoder = new Default();

        @Override
        public Exception decode(String methodKey, feign.Response response) {
            // HttpStatus.valueOf() throws IllegalArgumentException for
            // non-standard codes (e.g. 499, 599), which would replace the HTTP
            // error with a decoder crash. resolve() returns null instead, so
            // unknown codes fall through to the default decoder.
            HttpStatus status = HttpStatus.resolve(response.status());
            if (status != null && status.is4xxClientError()) {
                return new WorkflowClientException(
                        String.format("Client error calling %s: %s", methodKey, response.reason()),
                        response.status()
                );
            }
            if (status != null && status.is5xxServerError()) {
                return new WorkflowServerException(
                        String.format("Server error calling %s: %s", methodKey, response.reason()),
                        response.status()
                );
            }
            return defaultErrorDecoder.decode(methodKey, response);
        }
    }

    /**
     * Exception for client errors (4xx).
     */
    public static class WorkflowClientException extends RuntimeException {
        private final int status;

        public WorkflowClientException(String message, int status) {
            super(message);
            this.status = status;
        }

        /** @return the HTTP status code that caused this exception */
        public int getStatus() {
            return status;
        }
    }

    /**
     * Exception for server errors (5xx).
     */
    public static class WorkflowServerException extends RuntimeException {
        private final int status;

        public WorkflowServerException(String message, int status) {
            super(message);
            this.status = status;
        }

        /** @return the HTTP status code that caused this exception */
        public int getStatus() {
            return status;
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/context/SpringWorkflowContextFactory.java
|
package ai.driftkit.workflow.engine.spring.context;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.core.WorkflowContextFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * Spring implementation of WorkflowContextFactory.
 * Provides Spring-aware context creation for workflows.
 */
@Slf4j
@Component
public class SpringWorkflowContextFactory implements WorkflowContextFactory {
    /**
     * Create a new {@link WorkflowContext} for a workflow run.
     *
     * @param runId       unique identifier of this run
     * @param triggerData the data that triggered the run
     * @param instanceId  persistent workflow instance identifier
     * @return a freshly constructed context (no caching is performed here)
     */
    @Override
    public WorkflowContext create(String runId, Object triggerData, String instanceId) {
        WorkflowContext context = new WorkflowContext(runId, triggerData, instanceId);
        log.debug("Created WorkflowContext for run: {}", runId);
        return context;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/AnalyticsDtos.java
|
package ai.driftkit.workflow.engine.spring.dto;
import com.fasterxml.jackson.annotation.JsonInclude;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
/**
 * DTO classes for Analytics operations.
 * All DTOs serialize with {@code NON_NULL} inclusion, so absent metrics are
 * omitted from JSON rather than emitted as null.
 */
public class AnalyticsDtos {
    /** Variables and metadata captured for a single analytics task/message. */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public static class TaskVariables {
        private String messageId;
        private String contextId; // same as messageId for use in UI
        private String message;
        private String result;
        private String modelId;
        private long createdTime;
        private long responseTime;
        private List<String> promptIds;
        private Map<String, Object> variables;
    }

    /** Latency distribution snapshot (values in the unit produced upstream, presumably ms — confirm with producer). */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public static class LatencyPercentiles {
        private Long p25;
        private Long p50;
        private Long p75;
        private Long p90;

        /**
         * Build from a map keyed "p25"/"p50"/"p75"/"p90".
         * Missing keys become null fields; a null map yields a null result.
         */
        public static LatencyPercentiles fromMap(Map<String, Long> map) {
            if (map == null) {
                return null;
            }
            return LatencyPercentiles.builder()
                    .p25(map.get("p25"))
                    .p50(map.get("p50"))
                    .p75(map.get("p75"))
                    .p90(map.get("p90"))
                    .build();
        }
    }

    /** Prompt/completion token counts broken down by some category key (e.g. method or model). */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public static class TokensByCategory {
        private Map<String, Integer> promptTokens;
        private Map<String, Integer> completionTokens;
    }

    /** Aggregated metrics for a single day across all tasks. */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public static class DailyMetricsResponse {
        private long totalTasks;
        private int totalPromptTokens;
        private int totalCompletionTokens;
        private LatencyPercentiles latencyPercentiles;
        private Map<String, Long> tasksByModel;
        private Map<String, Long> tasksByPromptMethod;
        private TokensByCategory tokensByPromptMethod;
        private TokensByCategory tokensByPromptMethodModel;
        private Map<String, LatencyPercentiles> latencyByPromptMethod;
        private Map<String, Long> successByPromptMethod;
        private Map<String, Long> errorsByPromptMethod;
        private Map<String, Double> successRateByPromptMethod;
        private long successCount;
        private long errorCount;
        private double successRate;
        private long timestamp;
        private Map<String, LatencyPercentiles> latencyByPromptMethodModel;
    }

    /** Metrics scoped to a single prompt (traces, tokens, latency, success rates). */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public static class PromptMetricsResponse {
        private long totalTraces;
        private int totalPromptTokens;
        private int totalCompletionTokens;
        private int totalTokens;
        private LatencyPercentiles latencyPercentiles;
        private Map<String, Long> tracesByModel;
        private TokensByCategory tokensByModel;
        private Map<String, LatencyPercentiles> latencyByModel;
        private long successCount;
        private long errorCount;
        private double successRate;
        private Map<String, Long> dailyCounts;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/AssistantDtos.java
|
package ai.driftkit.workflow.engine.spring.dto;
import ai.driftkit.common.domain.chat.ChatResponse;
import ai.driftkit.workflow.engine.chat.ChatMessageTask;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
/**
 * DTO classes for Assistant operations.
 */
public class AssistantDtos {
    /** Summary of a chat used in chat listings. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ChatInfo {
        private String chatId;
        private Long lastMessageTime;
        private String lastMessage;
        private Map<String, Object> metadata;
    }

    /** Function schemas plus a schema-name to message-ID mapping. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class SchemaResponse {
        private List<AIFunctionSchema> schemas;
        private Map<String, String> messageIds;
    }

    /** Input schemas for the first step of a workflow. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class FirstStepSchemaResponse {
        private String workflowId;
        private String stepId;
        private List<AIFunctionSchema> schemas;
        private Map<String, Object> metadata;
    }

    /** Chat response augmented with the request/response task breakdowns. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ChatResponseWithTasks {
        private ChatResponse originalResponse;
        private List<ChatMessageTask> request;
        private List<ChatMessageTask> response;
    }

    /** State of an assistant chat session. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class SessionInfo {
        private String sessionId;
        private String userId;
        private String workflowId;
        private Map<String, Object> context;
        private long createdAt;
        private long lastActivityAt;
        private boolean active;
    }

    /** Parameters for creating a new session. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class SessionRequest {
        private String workflowId;
        private Map<String, Object> initialContext;
        private Map<String, Object> metadata;
    }

    /** Replacement context/metadata for an existing session. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class SessionContext {
        private Map<String, Object> context;
        private Map<String, Object> metadata;
    }

    /** Descriptor of a workflow available to the assistant. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class WorkflowInfo {
        private String id;
        private String name;
        private String description;
        private List<String> capabilities;
        private Map<String, Object> metadata;
    }

    /** Feature flags and supported workflows/models of the assistant. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class AssistantCapabilities {
        private List<String> supportedWorkflows;
        private List<String> supportedModels;
        private boolean supportsStreaming;
        private boolean supportsAsync;
        private Map<String, Object> features;
    }

    /** Single event in a streaming chat response. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class StreamEvent {
        private String type; // "message", "delta", "error", "complete"
        private String content;
        private Map<String, Object> metadata;
        private long timestamp;
    }

    /** Single message in a chat history. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ChatMessage {
        private String id;
        private String role; // "user", "assistant", "system"
        private String content;
        private long timestamp;
        private Map<String, Object> metadata;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/ModelRequestDtos.java
|
package ai.driftkit.workflow.engine.spring.dto;
import ai.driftkit.common.domain.client.ResponseFormat;
import ai.driftkit.workflow.engine.spring.tracing.domain.AsyncTaskEntity.TaskStatus;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
/**
 * DTO classes for Model Request operations.
 */
public class ModelRequestDtos {
    /**
     * Text request for synchronous model operations.
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class TextRequest {
        private String text;
        private String systemMessage;
        private String chatId;
        private String modelId;
        private Double temperature; // null means "use the model's default"
        private Map<String, Object> variables;
        private String workflow;
        private ResponseFormat responseFormat;
        private List<String> images; // presumably URLs or base64 payloads — confirm with the controller
    }

    /**
     * Async task response: the handle returned immediately by async endpoints.
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class AsyncTaskResponse {
        private String taskId;
        private TaskStatus status;
    }

    /**
     * Task rating request.
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class TaskRating {
        private Integer grade; // 1-5 rating scale
        private String comment;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/PageableResponseWithChat.java
|
package ai.driftkit.workflow.engine.spring.dto;
import ai.driftkit.workflow.engine.domain.ChatSession;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import jakarta.servlet.http.HttpServletRequest;
import lombok.Data;
import org.springframework.data.domain.Page;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Pageable response wrapper for chat sessions.
 * Compatible with legacy AssistantController response format.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class PageableResponseWithChat {
    private List<ChatInfo> content;
    private int page;
    private int size;
    private long totalElements;
    private int totalPages;
    private boolean first;
    private boolean last;
    private String sort;
    private int numberOfElements;
    private boolean empty;

    /**
     * Build the legacy-format response from a Spring Data page of sessions.
     *
     * <p>Note: {@code request} is not read here; it is presumably kept for
     * signature compatibility with the legacy controller — confirm before removing.
     *
     * @param request the incoming HTTP request (unused)
     * @param page    the page of chat sessions to convert
     */
    public PageableResponseWithChat(HttpServletRequest request, Page<ChatSession> page) {
        // Map each session to the flattened legacy ChatInfo shape; the
        // session description is exposed as "lastMessage".
        this.content = page.getContent().stream()
                .map(session -> new ChatInfo(
                        session.getChatId(),
                        session.getLastMessageTime(),
                        session.getDescription(),
                        session.getUserId(),
                        session.getName()
                ))
                .collect(Collectors.toList());
        this.page = page.getNumber();
        this.size = page.getSize();
        this.totalElements = page.getTotalElements();
        this.totalPages = page.getTotalPages();
        this.first = page.isFirst();
        this.last = page.isLast();
        this.sort = page.getSort().toString();
        this.numberOfElements = page.getNumberOfElements();
        this.empty = page.isEmpty();
    }

    /**
     * Chat information DTO
     */
    @Data
    public static class ChatInfo {
        private String chatId;
        private Long lastMessageTime;
        private String lastMessage;
        private String userId;
        private String name;

        public ChatInfo(String chatId, Long lastMessageTime, String lastMessage,
                        String userId, String name) {
            this.chatId = chatId;
            this.lastMessageTime = lastMessageTime;
            this.lastMessage = lastMessage;
            this.userId = userId;
            this.name = name;
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/PageableResponseWithChatMessage.java
|
package ai.driftkit.workflow.engine.spring.dto;
import ai.driftkit.common.domain.chat.ChatMessage;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import jakarta.servlet.http.HttpServletRequest;
import lombok.Data;
import org.springframework.data.domain.Page;
import java.util.List;
/**
 * Pageable response wrapper for chat messages.
 * Compatible with legacy AssistantController response format.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class PageableResponseWithChatMessage {
    private List<ChatMessage> content;
    private int page;
    private int size;
    private long totalElements;
    private int totalPages;
    private boolean first;
    private boolean last;
    private String sort;
    private int numberOfElements;
    private boolean empty;

    /**
     * Copy pagination metadata and content from a Spring Data page.
     *
     * <p>Note: {@code request} is not read here; it is presumably kept for
     * signature compatibility with the legacy controller — confirm before removing.
     *
     * @param request the incoming HTTP request (unused)
     * @param page    the page of chat messages to expose
     */
    public PageableResponseWithChatMessage(HttpServletRequest request, Page<ChatMessage> page) {
        this.content = page.getContent();
        this.page = page.getNumber();
        this.size = page.getSize();
        this.totalElements = page.getTotalElements();
        this.totalPages = page.getTotalPages();
        this.first = page.isFirst();
        this.last = page.isLast();
        this.sort = page.getSort().toString();
        this.numberOfElements = page.getNumberOfElements();
        this.empty = page.isEmpty();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/RestResponse.java
|
package ai.driftkit.workflow.engine.spring.dto;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Generic REST response wrapper.
 * Compatible with legacy workflow controller response format.
 *
 * @param <T> the payload type carried by the response
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class RestResponse<T> {
    private boolean success;
    private T data;

    /** Wrap a payload in a successful response. */
    public static <T> RestResponse<T> success(T data) {
        return new RestResponse<>(true, data);
    }

    /** Wrap a payload (typically an error description) in a failed response. */
    public static <T> RestResponse<T> failure(T data) {
        return new RestResponse<>(false, data);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/WorkflowAdminDtos.java
|
package ai.driftkit.workflow.engine.spring.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
/**
 * DTO classes for Workflow Admin operations.
 *
 * <p>All nested DTOs use Lombok; field order defines the
 * {@code @AllArgsConstructor} parameter order — do not reorder fields.</p>
 */
public class WorkflowAdminDtos {

    /** Summary description of a registered workflow for admin listings. */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class WorkflowInfo {
        private String id;
        private String name;
        private String description;
        private String version;
        // Whether the workflow is currently enabled for execution.
        private boolean enabled;
        // Free-form metadata attached to the workflow definition.
        private Map<String, Object> metadata;
        private List<String> tags;
    }

    /** Result of an instance-cleanup operation (supports dry-run mode). */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class CleanupResult {
        // Number of instances considered for cleanup.
        private int totalInstances;
        // Number of instances actually (or, in dry-run, would-be) deleted.
        private int deletedInstances;
        private List<String> deletedInstanceIds;
        // True when no deletion was performed, only reported.
        private boolean dryRun;
        private String message;
    }

    /** Aggregated execution statistics for a single workflow. */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class WorkflowStatistics {
        private String workflowId;
        private long totalExecutions;
        private long successfulExecutions;
        private long failedExecutions;
        private long runningExecutions;
        private long suspendedExecutions;
        private double averageExecutionTimeMs;
        // Execution counts keyed by status name.
        private Map<String, Long> executionsByStatus;
        // Extension point for implementation-specific metrics.
        private Map<String, Object> additionalMetrics;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/dto/WorkflowDtos.java
|
package ai.driftkit.workflow.engine.spring.dto;
import ai.driftkit.workflow.engine.async.ProgressTracker.Progress;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.engine.core.WorkflowContext.Keys;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import java.util.List;
import java.util.Map;
/**
 * DTO classes for Workflow operations.
 */
public class WorkflowDtos {

    /** Overall lifecycle state of a workflow run as exposed to API clients. */
    public enum WorkflowStatus {
        PENDING,
        RUNNING,
        SUSPENDED,
        COMPLETED,
        FAILED,
        CANCELLED,
        ASYNC
    }

    /** Outcome of a single step execution, mirroring the {@code StepResult} variants. */
    public enum StepStatus {
        CONTINUE,
        SUSPEND,
        FINISH,
        FAIL,
        ASYNC
    }

    /**
     * Request body for starting a workflow execution.
     *
     * @param properties raw string properties used as workflow input
     * @param inputClass optional class name used to deserialize the input
     */
    public record WorkflowExecutionRequest(
            Map<String, String> properties,
            String inputClass
    ) {
    }

    /**
     * Request body for resuming a suspended workflow.
     *
     * @param userInput  structured user input; takes precedence over {@code properties}
     * @param properties fallback raw string properties
     * @param inputClass optional class name used to deserialize the input
     */
    public record WorkflowResumeRequest(
            Map<String, Object> userInput,
            Map<String, String> properties,
            String inputClass
    ) {
        /** Returns {@code userInput} when present, otherwise falls back to {@code properties}. */
        public Object getUserInput() {
            return userInput != null ? userInput : properties;
        }
    }

    /**
     * API response describing the outcome of a workflow execution step.
     * Exactly one of {@code result}/{@code error} is meaningful depending on
     * {@code status}; schema fields describe the expected next input, when known.
     */
    public record WorkflowResponse(
            String runId,
            StepStatus status,
            Object result,
            String error,
            boolean async,
            String asyncTaskId,
            AIFunctionSchema nextInputSchema,
            List<AIFunctionSchema> possibleSchemas
    ) {
        /**
         * Maps a core {@code StepResult} onto the API status/result/error triple.
         * Schema fields are always {@code null} here; callers populate them
         * separately when needed.
         */
        public static WorkflowResponse from(WorkflowInstance instance, StepResult<?> result) {
            StepStatus status;
            Object data = null;
            String error = null;
            if (result instanceof StepResult.Continue<?> cont) {
                status = StepStatus.CONTINUE;
                data = cont.data();
            } else if (result instanceof StepResult.Suspend<?> susp) {
                // susp binding intentionally unused: the suspension id is derived
                // from the instance, not from the Suspend payload.
                status = StepStatus.SUSPEND;
                data = Map.of("suspensionId", instance.getInstanceId() + "_" + instance.getCurrentStepId());
            } else if (result instanceof StepResult.Finish<?> fin) {
                status = StepStatus.FINISH;
                data = fin.result();
            } else if (result instanceof StepResult.Fail<?> fail) {
                status = StepStatus.FAIL;
                error = fail.error().getMessage();
            } else {
                // Any other StepResult variant defaults to CONTINUE with no payload.
                status = StepStatus.CONTINUE;
            }
            return new WorkflowResponse(
                    instance.getInstanceId(),
                    status,
                    data,
                    error,
                    false,
                    null,
                    null,
                    null
            );
        }

        /** Response for an execution that was handed off to asynchronous processing. */
        public static WorkflowResponse async(String runId, String taskId) {
            return new WorkflowResponse(
                    runId,
                    StepStatus.ASYNC,
                    null,
                    null,
                    true,
                    taskId,
                    null,
                    null
            );
        }

        /** Response for a failure that happened before a run was created (no runId). */
        public static WorkflowResponse error(String error) {
            return new WorkflowResponse(
                    null,
                    StepStatus.FAIL,
                    null,
                    error,
                    false,
                    null,
                    null,
                    null
            );
        }
    }

    /** Snapshot of progress/state for status-polling endpoints. */
    public record WorkflowStatusResponse(
            WorkflowStatus status,
            int percentComplete,
            String message,
            Object data
    ) {
        /** Converts async progress info; no result data is available at this level. */
        public static WorkflowStatusResponse fromProgress(Progress progress) {
            WorkflowStatus status = switch (progress.status()) {
                case PENDING -> WorkflowStatus.PENDING;
                case IN_PROGRESS -> WorkflowStatus.RUNNING;
                case COMPLETED -> WorkflowStatus.COMPLETED;
                case FAILED -> WorkflowStatus.FAILED;
                case CANCELLED -> WorkflowStatus.CANCELLED;
            };
            return new WorkflowStatusResponse(
                    status,
                    progress.percentComplete(),
                    progress.message(),
                    null
            );
        }

        /**
         * Converts a persisted workflow instance. Percent is coarse: 100 when
         * completed, 0 otherwise. Data is the stored final step output, if any.
         */
        public static WorkflowStatusResponse fromState(WorkflowInstance state) {
            WorkflowStatus status = switch (state.getStatus()) {
                case RUNNING -> WorkflowStatus.RUNNING;
                case SUSPENDED -> WorkflowStatus.SUSPENDED;
                case COMPLETED -> WorkflowStatus.COMPLETED;
                case FAILED -> WorkflowStatus.FAILED;
                case CANCELLED -> WorkflowStatus.CANCELLED;
            };
            return new WorkflowStatusResponse(
                    status,
                    state.getStatus() == WorkflowInstance.WorkflowStatus.COMPLETED ? 100 : 0,
                    "Workflow " + state.getStatus().toString().toLowerCase(),
                    state.getContext() != null ? state.getContext().getStepOutputs().get(Keys.FINAL_RESULT) : null
            );
        }
    }

    /** Current (possibly intermediate) result of a run. */
    public record WorkflowCurrentResultResponse(
            String runId,
            WorkflowStatus status,
            int percentComplete,
            String message,
            Map<String, Object> data,
            boolean isAsync
    ) {}

    /** Result of a cancellation request for a run. */
    public record WorkflowCancelResponse(
            String runId,
            boolean cancelled,
            String message
    ) {}

    /** Single entry in a workflow execution-history listing. */
    public record WorkflowHistoryEntry(
            String runId,
            String workflowId,
            WorkflowStatus status,
            long startedAt,
            Long completedAt,
            Long executionTimeMs,
            String userId,
            String sessionId,
            Map<String, Object> input,
            Object result,
            String error
    ) {}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/repository/ChatHistoryRepository.java
|
package ai.driftkit.workflow.engine.spring.repository;
import ai.driftkit.common.domain.chat.ChatMessage;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.List;
/**
 * Repository interface for managing chat message history.
 * Provides abstraction over storage implementation.
 *
 * <p>NOTE(review): ordering of returned messages is not specified by this
 * contract — confirm that implementations return chronological order.</p>
 */
public interface ChatHistoryRepository {
    /**
     * Add a message to chat history.
     *
     * @param chatId The chat ID
     * @param message The message to add
     */
    void addMessage(String chatId, ChatMessage message);

    /**
     * Add multiple messages to chat history.
     *
     * @param chatId The chat ID
     * @param messages The messages to add
     */
    void addMessages(String chatId, List<ChatMessage> messages);

    /**
     * Get chat history for a specific chat.
     *
     * @param chatId The chat ID
     * @param pageable Pagination information
     * @param includeContext Whether to include context messages
     * @return Page of chat messages
     */
    Page<ChatMessage> findByChatId(String chatId, Pageable pageable, boolean includeContext);

    /**
     * Get all messages for a chat.
     *
     * @param chatId The chat ID
     * @return List of all messages (empty when the chat has none)
     */
    List<ChatMessage> findAllByChatId(String chatId);

    /**
     * Get recent messages for a chat.
     *
     * @param chatId The chat ID
     * @param limit Maximum number of messages to return
     * @return List of recent messages
     */
    List<ChatMessage> findRecentByChatId(String chatId, int limit);

    /**
     * Delete all messages for a chat.
     *
     * @param chatId The chat ID
     */
    void deleteByChatId(String chatId);

    /**
     * Get message count for a chat.
     *
     * @param chatId The chat ID
     * @return Number of messages
     */
    long countByChatId(String chatId);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/repository/ChatSessionRepository.java
|
package ai.driftkit.workflow.engine.spring.repository;
import ai.driftkit.workflow.engine.domain.ChatSession;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.Optional;
/**
 * Repository interface for managing chat sessions.
 * Provides abstraction over storage implementation.
 */
public interface ChatSessionRepository {
    /**
     * Save or update a chat session.
     *
     * @param session The chat session to save
     * @return The saved chat session
     */
    ChatSession save(ChatSession session);

    /**
     * Find a chat session by ID.
     *
     * @param chatId The chat ID
     * @return Optional containing the chat session if found, empty otherwise
     */
    Optional<ChatSession> findById(String chatId);

    /**
     * Find all chat sessions for a user.
     *
     * @param userId The user ID
     * @param pageable Pagination information
     * @return Page of chat sessions
     */
    Page<ChatSession> findByUserId(String userId, Pageable pageable);

    /**
     * Find active (non-archived) chat sessions for a user.
     *
     * @param userId The user ID
     * @param pageable Pagination information
     * @return Page of active chat sessions
     */
    Page<ChatSession> findActiveByUserId(String userId, Pageable pageable);

    /**
     * Delete a chat session.
     *
     * @param chatId The chat ID to delete
     */
    void deleteById(String chatId);

    /**
     * Check if a chat session exists.
     *
     * @param chatId The chat ID
     * @return true if exists, false otherwise
     */
    boolean existsById(String chatId);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/service/WorkflowService.java
|
package ai.driftkit.workflow.engine.spring.service;
import ai.driftkit.common.domain.chat.ChatMessage;
import ai.driftkit.common.domain.chat.ChatResponse;
import ai.driftkit.common.domain.chat.ChatRequest;
import ai.driftkit.common.service.ChatStore;
import ai.driftkit.workflow.engine.chat.ChatMessageTask;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.domain.ChatSession;
import ai.driftkit.workflow.engine.domain.WorkflowDetails;
import ai.driftkit.workflow.engine.domain.WorkflowMetadata;
import ai.driftkit.workflow.engine.persistence.AsyncStepStateRepository;
import ai.driftkit.workflow.engine.persistence.ChatSessionRepository;
import ai.driftkit.workflow.engine.persistence.SuspensionDataRepository;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.persistence.WorkflowStateRepository;
import ai.driftkit.workflow.engine.schema.AIFunctionSchema;
import ai.driftkit.workflow.engine.service.DefaultWorkflowExecutionService;
import ai.driftkit.workflow.engine.service.WorkflowExecutionService;
import ai.driftkit.workflow.engine.spring.adapter.PageResultAdapter;
import ai.driftkit.workflow.engine.spring.websocket.WorkflowEventWebSocketBridge;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import java.util.*;
/**
 * Spring service adapter for workflow management and chat integration.
 * This is a thin wrapper that delegates to the core WorkflowExecutionService,
 * optionally mirroring chat traffic onto a WebSocket bridge.
 */
@Slf4j
@Service
public class WorkflowService {

    /** Framework-agnostic core service that performs the actual work. */
    private final WorkflowExecutionService coreService;

    // Optional WebSocket bridge (null when WebSocket support is disabled).
    @Autowired(required = false)
    private WorkflowEventWebSocketBridge webSocketBridge;

    /**
     * Creates the service by wiring the core execution service from engine
     * and repository beans.
     */
    @Autowired
    public WorkflowService(WorkflowEngine engine,
                           ChatSessionRepository chatSessionRepository,
                           AsyncStepStateRepository asyncStepStateRepository,
                           SuspensionDataRepository suspensionDataRepository,
                           WorkflowStateRepository workflowStateRepository,
                           ChatStore chatStore) {
        this.coreService = new DefaultWorkflowExecutionService(engine, chatSessionRepository,
                asyncStepStateRepository, suspensionDataRepository, workflowStateRepository, chatStore);
    }

    // ========== Chat Management - Delegate to Core ==========

    /** Returns the existing session for chatId or creates one for the user. */
    public ChatSession getOrCreateChatSession(String chatId, String userId, String initialMessage) {
        return coreService.getOrCreateSession(chatId, userId, initialMessage);
    }

    public Optional<ChatSession> getChatSession(String chatId) {
        return coreService.getChatSession(chatId);
    }

    public ChatSession createChatSession(String userId, String name) {
        return coreService.createChatSession(userId, name);
    }

    public void archiveChatSession(String chatId) {
        coreService.archiveChatSession(chatId);
    }

    /** Lists chat sessions for a user, adapting Spring pagination to the core API. */
    public Page<ChatSession> listChatsForUser(String userId, Pageable pageable) {
        var pageRequest = PageResultAdapter.toPageRequest(pageable);
        var pageResult = coreService.listChatsForUser(userId, pageRequest);
        return PageResultAdapter.toPage(pageResult, pageable);
    }

    // ========== Chat Processing - Delegate to Core ==========

    /** Executes a chat request, mirroring request/response to the WebSocket bridge. */
    public ChatResponse processChatRequest(ChatRequest request) {
        notifyRequest(request);
        return notifyResponse(coreService.executeChat(request));
    }

    /** Resumes a suspended chat, mirroring request/response to the WebSocket bridge. */
    public ChatResponse resumeChatRequest(String messageId, ChatRequest request) {
        notifyRequest(request);
        return notifyResponse(coreService.resumeChat(messageId, request));
    }

    /** Publishes the incoming request to the WebSocket bridge when one is configured. */
    private void notifyRequest(ChatRequest request) {
        if (webSocketBridge != null) {
            webSocketBridge.sendChatRequest(request);
        }
    }

    /** Publishes the outgoing response to the WebSocket bridge when one is configured. */
    private ChatResponse notifyResponse(ChatResponse response) {
        if (webSocketBridge != null) {
            webSocketBridge.sendChatResponse(response);
        }
        return response;
    }

    public Optional<ChatResponse> getChatResponse(String responseId) {
        return coreService.getAsyncStatus(responseId);
    }

    /** Returns paged chat history, adapting Spring pagination to the core API. */
    public Page<ChatMessage> getChatHistory(String chatId, Pageable pageable, boolean includeContext, boolean includeSchema) {
        // NOTE(review): includeSchema is currently ignored — the core API only
        // accepts includeContext. Confirm whether schema filtering is still needed.
        var pageRequest = PageResultAdapter.toPageRequest(pageable);
        var pageResult = coreService.getChatHistory(chatId, pageRequest, includeContext);
        return PageResultAdapter.toPage(pageResult, pageable);
    }

    public List<ChatMessageTask> convertMessageToTasks(ChatMessage message) {
        return coreService.convertMessageToTasks(message);
    }

    // ========== Workflow Management - Delegate to Core ==========

    public List<WorkflowMetadata> listWorkflows() {
        return coreService.listWorkflows();
    }

    public WorkflowDetails getWorkflowDetails(String workflowId) {
        return coreService.getWorkflowDetails(workflowId);
    }

    public List<AIFunctionSchema> getWorkflowSchemas(String workflowId) {
        return coreService.getWorkflowSchemas(workflowId);
    }

    public AIFunctionSchema getInitialSchema(String workflowId) {
        return coreService.getInitialSchema(workflowId);
    }

    public Optional<WorkflowInstance> getWorkflowState(String runId) {
        return coreService.getWorkflowState(runId);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/streaming/AgentStreamingAdapter.java
|
package ai.driftkit.workflow.engine.spring.streaming;
import ai.driftkit.common.domain.streaming.StreamingCallback;
import ai.driftkit.workflow.engine.agent.Agent;
import ai.driftkit.workflow.engine.agent.LLMAgent;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Spring adapter for Agent streaming operations.
 * Provides utilities to convert Agent streaming responses to Spring WebFlux Flux.
 */
public class AgentStreamingAdapter {

    /** Static utility class; not meant to be instantiated. */
    private AgentStreamingAdapter() {
        throw new AssertionError("No instances");
    }

    /**
     * Execute an agent with streaming and convert to Flux.
     * Only works with LLMAgent that supports streaming.
     *
     * @param agent The agent to execute (must be LLMAgent for streaming support)
     * @param input The input text
     * @return A Flux that emits response tokens
     */
    public static Flux<String> executeStreaming(Agent agent, String input) {
        return executeStreaming(agent, input, null);
    }

    /**
     * Execute an agent with streaming and variables, converting to Flux.
     * Only works with LLMAgent that supports streaming; any other agent is
     * executed synchronously and its result emitted as a single item.
     *
     * @param agent The agent to execute (must be LLMAgent for streaming support)
     * @param input The input text
     * @param variables Context variables (may be null)
     * @return A Flux that emits response tokens
     */
    public static Flux<String> executeStreaming(Agent agent, String input, Map<String, Object> variables) {
        if (!(agent instanceof LLMAgent)) {
            // For non-LLMAgent, just return the result as a single-item Flux.
            String result = (variables != null) ? agent.execute(input, variables) : agent.execute(input);
            return Flux.just(result);
        }
        LLMAgent llmAgent = (LLMAgent) agent;
        return Flux.create(sink -> {
            AtomicBoolean cancelled = new AtomicBoolean(false);
            // Callback that bridges streaming events onto the Flux sink.
            StreamingCallback<String> callback = new StreamingCallback<String>() {
                @Override
                public void onNext(String item) {
                    if (!cancelled.get() && !sink.isCancelled()) {
                        sink.next(item);
                    }
                }

                @Override
                public void onError(Throwable error) {
                    if (!cancelled.get() && !sink.isCancelled()) {
                        sink.error(error);
                    }
                }

                @Override
                public void onComplete() {
                    if (!cancelled.get() && !sink.isCancelled()) {
                        sink.complete();
                    }
                }
            };
            // Execute streaming with callback.
            CompletableFuture<String> future = llmAgent.executeStreaming(input, variables, callback);
            // Propagate downstream cancellation to the in-flight execution.
            sink.onDispose(() -> {
                cancelled.set(true);
                future.cancel(true);
            });
        }, FluxSink.OverflowStrategy.BUFFER);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/streaming/StreamingAdapter.java
|
package ai.driftkit.workflow.engine.spring.streaming;
import ai.driftkit.common.domain.streaming.StreamingCallback;
import ai.driftkit.common.domain.streaming.StreamingResponse;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
/**
 * Adapter to convert between DriftKit streaming abstraction and Spring Reactor Flux.
 * This allows framework-agnostic DriftKit components to be used with Spring WebFlux.
 */
public class StreamingAdapter {

    /** Static utility class; not meant to be instantiated. */
    private StreamingAdapter() {
        throw new AssertionError("No instances");
    }

    /**
     * Convert a DriftKit StreamingResponse to Spring Reactor Flux.
     * Useful for exposing DriftKit streaming through Spring WebFlux endpoints.
     *
     * @param streamingResponse The DriftKit streaming response
     * @param <T> Type of items in the stream
     * @return Flux that emits the same items as the StreamingResponse
     */
    public static <T> Flux<T> toFlux(StreamingResponse<T> streamingResponse) {
        return Flux.create(sink -> {
            streamingResponse.subscribe(new StreamingCallback<T>() {
                @Override
                public void onNext(T item) {
                    if (!sink.isCancelled()) {
                        sink.next(item);
                    }
                }

                @Override
                public void onError(Throwable error) {
                    if (!sink.isCancelled()) {
                        sink.error(error);
                    }
                }

                @Override
                public void onComplete() {
                    if (!sink.isCancelled()) {
                        sink.complete();
                    }
                }
            });
            // Propagate cancellation from the Flux side to the source stream.
            sink.onDispose(streamingResponse::cancel);
        });
    }

    /**
     * Convert a Spring Reactor Flux to DriftKit StreamingResponse.
     * Useful for wrapping Spring WebFlux streams for use in DriftKit components.
     *
     * @param flux The Spring Reactor Flux
     * @param <T> Type of items in the stream
     * @return StreamingResponse that subscribes to the Flux
     */
    public static <T> StreamingResponse<T> fromFlux(Flux<T> flux) {
        return new StreamingResponse<T>() {
            private volatile Disposable subscription;
            private volatile boolean active = false;

            @Override
            public void subscribe(StreamingCallback<T> callback) {
                // NOTE(review): check-then-set on 'active' is not atomic; two
                // concurrent subscribers could both pass — confirm single-threaded use.
                if (active) {
                    callback.onError(new IllegalStateException("Stream already subscribed"));
                    return;
                }
                active = true;
                subscription = flux.subscribe(
                        callback::onNext,
                        error -> {
                            active = false;
                            callback.onError(error);
                        },
                        () -> {
                            active = false;
                            callback.onComplete();
                        }
                );
            }

            @Override
            public void cancel() {
                if (subscription != null && !subscription.isDisposed()) {
                    subscription.dispose();
                }
                active = false;
            }

            @Override
            public boolean isActive() {
                return active && (subscription == null || !subscription.isDisposed());
            }
        };
    }

    /**
     * Convert a DriftKit StreamingResponse to Spring Reactor Flux with buffering.
     * Useful for batching stream items before sending to client.
     *
     * @param streamingResponse The DriftKit streaming response
     * @param bufferSize Number of items to buffer
     * @param <T> Type of items in the stream
     * @return Flux that emits buffered lists
     */
    public static <T> Flux<java.util.List<T>> toBufferedFlux(
            StreamingResponse<T> streamingResponse,
            int bufferSize) {
        return toFlux(streamingResponse).buffer(bufferSize);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing/SpringRequestTracingProvider.java
|
package ai.driftkit.workflow.engine.spring.tracing;
import ai.driftkit.common.domain.client.*;
import ai.driftkit.common.utils.AIUtils;
import ai.driftkit.workflow.engine.agent.RequestTracingProvider;
import ai.driftkit.workflow.engine.spring.tracing.domain.ModelRequestTrace;
import ai.driftkit.workflow.engine.spring.tracing.repository.CoreModelRequestTraceRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.stereotype.Component;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.stream.Collectors;
/**
 * Spring implementation of RequestTracingProvider.
 * Provides comprehensive tracing of all LLM requests with MongoDB persistence.
 *
 * <p>Traces are persisted asynchronously on the supplied executor so tracing
 * never blocks the request path; persistence failures are logged and swallowed.</p>
 */
@Slf4j
@Component
@RequiredArgsConstructor
@ConditionalOnBean(CoreModelRequestTraceRepository.class)
public class SpringRequestTracingProvider implements RequestTracingProvider {

    private final CoreModelRequestTraceRepository traceRepository;
    // Executor used for fire-and-forget persistence of traces.
    private final Executor traceExecutor;

    /** Traces a plain text-to-text request, if the response carries trace info. */
    @Override
    public void traceTextRequest(ModelTextRequest request, ModelTextResponse response, RequestContext context) {
        if (!shouldTrace(response, context)) {
            return;
        }
        try {
            ModelRequestTrace trace = buildTextTrace(request, response, context,
                    ModelRequestTrace.RequestType.TEXT_TO_TEXT);
            saveTraceAsync(trace);
            log.debug("Traced text request for context: {} with type: {}",
                    context.getContextId(), context.getContextType());
        } catch (Exception e) {
            // Tracing must never break the caller's request path.
            log.error("Error tracing text request for context: {}", context.getContextId(), e);
        }
    }

    /** Traces an image-generation request, if the response carries trace info. */
    @Override
    public void traceImageRequest(ModelImageRequest request, ModelImageResponse response, RequestContext context) {
        if (!shouldTrace(response, context)) {
            return;
        }
        try {
            ModelRequestTrace trace = buildImageTrace(request, response, context);
            saveTraceAsync(trace);
            log.debug("Traced image generation request for context: {} with type: {}",
                    context.getContextId(), context.getContextType());
        } catch (Exception e) {
            log.error("Error tracing image request for context: {}", context.getContextId(), e);
        }
    }

    /** Traces an image-to-text request, if the response carries trace info. */
    @Override
    public void traceImageToTextRequest(ModelTextRequest request, ModelTextResponse response, RequestContext context) {
        if (!shouldTrace(response, context)) {
            return;
        }
        try {
            ModelRequestTrace trace = buildTextTrace(request, response, context,
                    ModelRequestTrace.RequestType.IMAGE_TO_TEXT);
            saveTraceAsync(trace);
            log.debug("Traced image-to-text request for context: {} with type: {}",
                    context.getContextId(), context.getContextType());
        } catch (Exception e) {
            log.error("Error tracing image-to-text request for context: {}", context.getContextId(), e);
        }
    }

    /**
     * A request is traced only when there is a response carrying trace info
     * and the context identifies the originating call.
     */
    private boolean shouldTrace(Object response, RequestContext context) {
        if (response == null || context == null || context.getContextId() == null) {
            return false;
        }
        // Only trace responses that actually carry trace information.
        if (response instanceof ModelTextResponse textResponse) {
            return textResponse.getTrace() != null;
        } else if (response instanceof ModelImageResponse imageResponse) {
            return imageResponse.getTrace() != null;
        }
        return false;
    }

    /** Builds a trace entity for text-based requests (text-to-text, image-to-text). */
    private ModelRequestTrace buildTextTrace(ModelTextRequest request, ModelTextResponse response,
                                             RequestContext context, ModelRequestTrace.RequestType requestType) {
        ModelRequestTrace.ModelRequestTraceBuilder builder = ModelRequestTrace.builder()
                .id(AIUtils.generateId())
                .requestType(requestType)
                .contextType(getContextType(context))
                .contextId(context.getContextId())
                .timestamp(System.currentTimeMillis())
                .promptTemplate(extractPromptTemplate(request))
                .promptId(context.getPromptId())
                .variables(convertVariables(context.getVariables()))
                .chatId(context.getChatId());
        // Add response information.
        if (response != null) {
            builder.responseId(response.getId())
                    .response(response.getResponse());
            if (response.getTrace() != null) {
                builder.trace(response.getTrace())
                        .modelId(response.getTrace().getModel());
                if (response.getTrace().isHasError()) {
                    builder.errorMessage(response.getTrace().getErrorMessage());
                }
            }
        }
        // Add workflow information if available.
        if (context.getWorkflowId() != null) {
            builder.workflowInfo(ModelRequestTrace.WorkflowInfo.builder()
                    .workflowId(context.getWorkflowId())
                    .workflowType(context.getWorkflowType())
                    .workflowStep(context.getWorkflowStep())
                    .build());
        }
        return builder.build();
    }

    /** Builds a trace entity for text-to-image requests. */
    private ModelRequestTrace buildImageTrace(ModelImageRequest request, ModelImageResponse response,
                                              RequestContext context) {
        ModelRequestTrace.ModelRequestTraceBuilder builder = ModelRequestTrace.builder()
                .id(AIUtils.generateId())
                .requestType(ModelRequestTrace.RequestType.TEXT_TO_IMAGE)
                .contextType(getContextType(context))
                .contextId(context.getContextId())
                .timestamp(System.currentTimeMillis())
                .promptTemplate(request.getPrompt())
                .promptId(context.getPromptId())
                .variables(convertVariables(context.getVariables()))
                .chatId(context.getChatId());
        // Add response information.
        if (response != null && response.getTrace() != null) {
            builder.trace(response.getTrace())
                    .modelId(response.getTrace().getModel());
            if (response.getTrace().isHasError()) {
                builder.errorMessage(response.getTrace().getErrorMessage());
            }
        }
        // Add workflow information if available.
        if (context.getWorkflowId() != null) {
            builder.workflowInfo(ModelRequestTrace.WorkflowInfo.builder()
                    .workflowId(context.getWorkflowId())
                    .workflowType(context.getWorkflowType())
                    .workflowStep(context.getWorkflowStep())
                    .build());
        }
        return builder.build();
    }

    /** Maps the free-form context-type string onto the trace enum (CUSTOM when unknown). */
    private ModelRequestTrace.ContextType getContextType(RequestContext context) {
        if (context.getContextType() == null) {
            return ModelRequestTrace.ContextType.CUSTOM;
        }
        return switch (context.getContextType().toLowerCase()) {
            case "workflow" -> ModelRequestTrace.ContextType.WORKFLOW;
            case "agent" -> ModelRequestTrace.ContextType.AGENT;
            case "message_task" -> ModelRequestTrace.ContextType.MESSAGE_TASK;
            case "image_task" -> ModelRequestTrace.ContextType.IMAGE_TASK;
            case "direct" -> ModelRequestTrace.ContextType.DIRECT;
            default -> ModelRequestTrace.ContextType.CUSTOM;
        };
    }

    /**
     * Converts context variables to a string map for persistence,
     * preserving null-valued entries.
     */
    private Map<String, String> convertVariables(Map<String, Object> variables) {
        if (variables == null) {
            return null;
        }
        // BUGFIX: Collectors.toMap throws NullPointerException when the value
        // mapper returns null, so accumulate into a HashMap manually to keep
        // null-valued entries instead of crashing the trace.
        Map<String, String> converted = new HashMap<>();
        for (Map.Entry<String, Object> entry : variables.entrySet()) {
            Object value = entry.getValue();
            converted.put(entry.getKey(), value != null ? value.toString() : null);
        }
        return converted;
    }

    /** Extracts the last user message from the request as the prompt template. */
    private String extractPromptTemplate(ModelTextRequest request) {
        // CollectionUtils.isEmpty already handles the null case.
        if (CollectionUtils.isEmpty(request.getMessages())) {
            return null;
        }
        // Find the last user message as prompt template.
        return request.getMessages().stream()
                .filter(msg -> msg.getRole() == Role.user)
                .reduce((first, second) -> second) // Get the last user message
                .map(msg -> {
                    Object content = msg.getContent();
                    if (content instanceof String) {
                        return (String) content;
                    } else if (content instanceof List) {
                        // Handle multimodal content.
                        return content.toString();
                    }
                    return null;
                })
                .orElse(null);
    }

    /** Schedules persistence on the trace executor (fire-and-forget). */
    private void saveTraceAsync(ModelRequestTrace trace) {
        CompletableFuture.runAsync(() -> saveTrace(trace), traceExecutor);
    }

    /** Persists one trace; failures are logged but never propagated. */
    private void saveTrace(ModelRequestTrace trace) {
        try {
            traceRepository.save(trace);
            log.trace("Saved trace: {} for context: {}", trace.getId(), trace.getContextId());
        } catch (Exception e) {
            log.error("Failed to save trace: {} for context: {}", trace.getId(), trace.getContextId(), e);
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing/domain/AsyncTaskEntity.java
|
package ai.driftkit.workflow.engine.spring.tracing.domain;
import lombok.*;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;
import java.util.Map;
/**
 * Entity for storing async task execution status and results.
 * Provides persistent storage for async LLM operations.
 *
 * <p>Field order defines the Lombok {@code @AllArgsConstructor}/{@code @Builder}
 * parameter order — do not reorder fields.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Document(collection = "async_tasks")
public class AsyncTaskEntity {
    @Id
    private String taskId;
    @Indexed
    private String chatId;
    @Indexed
    private String userId;
    @Indexed
    private TaskStatus status;
    private TaskType taskType;
    // Request information
    private String requestBody; // JSON serialized request
    private String workflowId;
    private String promptId;
    private Map<String, Object> variables;
    // Execution information (epoch millis timestamps)
    private Long createdAt;
    private Long startedAt;
    private Long completedAt;
    private Long executionTimeMs;
    // Result information
    private String result; // JSON serialized AgentResponse
    private String errorMessage;
    private String errorStackTrace;
    // Model information
    private String modelId;
    private Double temperature;
    // Metadata
    private Map<String, String> metadata;

    /**
     * Task execution status
     */
    public enum TaskStatus {
        PENDING, // Task created but not started
        RUNNING, // Task is being executed
        COMPLETED, // Task completed successfully
        FAILED, // Task failed with error
        TIMEOUT, // Task timed out
        CANCELLED // Task was cancelled
    }

    /**
     * Task type
     */
    public enum TaskType {
        PROMPT_REQUEST, // Request with prompt ID
        TEXT_REQUEST, // Direct text request
        WORKFLOW // Workflow execution
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing/domain/MessageTaskEntity.java
|
package ai.driftkit.workflow.engine.spring.tracing.domain;
import ai.driftkit.common.domain.MessageTask;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
/**
 * MongoDB document wrapper around {@link MessageTask} so the shared domain
 * type can be persisted with Spring Data annotations.
 *
 * <p>The message ID doubles as the Mongo document {@code _id} via the
 * annotated getter override.</p>
 */
@Data
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true)
@Document(collection = "messageTasks")
public class MessageTaskEntity extends MessageTask {

    // Annotating the getter (rather than a field) marks the inherited
    // messageId as the Mongo document id.
    @Id
    @Override
    public String getMessageId() {
        return super.getMessageId();
    }

    /**
     * Copy-constructs an entity from a plain MessageTask.
     *
     * <p>NOTE(review): this positional super() call must match the exact field
     * order of MessageTask's all-args constructor — any field added to or
     * reordered in MessageTask silently breaks this mapping. Confirm on every
     * MessageTask change.</p>
     */
    public MessageTaskEntity(MessageTask task) {
        super(
                task.getMessageId(),
                task.getChatId(),
                task.getMessage(),
                task.getSystemMessage(),
                task.getGradeComment(),
                task.getGrade(),
                task.getCreatedTime(),
                task.getResponseTime(),
                task.getModelId(),
                task.getResult(),
                task.getImageTaskId(),
                task.getPromptIds(),
                task.getTemperature(),
                task.getWorkflow(),
                task.getContextJson(),
                task.getLanguage(),
                task.getVariables(),
                task.isJsonRequest(),
                task.isJsonResponse(),
                task.getResponseFormat(),
                task.getWorkflowStopEvent(),
                task.getLogprobs(),
                task.getTopLogprobs(),
                task.getLogProbs(),
                task.getPurpose(),
                task.getImageBase64(),
                task.getImageMimeType()
        );
    }

    /** Wraps a MessageTask as an entity, reusing the instance when it already is one. */
    public static MessageTaskEntity fromMessageTask(MessageTask task) {
        if (task == null) {
            return null;
        }
        if (task instanceof MessageTaskEntity) {
            return (MessageTaskEntity) task;
        }
        return new MessageTaskEntity(task);
    }

    /** Converts an entity back to a plain MessageTask via the builder. */
    public static MessageTask toMessageTask(MessageTaskEntity entity) {
        if (entity == null) {
            return null;
        }
        return MessageTask.builder()
                .messageId(entity.getMessageId())
                .chatId(entity.getChatId())
                .message(entity.getMessage())
                .systemMessage(entity.getSystemMessage())
                .gradeComment(entity.getGradeComment())
                .grade(entity.getGrade())
                .createdTime(entity.getCreatedTime())
                .responseTime(entity.getResponseTime())
                .modelId(entity.getModelId())
                .result(entity.getResult())
                .imageTaskId(entity.getImageTaskId())
                .promptIds(entity.getPromptIds())
                .temperature(entity.getTemperature())
                .workflow(entity.getWorkflow())
                // NOTE(review): builder method is named context while the getter
                // is getContextJson — presumably the builder maps it to the same
                // field; verify against MessageTask.
                .context(entity.getContextJson())
                .language(entity.getLanguage())
                .variables(entity.getVariables())
                .jsonRequest(entity.isJsonRequest())
                .jsonResponse(entity.isJsonResponse())
                .responseFormat(entity.getResponseFormat())
                .workflowStopEvent(entity.getWorkflowStopEvent())
                .logprobs(entity.getLogprobs())
                .topLogprobs(entity.getTopLogprobs())
                .logProbs(entity.getLogProbs())
                .purpose(entity.getPurpose())
                .imageBase64(entity.getImageBase64())
                .imageMimeType(entity.getImageMimeType())
                .build();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing/domain/ModelRequestTrace.java
|
package ai.driftkit.workflow.engine.spring.tracing.domain;
import ai.driftkit.common.domain.ModelTrace;
import lombok.*;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import java.util.Map;
/**
 * Domain entity for storing model request traces.
 * Provides comprehensive tracking of all LLM requests across workflows and agents.
 *
 * NOTE(review): @Builder and @AllArgsConstructor derive their parameter order
 * from the field declaration order below — do not reorder fields without
 * checking all positional-constructor call sites.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Document(collection = "model_request_traces")
public class ModelRequestTrace {

    // MongoDB document id.
    @Id
    private String id;

    // Context information
    private String contextId; // Can be workflow ID, agent ID, or message ID
    private ContextType contextType;
    private RequestType requestType;

    // Timing
    // Epoch millis presumably (compare findByTimestampBetween in the repository) — verify producer.
    private long timestamp;

    // Request details
    private String promptTemplate;
    private String promptId;
    // Variables substituted into the prompt template.
    private Map<String, String> variables;

    // Model information
    private String modelId;

    // Response details
    private String responseId;
    private String response;
    // Non-null when the request failed; repository queries filter on its presence.
    private String errorMessage;

    // Trace information from model
    private ModelTrace trace;

    // Workflow context
    private WorkflowInfo workflowInfo;

    // Additional metadata
    private String purpose;
    private String chatId;
    private String userId;

    /**
     * Request type enumeration
     */
    public enum RequestType {
        TEXT_TO_TEXT,
        TEXT_TO_IMAGE,
        IMAGE_TO_TEXT,
        MULTIMODAL
    }

    /**
     * Context type enumeration
     */
    public enum ContextType {
        WORKFLOW,       // Request from workflow
        AGENT,          // Request from LLMAgent
        MESSAGE_TASK,   // Request from message task
        IMAGE_TASK,     // Request from image task
        DIRECT,         // Direct API request
        CUSTOM          // Custom context
    }

    /**
     * Workflow information
     */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class WorkflowInfo {
        private String workflowId;
        private String workflowType;
        private String workflowStep;
        private String workflowVersion;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing/repository/AsyncTaskRepository.java
|
package ai.driftkit.workflow.engine.spring.tracing.repository;
import ai.driftkit.workflow.engine.spring.tracing.domain.AsyncTaskEntity;
import ai.driftkit.workflow.engine.spring.tracing.domain.AsyncTaskEntity.TaskStatus;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
import java.util.Optional;
/**
 * Repository for async task entities.
 * Provides persistence and query operations for async LLM task execution.
 *
 * <p>Non-annotated methods rely on Spring Data query derivation from the
 * method name — renaming a method changes the generated query.
 */
@Repository
public interface AsyncTaskRepository extends MongoRepository<AsyncTaskEntity, String> {

    /**
     * Find task by ID
     */
    Optional<AsyncTaskEntity> findByTaskId(String taskId);

    /**
     * Find tasks by chat ID, newest first.
     */
    List<AsyncTaskEntity> findByChatIdOrderByCreatedAtDesc(String chatId);

    /**
     * Find tasks by user ID, newest first.
     */
    Page<AsyncTaskEntity> findByUserIdOrderByCreatedAtDesc(String userId, Pageable pageable);

    /**
     * Find tasks by status
     */
    List<AsyncTaskEntity> findByStatus(TaskStatus status);

    /**
     * Find tasks by status with pagination
     */
    Page<AsyncTaskEntity> findByStatus(TaskStatus status, Pageable pageable);

    /**
     * Find running tasks older than specified timestamp.
     *
     * @param olderThan cutoff; tasks with startedAt strictly before this value match
     */
    // The literal 'RUNNING' must stay in sync with AsyncTaskEntity.TaskStatus.RUNNING.
    @Query("{ 'status': 'RUNNING', 'startedAt': { $lt: ?0 } }")
    List<AsyncTaskEntity> findStaleRunningTasks(Long olderThan);

    /**
     * Find tasks by workflow ID, newest first.
     */
    List<AsyncTaskEntity> findByWorkflowIdOrderByCreatedAtDesc(String workflowId);

    /**
     * Count tasks by status
     */
    Long countByStatus(TaskStatus status);

    /**
     * Find tasks by user and status
     */
    List<AsyncTaskEntity> findByUserIdAndStatus(String userId, TaskStatus status);

    /**
     * Find tasks by list of task IDs
     */
    List<AsyncTaskEntity> findByTaskIdIn(List<String> taskIds);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/tracing/repository/CoreModelRequestTraceRepository.java
|
package ai.driftkit.workflow.engine.spring.tracing.repository;
import ai.driftkit.workflow.engine.spring.tracing.domain.ModelRequestTrace;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Repository for ModelRequestTrace entities.
 * Provides persistence and query capabilities for model request traces.
 *
 * <p>Non-annotated methods rely on Spring Data query derivation from the
 * method name; annotated methods query nested fields or use operators that
 * cannot be expressed by derivation.
 */
@Repository
public interface CoreModelRequestTraceRepository extends MongoRepository<ModelRequestTrace, String> {

    /**
     * Find traces by context ID
     */
    List<ModelRequestTrace> findByContextId(String contextId);

    /**
     * Find traces by context ID with pagination
     */
    Page<ModelRequestTrace> findByContextId(String contextId, Pageable pageable);

    /**
     * Find traces by chat ID
     */
    List<ModelRequestTrace> findByChatId(String chatId);

    /**
     * Find traces by chat ID with pagination
     */
    Page<ModelRequestTrace> findByChatId(String chatId, Pageable pageable);

    /**
     * Find traces by workflow ID (nested under workflowInfo, hence explicit @Query).
     */
    @Query("{ 'workflowInfo.workflowId': ?0 }")
    List<ModelRequestTrace> findByWorkflowId(String workflowId);

    /**
     * Find traces by workflow ID with pagination
     */
    @Query("{ 'workflowInfo.workflowId': ?0 }")
    Page<ModelRequestTrace> findByWorkflowId(String workflowId, Pageable pageable);

    /**
     * Find traces by context type
     */
    List<ModelRequestTrace> findByContextType(ModelRequestTrace.ContextType contextType);

    /**
     * Find traces by context type with pagination
     */
    Page<ModelRequestTrace> findByContextType(ModelRequestTrace.ContextType contextType, Pageable pageable);

    /**
     * Find traces by request type
     */
    List<ModelRequestTrace> findByRequestType(ModelRequestTrace.RequestType requestType);

    /**
     * Find traces by user ID
     */
    List<ModelRequestTrace> findByUserId(String userId);

    /**
     * Find traces with errors (errorMessage present and non-null).
     */
    @Query("{ 'errorMessage': { $exists: true, $ne: null } }")
    List<ModelRequestTrace> findTracesWithErrors();

    /**
     * Find traces within time range (inclusive on both ends).
     */
    @Query("{ 'timestamp': { $gte: ?0, $lte: ?1 } }")
    List<ModelRequestTrace> findByTimestampBetween(long startTime, long endTime);

    /**
     * Count traces by context type
     */
    long countByContextType(ModelRequestTrace.ContextType contextType);

    /**
     * Count traces by request type
     */
    long countByRequestType(ModelRequestTrace.RequestType requestType);
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/websocket/WebSocketConfig.java
|
package ai.driftkit.workflow.engine.spring.websocket;
import lombok.RequiredArgsConstructor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Configuration;
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker;
import org.springframework.web.socket.config.annotation.StompEndpointRegistry;
import org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer;
/**
 * WebSocket configuration for real-time workflow updates.
 * Enables STOMP over WebSocket for bidirectional communication.
 *
 * <p>Enabled by default; disable with
 * {@code driftkit.workflow.websocket.enabled=false}.
 */
@Configuration
@EnableWebSocketMessageBroker
@ConditionalOnProperty(
    prefix = "driftkit.workflow.websocket",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
// @RequiredArgsConstructor removed: the class has no final fields, so the
// annotation only generated the same no-arg constructor the compiler provides.
public class WebSocketConfig implements WebSocketMessageBrokerConfigurer {

    @Override
    public void configureMessageBroker(MessageBrokerRegistry config) {
        // Enable simple (in-memory) broker for topic subscriptions.
        config.enableSimpleBroker(
            "/topic/workflow",  // Workflow status updates
            "/topic/chat",      // Chat message updates
            "/topic/async"      // Async task progress updates
        );
        // Messages from clients to @MessageMapping handlers are prefixed with /app.
        config.setApplicationDestinationPrefixes("/app");
    }

    @Override
    public void registerStompEndpoints(StompEndpointRegistry registry) {
        // Register STOMP endpoint with SockJS fallback.
        // NOTE(review): "*" allows every origin — acceptable for internal tooling,
        // but consider restricting via configuration for public deployments.
        registry.addEndpoint("/ws")
            .setAllowedOriginPatterns("*")
            .withSockJS();
        // Also register raw WebSocket endpoint (without SockJS).
        registry.addEndpoint("/ws-raw")
            .setAllowedOriginPatterns("*");
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/websocket/WorkflowEventWebSocketBridge.java
|
package ai.driftkit.workflow.engine.spring.websocket;
import ai.driftkit.common.domain.chat.ChatRequest;
import ai.driftkit.common.domain.chat.ChatResponse;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
/**
 * Bridge component that forwards chat traffic to WebSocket subscribers.
 * Delegates delivery to {@link WorkflowWebSocketService}, keyed by chat ID.
 */
@Slf4j
@Component
@RequiredArgsConstructor
@ConditionalOnProperty(
    prefix = "driftkit.workflow.websocket",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
@ConditionalOnBean(WorkflowWebSocketService.class)
public class WorkflowEventWebSocketBridge {

    private final WorkflowWebSocketService webSocketService;

    /**
     * Forwards an incoming chat request to subscribers of its chat topic.
     *
     * @param request the chat request to broadcast
     */
    public void sendChatRequest(ChatRequest request) {
        final String chatId = request.getChatId();
        log.debug("Sending ChatRequest to WebSocket: chatId={}, userId={}",
                chatId, request.getUserId());
        webSocketService.sendChatMessage(chatId, request);
    }

    /**
     * Forwards a chat response to subscribers of its chat topic.
     *
     * @param response the chat response to broadcast
     */
    public void sendChatResponse(ChatResponse response) {
        final String chatId = response.getChatId();
        log.debug("Sending ChatResponse to WebSocket: chatId={}, completed={}",
                chatId, response.isCompleted());
        webSocketService.sendChatMessage(chatId, response);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/websocket/WorkflowWebSocketController.java
|
package ai.driftkit.workflow.engine.spring.websocket;
import ai.driftkit.workflow.engine.async.ProgressTracker;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.spring.service.WorkflowService;
import lombok.Data;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.messaging.handler.annotation.DestinationVariable;
import org.springframework.messaging.handler.annotation.MessageMapping;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.messaging.simp.annotation.SubscribeMapping;
import org.springframework.stereotype.Controller;
import java.security.Principal;
import java.util.HashMap;
import java.util.Map;
/**
 * WebSocket controller for handling real-time workflow interactions.
 * Provides endpoints for subscribing to updates and sending commands.
 */
@Slf4j
@Controller
@RequiredArgsConstructor
@ConditionalOnProperty(
    prefix = "driftkit.workflow.websocket",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
public class WorkflowWebSocketController {

    private final WorkflowEngine engine;
    private final WorkflowService workflowService;
    private final WorkflowWebSocketService webSocketService;
    private final ProgressTracker progressTracker;

    /**
     * Subscribe to workflow status updates.
     * Called when client subscribes to /topic/workflow/{runId}/status
     *
     * @return the current status snapshot, or null when the run is unknown
     *         (presumably STOMP then sends no initial payload — verify client handling)
     */
    @SubscribeMapping("/topic/workflow/{runId}/status")
    public WorkflowWebSocketService.WorkflowStatusUpdate subscribeToWorkflowStatus(
            @DestinationVariable String runId) {
        log.info("Client subscribed to workflow status for runId: {}", runId);
        // Send current status immediately upon subscription
        return engine.getWorkflowInstance(runId)
            .map(instance -> new WorkflowWebSocketService.WorkflowStatusUpdate(
                runId,
                instance.getWorkflowId(),
                instance.getStatus().toString(),
                instance.getCurrentStepId(),
                instance.getNextStepId(),
                instance.getUpdatedAt()
            ))
            .orElse(null);
    }

    /**
     * Subscribe to async task progress.
     * Called when client subscribes to /topic/async/{runId}/{taskId}/progress
     *
     * @return the current progress snapshot, or null when the task is unknown
     */
    @SubscribeMapping("/topic/async/{runId}/{taskId}/progress")
    public WorkflowWebSocketService.AsyncProgressUpdate subscribeToAsyncProgress(
            @DestinationVariable String runId,
            @DestinationVariable String taskId) {
        log.info("Client subscribed to async progress for runId: {}, taskId: {}", runId, taskId);
        // Send current progress immediately upon subscription
        return progressTracker.getProgress(taskId)
            .map(progress -> new WorkflowWebSocketService.AsyncProgressUpdate(
                runId,
                taskId,
                progress.status().toString(),
                progress.percentComplete(),
                progress.message(),
                System.currentTimeMillis()
            ))
            .orElse(null);
    }

    /**
     * Execute workflow via WebSocket.
     * Client sends to /app/workflow/execute
     *
     * <p>Broad Exception catch is intentional at this messaging boundary:
     * failures are converted into an ERROR response rather than propagated,
     * so the subscriber always receives a reply.
     */
    @MessageMapping("/workflow/execute")
    @SendTo("/topic/workflow/executions")
    public WorkflowExecutionResponse executeWorkflow(
            @Payload WorkflowExecutionRequest request,
            Principal principal) {
        log.info("Executing workflow via WebSocket: workflowId={}, user={}",
            request.getWorkflowId(), principal != null ? principal.getName() : "anonymous");
        try {
            var execution = engine.execute(request.getWorkflowId(), request.getInput());
            // Workflow updates will be sent automatically through WorkflowService
            return new WorkflowExecutionResponse(
                execution.getRunId(),
                request.getWorkflowId(),
                "STARTED",
                null
            );
        } catch (Exception e) {
            log.error("Error executing workflow via WebSocket", e);
            return new WorkflowExecutionResponse(
                null,  // no runId — execution never started
                request.getWorkflowId(),
                "ERROR",
                e.getMessage()
            );
        }
    }

    // Request/Response DTOs

    /** Payload sent by clients to /app/workflow/execute. */
    @Data
    public static class WorkflowExecutionRequest {
        private String workflowId;
        private Map<String, Object> input;
        private Map<String, String> metadata;
    }

    /** Reply broadcast on /topic/workflow/executions; error is null on success. */
    @Data
    public static class WorkflowExecutionResponse {
        private final String runId;
        private final String workflowId;
        private final String status;
        private final String error;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring
|
java-sources/ai/driftkit/driftkit-workflow-engine-spring-boot-starter/0.8.1/ai/driftkit/workflow/engine/spring/websocket/WorkflowWebSocketService.java
|
package ai.driftkit.workflow.engine.spring.websocket;
import ai.driftkit.common.domain.chat.ChatMessage;
import ai.driftkit.common.domain.chat.ChatResponse;
import ai.driftkit.workflow.engine.async.ProgressTracker;
import ai.driftkit.workflow.engine.domain.WorkflowEvent;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import lombok.Data;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.stereotype.Service;
import java.util.Map;
/**
 * Service for sending real-time workflow updates via WebSocket.
 * Provides methods to broadcast workflow state changes, progress updates, and chat messages.
 */
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(
    prefix = "driftkit.workflow.websocket",
    name = "enabled",
    havingValue = "true",
    matchIfMissing = true
)
public class WorkflowWebSocketService {

    private final SimpMessagingTemplate messagingTemplate;

    /**
     * Send workflow status update to subscribers of /topic/workflow/{runId}/status.
     */
    public void sendWorkflowStatusUpdate(String runId, WorkflowInstance instance) {
        WorkflowStatusUpdate update = new WorkflowStatusUpdate(
            runId,
            instance.getWorkflowId(),
            instance.getStatus().toString(),
            instance.getCurrentStepId(),
            instance.getNextStepId(),
            System.currentTimeMillis()
        );
        String destination = "/topic/workflow/" + runId + "/status";
        messagingTemplate.convertAndSend(destination, update);
        log.debug("Sent workflow status update for runId: {} to {}", runId, destination);
    }

    /**
     * Send async task progress update to /topic/async/{runId}/{taskId}/progress.
     */
    public void sendAsyncProgressUpdate(String runId, String taskId, ProgressTracker.Progress progress) {
        AsyncProgressUpdate update = new AsyncProgressUpdate(
            runId,
            taskId,
            progress.status().toString(),
            progress.percentComplete(),
            progress.message(),
            System.currentTimeMillis()
        );
        String destination = "/topic/async/" + runId + "/" + taskId + "/progress";
        messagingTemplate.convertAndSend(destination, update);
        log.debug("Sent async progress update for taskId: {} to {}", taskId, destination);
    }

    /**
     * Send workflow event to subscribers of /topic/workflow/{runId}/events.
     */
    public void sendWorkflowEvent(String runId, WorkflowEvent event) {
        String destination = "/topic/workflow/" + runId + "/events";
        messagingTemplate.convertAndSend(destination, event);
        log.debug("Sent workflow event for runId: {} to {}", runId, destination);
    }

    /**
     * Send chat message (request or response) to subscribers.
     *
     * NOTE(review): this Object overload and the ChatMessage overload below have
     * identical bodies; callers passing ChatRequest/ChatResponse resolve here.
     * Removing either would break binary compatibility, so both are kept.
     */
    public void sendChatMessage(String chatId, Object message) {
        String destination = "/topic/chat/" + chatId + "/messages";
        messagingTemplate.convertAndSend(destination, message);
        log.debug("Sent chat message for chatId: {} to {}", chatId, destination);
    }

    /**
     * Send chat message update (typed overload; behaviorally identical to the
     * Object overload above — see the note there).
     */
    public void sendChatMessage(String chatId, ChatMessage message) {
        String destination = "/topic/chat/" + chatId + "/messages";
        messagingTemplate.convertAndSend(destination, message);
        log.debug("Sent chat message for chatId: {} to {}", chatId, destination);
    }

    /**
     * Send chat response update (for async responses) to a per-response topic.
     */
    public void sendChatResponseUpdate(String chatId, ChatResponse response) {
        ChatResponseUpdate update = new ChatResponseUpdate(
            response.getId(),
            chatId,
            response.isCompleted(),
            response.getPercentComplete(),
            response.getPropertiesMap(),
            System.currentTimeMillis()
        );
        String destination = "/topic/chat/" + chatId + "/responses/" + response.getId();
        messagingTemplate.convertAndSend(destination, update);
        log.debug("Sent chat response update for responseId: {} to {}", response.getId(), destination);
    }

    /**
     * Send error notification to /topic/workflow/{runId}/errors.
     */
    public void sendErrorNotification(String runId, String stepId, Throwable error) {
        ErrorNotification notification = new ErrorNotification(
            runId,
            stepId,
            error.getClass().getSimpleName(),
            error.getMessage(),
            System.currentTimeMillis()
        );
        String destination = "/topic/workflow/" + runId + "/errors";
        messagingTemplate.convertAndSend(destination, notification);
        log.debug("Sent error notification for runId: {} to {}", runId, destination);
    }

    /**
     * Send workflow completion notification; result (and its type name) may be null.
     */
    public void sendCompletionNotification(String runId, Object result) {
        CompletionNotification notification = new CompletionNotification(
            runId,
            result != null ? result.getClass().getSimpleName() : null,
            result,
            System.currentTimeMillis()
        );
        String destination = "/topic/workflow/" + runId + "/completion";
        messagingTemplate.convertAndSend(destination, notification);
        log.debug("Sent completion notification for runId: {} to {}", runId, destination);
    }

    // DTOs for WebSocket messages

    /** Snapshot of a workflow run's status at a point in time. */
    @Data
    public static class WorkflowStatusUpdate {
        private final String runId;
        private final String workflowId;
        private final String status;
        private final String currentStepId;
        private final String nextStepId;
        private final long timestamp;
    }

    /** Progress snapshot for one async task within a run. */
    @Data
    public static class AsyncProgressUpdate {
        private final String runId;
        private final String taskId;
        private final String status;
        private final int percentComplete;
        private final String message;
        private final long lastUpdateTime;
    }

    /** Incremental update for an async chat response. */
    @Data
    public static class ChatResponseUpdate {
        private final String responseId;
        private final String chatId;
        private final boolean completed;
        private final Integer percentComplete;
        private final Map<String, String> properties;
        private final long timestamp;
    }

    /** Error detail broadcast on a run's error topic. */
    @Data
    public static class ErrorNotification {
        private final String runId;
        private final String stepId;
        private final String errorType;
        private final String errorMessage;
        private final long timestamp;
    }

    /** Final-result notification broadcast on a run's completion topic. */
    @Data
    public static class CompletionNotification {
        private final String runId;
        private final String resultType;
        private final Object result;
        private final long timestamp;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions/AssertionEngine.java
|
package ai.driftkit.workflow.test.assertions;
import ai.driftkit.workflow.test.core.ExecutionTracker;
import ai.driftkit.workflow.test.core.WorkflowTestException;
import lombok.extern.slf4j.Slf4j;
import java.util.Objects;
/**
 * Main assertion engine for workflow tests.
 * Acts as a factory for fluent, step-level assertions backed by an
 * {@link ExecutionTracker}.
 */
@Slf4j
public class AssertionEngine {

    private final ExecutionTracker executionTracker;

    /**
     * Creates an assertion engine with the given execution tracker.
     *
     * @param executionTracker the execution tracker
     * @throws NullPointerException if executionTracker is null
     */
    public AssertionEngine(ExecutionTracker executionTracker) {
        this.executionTracker =
                Objects.requireNonNull(executionTracker, "executionTracker cannot be null");
    }

    /**
     * Creates assertions for a specific step.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @return step assertions
     * @throws NullPointerException if either argument is null
     */
    public StepAssertions assertStep(String workflowId, String stepId) {
        final String wf = Objects.requireNonNull(workflowId, "workflowId cannot be null");
        final String step = Objects.requireNonNull(stepId, "stepId cannot be null");
        return new StepAssertions(wf, step, executionTracker);
    }

    // Note: Workflow and execution assertions are now handled by EnhancedWorkflowAssertions
    // and can be accessed through the static factory methods
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions/EnhancedWorkflowAssertions.java
|
package ai.driftkit.workflow.test.assertions;
import ai.driftkit.workflow.test.core.ExecutionTracker;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.core.WorkflowEngine.WorkflowExecution;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance.WorkflowStatus;
import ai.driftkit.workflow.engine.domain.WorkflowEvent;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.Assertions;
import java.time.Duration;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
 * Enhanced AssertJ style assertions for workflow executions.
 * Provides fluent assertion API for workflow testing.
 *
 * <p>All history-based assertions filter the shared {@link ExecutionTracker}
 * records by the workflow ID of the execution under test.
 */
public class EnhancedWorkflowAssertions extends AbstractAssert<EnhancedWorkflowAssertions, WorkflowExecution<?>> {

    private final ExecutionTracker executionTracker;

    public EnhancedWorkflowAssertions(WorkflowExecution<?> actual, ExecutionTracker executionTracker) {
        super(actual, EnhancedWorkflowAssertions.class);
        this.executionTracker = Objects.requireNonNull(executionTracker, "executionTracker cannot be null");
    }

    /**
     * Entry point for workflow assertions.
     */
    public static EnhancedWorkflowAssertions assertThat(WorkflowExecution<?> execution, ExecutionTracker tracker) {
        return new EnhancedWorkflowAssertions(execution, tracker);
    }

    /**
     * Asserts that the workflow has executed specific steps in any order.
     * Additional, unlisted steps are allowed (uses AssertJ {@code contains}).
     *
     * @param expectedSteps the expected step IDs
     * @return this assertion
     */
    public EnhancedWorkflowAssertions hasExecutedSteps(String... expectedSteps) {
        isNotNull();
        List<String> executedSteps = executionTracker.getExecutedSteps(actual.getWorkflowId());
        Assertions.assertThat(executedSteps)
            .as("Executed steps for workflow '%s'", actual.getWorkflowId())
            .contains(expectedSteps);
        return this;
    }

    /**
     * Asserts that the workflow has executed specific steps in exact order.
     * No other steps may appear (uses AssertJ {@code containsExactly}).
     *
     * @param expectedSteps the expected step IDs in order
     * @return this assertion
     */
    public EnhancedWorkflowAssertions hasExecutedStepsInOrder(String... expectedSteps) {
        isNotNull();
        List<String> executedSteps = executionTracker.getExecutedSteps(actual.getWorkflowId());
        Assertions.assertThat(executedSteps)
            .as("Executed steps for workflow '%s'", actual.getWorkflowId())
            .containsExactly(expectedSteps);
        return this;
    }

    /**
     * Asserts that the workflow has no failures recorded in the tracker history.
     *
     * @return this assertion
     */
    public EnhancedWorkflowAssertions hasNoFailures() {
        isNotNull();
        List<ExecutionTracker.ExecutionRecord> failures = executionTracker.getHistory()
            .getRecords().stream()
            .filter(r -> r.getWorkflowId().equals(actual.getWorkflowId()))
            .filter(r -> r.getStatus() == ExecutionTracker.ExecutionStatus.FAILED)
            .collect(Collectors.toList());
        Assertions.assertThat(failures)
            .as("Failed executions for workflow '%s'", actual.getWorkflowId())
            .isEmpty();
        return this;
    }

    /**
     * Asserts that the workflow completed within a specific duration, measured
     * between the workflow's STARTED record and its first COMPLETED/FAILED record.
     *
     * @param duration the maximum expected duration
     * @return this assertion
     */
    public EnhancedWorkflowAssertions completedWithin(Duration duration) {
        isNotNull();
        Objects.requireNonNull(duration, "duration cannot be null");
        // Get workflow start and end times (WORKFLOW-type records only)
        List<ExecutionTracker.ExecutionRecord> workflowRecords = executionTracker.getHistory()
            .getRecords().stream()
            .filter(r -> r.getWorkflowId().equals(actual.getWorkflowId()))
            .filter(r -> r.getType() == ExecutionTracker.RecordType.WORKFLOW)
            .collect(Collectors.toList());
        // failWithMessage throws AssertionError, so execution stops here when
        // there is no start/end pair to measure.
        if (workflowRecords.size() < 2) {
            failWithMessage("Cannot determine workflow duration - insufficient records");
        }
        // findFirst presumably relies on records being in insertion order — verify
        // ExecutionTracker's ordering guarantee if runs can repeat.
        long startTime = workflowRecords.stream()
            .filter(r -> r.getStatus() == ExecutionTracker.ExecutionStatus.STARTED)
            .findFirst()
            .orElseThrow(() -> new AssertionError("No workflow start record found"))
            .getTimestamp();
        long endTime = workflowRecords.stream()
            .filter(r -> r.getStatus() == ExecutionTracker.ExecutionStatus.COMPLETED ||
                         r.getStatus() == ExecutionTracker.ExecutionStatus.FAILED)
            .findFirst()
            .orElseThrow(() -> new AssertionError("No workflow completion record found"))
            .getTimestamp();
        Duration actualDuration = Duration.ofMillis(endTime - startTime);
        if (actualDuration.compareTo(duration) > 0) {
            failWithMessage("Expected workflow '%s' to complete within %s but took %s",
                actual.getWorkflowId(), duration, actualDuration);
        }
        return this;
    }

    /**
     * Asserts that the workflow produced a result matching the predicate.
     *
     * @param predicate the result predicate
     * @return this assertion
     */
    public EnhancedWorkflowAssertions producedResult(Predicate<Object> predicate) {
        isNotNull();
        Objects.requireNonNull(predicate, "predicate cannot be null");
        Object result = getWorkflowResult();
        if (!predicate.test(result)) {
            failWithMessage("Expected workflow result to match predicate but it didn't. Actual result: %s", result);
        }
        return this;
    }

    /**
     * Asserts specific conditions on the workflow result.
     * Any exception thrown by the consumer propagates as the assertion failure.
     *
     * @param assertions consumer for result assertions
     * @return this assertion
     */
    public EnhancedWorkflowAssertions hasResultSatisfying(Consumer<Object> assertions) {
        isNotNull();
        Objects.requireNonNull(assertions, "assertions cannot be null");
        Object result = getWorkflowResult();
        assertions.accept(result);
        return this;
    }

    /**
     * Switches to step-level assertions for a specific step of this workflow.
     *
     * @param stepId the step ID
     * @return step-specific assertions
     */
    public StepAssertions step(String stepId) {
        isNotNull();
        Objects.requireNonNull(stepId, "stepId cannot be null");
        return new StepAssertions(actual.getWorkflowId(), stepId, executionTracker);
    }

    /**
     * Asserts that the workflow is in a specific state.
     * Queries the live engine for the instance rather than the tracker history.
     *
     * @param expectedState the expected state
     * @return this assertion
     */
    public EnhancedWorkflowAssertions hasState(WorkflowStatus expectedState) {
        isNotNull();
        Objects.requireNonNull(expectedState, "expectedState cannot be null");
        // Get the current workflow state from the execution
        WorkflowStatus actualState = actual.getEngine().getWorkflowInstance(actual.getRunId())
            .map(WorkflowInstance::getStatus)
            .orElseThrow(() -> new AssertionError("No workflow instance found for run ID: " + actual.getRunId()));
        Assertions.assertThat(actualState)
            .as("Workflow state")
            .isEqualTo(expectedState);
        return this;
    }

    /**
     * Asserts that the workflow completed successfully.
     *
     * @return this assertion
     */
    public EnhancedWorkflowAssertions isCompleted() {
        return hasState(WorkflowStatus.COMPLETED);
    }

    /**
     * Asserts that the workflow failed.
     *
     * @return this assertion
     */
    public EnhancedWorkflowAssertions isFailed() {
        return hasState(WorkflowStatus.FAILED);
    }

    /**
     * Asserts that the workflow is suspended.
     *
     * @return this assertion
     */
    public EnhancedWorkflowAssertions isSuspended() {
        return hasState(WorkflowStatus.SUSPENDED);
    }

    /**
     * Asserts that the workflow has specific attributes.
     * A missing instance or missing key yields null, so asserting
     * {@code hasAttribute(key, null)} passes in both cases.
     *
     * @param key the attribute key
     * @param value the expected value
     * @return this assertion
     */
    public EnhancedWorkflowAssertions hasAttribute(String key, Object value) {
        isNotNull();
        Objects.requireNonNull(key, "key cannot be null");
        Object actualValue = actual.getEngine().getWorkflowInstance(actual.getRunId())
            .map(instance -> instance.getContext().getContextValue(key, Object.class))
            .orElse(null);
        Assertions.assertThat(actualValue)
            .as("Workflow attribute '%s'", key)
            .isEqualTo(value);
        return this;
    }

    /**
     * Asserts that workflow events were recorded (any record whose data payload
     * is a {@link WorkflowEvent}).
     *
     * @return this assertion
     */
    public EnhancedWorkflowAssertions hasEmittedEvents() {
        isNotNull();
        List<WorkflowEvent> emittedEvents = executionTracker.getHistory()
            .getRecords().stream()
            .filter(r -> r.getWorkflowId().equals(actual.getWorkflowId()))
            .filter(r -> r.getData() instanceof WorkflowEvent)
            .map(r -> (WorkflowEvent) r.getData())
            .collect(Collectors.toList());
        Assertions.assertThat(emittedEvents)
            .as("Emitted events for workflow '%s'", actual.getWorkflowId())
            .isNotEmpty();
        return this;
    }

    /**
     * Returns the data payload of the first COMPLETED WORKFLOW-type record for
     * this workflow, or throws AssertionError when none exists.
     */
    private Object getWorkflowResult() {
        // Get the final result from execution records
        return executionTracker.getHistory()
            .getRecords().stream()
            .filter(r -> r.getWorkflowId().equals(actual.getWorkflowId()))
            .filter(r -> r.getType() == ExecutionTracker.RecordType.WORKFLOW)
            .filter(r -> r.getStatus() == ExecutionTracker.ExecutionStatus.COMPLETED)
            .findFirst()
            .map(ExecutionTracker.ExecutionRecord::getData)
            .orElseThrow(() -> new AssertionError("No workflow completion result found"));
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions/StepAssertions.java
|
package ai.driftkit.workflow.test.assertions;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.test.core.ExecutionTracker;

import lombok.extern.slf4j.Slf4j;
import org.assertj.core.api.Assertions;

import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Fluent assertions for a single workflow step, backed by the execution
 * history recorded in an {@link ExecutionTracker}.
 *
 * <p>Step lookups match both exact step IDs and branch-prefixed IDs: a
 * recorded step ID that merely contains the requested ID also counts.
 */
@Slf4j
public class StepAssertions {
    private final String workflowId;
    private final String stepId;
    private final ExecutionTracker tracker;

    /**
     * Creates assertions for the given workflow/step pair.
     *
     * @param workflowId the workflow ID (must not be null)
     * @param stepId the step ID (must not be null)
     * @param tracker the tracker holding the recorded executions (must not be null)
     */
    public StepAssertions(String workflowId, String stepId, ExecutionTracker tracker) {
        // Validate eagerly, consistent with the rest of the framework's requireNonNull style.
        this.workflowId = Objects.requireNonNull(workflowId, "workflowId cannot be null");
        this.stepId = Objects.requireNonNull(stepId, "stepId cannot be null");
        this.tracker = Objects.requireNonNull(tracker, "tracker cannot be null");
    }

    /**
     * Asserts that the step was executed at least once.
     *
     * @return this for chaining
     */
    public StepAssertions wasExecuted() {
        boolean executed = tracker.wasExecuted(workflowId, stepId);
        Assertions.assertThat(executed)
            .as("Step %s.%s should have been executed", workflowId, stepId)
            .isTrue();
        return this;
    }

    /**
     * Asserts that the step was never executed.
     *
     * @return this for chaining
     */
    public StepAssertions wasNotExecuted() {
        boolean executed = tracker.wasExecuted(workflowId, stepId);
        Assertions.assertThat(executed)
            .as("Step %s.%s should not have been executed", workflowId, stepId)
            .isFalse();
        return this;
    }

    /**
     * Asserts the exact number of times the step was executed.
     *
     * @param expectedCount the expected execution count
     * @return this for chaining
     */
    public StepAssertions wasExecutedTimes(int expectedCount) {
        int actualCount = tracker.getExecutionCount(workflowId, stepId);
        Assertions.assertThat(actualCount)
            .as("Step %s.%s execution count", workflowId, stepId)
            .isEqualTo(expectedCount);
        return this;
    }

    /**
     * Asserts that no completed execution of this step produced a
     * {@code StepResult.Fail} result.
     *
     * @return this for chaining
     */
    public StepAssertions succeeded() {
        Assertions.assertThat(hasFailResult())
            .as("Step %s.%s should have succeeded", workflowId, stepId)
            .isFalse();
        return this;
    }

    /**
     * Asserts that at least one completed execution of this step produced a
     * {@code StepResult.Fail} result.
     *
     * @return this for chaining
     */
    public StepAssertions failed() {
        Assertions.assertThat(hasFailResult())
            .as("Step %s.%s should have failed", workflowId, stepId)
            .isTrue();
        return this;
    }

    /**
     * Asserts that the step completed with the given data, matching either a
     * {@code StepResult.Continue} payload or a {@code StepResult.Finish} result.
     * The comparison is null-safe: a {@code null} expectedData matches a
     * {@code null} payload.
     *
     * @param expectedData the expected data (may be null)
     * @return this for chaining
     */
    public StepAssertions completedWith(Object expectedData) {
        List<ExecutionTracker.ExecutionRecord> completions = getStepExecutions().stream()
            .filter(record -> record.getStatus() == ExecutionTracker.ExecutionStatus.COMPLETED)
            .collect(Collectors.toList());
        Assertions.assertThat(completions)
            .as("Step %s.%s should have completed", workflowId, stepId)
            .isNotEmpty();
        boolean foundMatch = completions.stream()
            .anyMatch(record -> {
                // Objects.equals keeps this null-safe; the previous
                // expectedData.equals(...) threw NPE for a null expectation.
                if (record.getData() instanceof StepResult.Continue<?> cont) {
                    return Objects.equals(expectedData, cont.data());
                } else if (record.getData() instanceof StepResult.Finish<?> finish) {
                    return Objects.equals(expectedData, finish.result());
                }
                return false;
            });
        Assertions.assertThat(foundMatch)
            .as("Step %s.%s should have completed with data: %s", workflowId, stepId, expectedData)
            .isTrue();
        return this;
    }

    // True when any COMPLETED record for this step carries a StepResult.Fail payload.
    // Shared by succeeded() and failed() to keep their semantics symmetric.
    private boolean hasFailResult() {
        return getStepExecutions().stream()
            .filter(record -> record.getStatus() == ExecutionTracker.ExecutionStatus.COMPLETED)
            .anyMatch(record -> record.getData() instanceof StepResult.Fail);
    }

    // Records for this workflow whose step ID matches exactly or contains the
    // requested ID (covers branch-prefixed steps). contains() subsumes equals(),
    // so a single null-guarded check is sufficient.
    private List<ExecutionTracker.ExecutionRecord> getStepExecutions() {
        return tracker.getHistory().getOrderedExecutions().stream()
            .filter(record -> workflowId.equals(record.getWorkflowId()))
            .filter(record -> {
                String recordedStepId = record.getStepId();
                return recordedStepId != null && recordedStepId.contains(stepId);
            })
            .collect(Collectors.toList());
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions/strategy/AssertionStrategy.java
|
package ai.driftkit.workflow.test.assertions.strategy;
import ai.driftkit.workflow.test.core.ExecutionTracker;
/**
 * Base class for strategies that verify recorded execution behavior
 * against an {@link ExpectedBehavior} specification.
 */
public abstract class AssertionStrategy {

    /** Human-readable strategy name, used for reporting. */
    protected final String description;

    protected AssertionStrategy(String strategyDescription) {
        this.description = strategyDescription;
    }

    /**
     * Verifies the execution history against the expected behavior.
     *
     * @param history the recorded execution history
     * @param expectedBehavior the behavior specification to check
     * @throws AssertionError if verification fails
     */
    public abstract void verify(ExecutionTracker.ExecutionHistory history, ExpectedBehavior expectedBehavior);

    /**
     * Returns the human-readable strategy description.
     *
     * @return the description
     */
    public String getDescription() {
        return description;
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions/strategy/EventualConsistencyAssertionStrategy.java
|
package ai.driftkit.workflow.test.assertions.strategy;
import ai.driftkit.workflow.test.core.ExecutionTracker;
import org.assertj.core.api.Assertions;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Assertion strategy that verifies eventual consistency:
 * expected steps must have started, but their order is ignored.
 */
public class EventualConsistencyAssertionStrategy extends AssertionStrategy {

    public EventualConsistencyAssertionStrategy() {
        super("Eventual Consistency Assertion");
    }

    @Override
    public void verify(ExecutionTracker.ExecutionHistory history, ExpectedBehavior expectedBehavior) {
        List<String> startedSteps = startedStepIds(history);
        checkExpectedSteps(startedSteps, expectedBehavior);
        checkExecutionCounts(startedSteps, expectedBehavior);
        if (expectedBehavior.isShouldComplete()) {
            Assertions.assertThat(hasWorkflowRecordWithStatus(history, ExecutionTracker.ExecutionStatus.COMPLETED))
                .as("Workflow should have completed")
                .isTrue();
        }
        if (expectedBehavior.isShouldFail()) {
            Assertions.assertThat(hasWorkflowRecordWithStatus(history, ExecutionTracker.ExecutionStatus.FAILED))
                .as("Workflow should have failed")
                .isTrue();
        }
    }

    // Step IDs of STEP records with STARTED status, including repeats.
    private List<String> startedStepIds(ExecutionTracker.ExecutionHistory history) {
        return history.getOrderedExecutions().stream()
            .filter(record -> record.getType() == ExecutionTracker.RecordType.STEP)
            .filter(record -> record.getStatus() == ExecutionTracker.ExecutionStatus.STARTED)
            .map(ExecutionTracker.ExecutionRecord::getStepId)
            .collect(Collectors.toList());
    }

    // Verifies all expected steps ran, ignoring order; skipped when unspecified.
    private void checkExpectedSteps(List<String> startedSteps, ExpectedBehavior expectedBehavior) {
        List<String> expected = expectedBehavior.getExpectedSteps();
        if (expected == null || expected.isEmpty()) {
            return;
        }
        List<String> distinctExecuted = startedSteps.stream()
            .distinct()
            .collect(Collectors.toList());
        Assertions.assertThat(distinctExecuted)
            .as("Expected steps (any order)")
            .containsExactlyInAnyOrderElementsOf(expected);
    }

    // Verifies per-step start counts; skipped when no counts were specified.
    private void checkExecutionCounts(List<String> startedSteps, ExpectedBehavior expectedBehavior) {
        if (expectedBehavior.getExecutionCounts() == null) {
            return;
        }
        Map<String, Long> actualCounts = startedSteps.stream()
            .collect(Collectors.groupingBy(
                stepId -> stepId,
                Collectors.counting()
            ));
        expectedBehavior.getExecutionCounts().forEach((stepId, expectedCount) -> {
            long actualCount = actualCounts.getOrDefault(stepId, 0L);
            Assertions.assertThat(actualCount)
                .as("Execution count for step: %s", stepId)
                .isEqualTo(expectedCount);
        });
    }

    // True when any workflow-level record carries the given status.
    private boolean hasWorkflowRecordWithStatus(ExecutionTracker.ExecutionHistory history,
                                                ExecutionTracker.ExecutionStatus status) {
        return history.getOrderedExecutions().stream()
            .anyMatch(record ->
                record.getType() == ExecutionTracker.RecordType.WORKFLOW &&
                record.getStatus() == status
            );
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions/strategy/ExpectedBehavior.java
|
package ai.driftkit.workflow.test.assertions.strategy;
import lombok.Builder;
import lombok.Data;
import java.util.List;
import java.util.Map;
/**
 * Describes expected workflow behavior for assertions.
 * Fields left {@code null} (or {@code false} for the boolean flags) are
 * treated as "not checked" by the assertion strategies consuming this spec.
 */
@Data
@Builder
public class ExpectedBehavior {
    /**
     * Expected step execution order (strict); null/empty disables the check.
     */
    private List<String> stepOrder;
    /**
     * Expected steps to execute (any order); null/empty disables the check.
     */
    private List<String> expectedSteps;
    /**
     * Steps that should NOT execute; null disables the check.
     */
    private List<String> unexpectedSteps;
    /**
     * Expected execution counts per step ID; null disables the check.
     */
    private Map<String, Integer> executionCounts;
    /**
     * Whether workflow should complete successfully.
     */
    private boolean shouldComplete;
    /**
     * Whether workflow should fail.
     */
    private boolean shouldFail;
    /**
     * Expected failure message pattern (regex).
     */
    private String failurePattern;
    /**
     * Maximum allowed execution time in milliseconds.
     */
    private Long maxExecutionTime;
    /**
     * Minimum required execution time in milliseconds.
     */
    private Long minExecutionTime;
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/assertions/strategy/StrictOrderAssertionStrategy.java
|
package ai.driftkit.workflow.test.assertions.strategy;
import ai.driftkit.workflow.test.core.ExecutionTracker;
import org.assertj.core.api.Assertions;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Assertion strategy that verifies steps started in an exact order.
 * Count and unexpected-step checks are only applied when a step order
 * is specified (matching the original short-circuit behavior).
 */
public class StrictOrderAssertionStrategy extends AssertionStrategy {

    public StrictOrderAssertionStrategy() {
        super("Strict Order Assertion");
    }

    @Override
    public void verify(ExecutionTracker.ExecutionHistory history, ExpectedBehavior expectedBehavior) {
        List<String> expectedOrder = expectedBehavior.getStepOrder();
        if (expectedOrder == null || expectedOrder.isEmpty()) {
            return; // nothing to verify
        }
        List<String> actualOrder = startedStepIds(history);
        Assertions.assertThat(actualOrder)
            .as("Step execution order")
            .containsExactly(expectedOrder.toArray(new String[0]));
        checkExecutionCounts(actualOrder, expectedBehavior);
        checkUnexpectedSteps(actualOrder, expectedBehavior);
    }

    // Step IDs of STEP records with STARTED status, in timestamp order.
    private List<String> startedStepIds(ExecutionTracker.ExecutionHistory history) {
        return history.getOrderedExecutions().stream()
            .filter(record -> record.getType() == ExecutionTracker.RecordType.STEP)
            .filter(record -> record.getStatus() == ExecutionTracker.ExecutionStatus.STARTED)
            .map(ExecutionTracker.ExecutionRecord::getStepId)
            .collect(Collectors.toList());
    }

    // Verifies per-step start counts; skipped when no counts were specified.
    private void checkExecutionCounts(List<String> actualOrder, ExpectedBehavior expectedBehavior) {
        if (expectedBehavior.getExecutionCounts() == null) {
            return;
        }
        expectedBehavior.getExecutionCounts().forEach((stepId, expectedCount) -> {
            long actualCount = actualOrder.stream()
                .filter(stepId::equals)
                .count();
            Assertions.assertThat(actualCount)
                .as("Execution count for step: %s", stepId)
                .isEqualTo(expectedCount);
        });
    }

    // Verifies no forbidden step started; skipped when the list is unspecified.
    private void checkUnexpectedSteps(List<String> actualOrder, ExpectedBehavior expectedBehavior) {
        if (expectedBehavior.getUnexpectedSteps() == null) {
            return;
        }
        List<String> unexpectedFound = actualOrder.stream()
            .filter(expectedBehavior.getUnexpectedSteps()::contains)
            .collect(Collectors.toList());
        Assertions.assertThat(unexpectedFound)
            .as("Unexpected steps that were executed")
            .isEmpty();
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/AnnotationWorkflowTest.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.domain.WorkflowEngineConfig;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemoryAsyncStepStateRepository;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemoryChatSessionRepository;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemorySuspensionDataRepository;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemoryWorkflowStateRepository;
import ai.driftkit.workflow.engine.async.InMemoryProgressTracker;
import ai.driftkit.common.service.impl.InMemoryChatStore;
import ai.driftkit.common.service.impl.SimpleTextTokenizer;
import lombok.extern.slf4j.Slf4j;
import java.util.Objects;
/**
 * Base class for testing annotation-based workflows.
 * Provides an in-memory engine configuration suited to annotation scanning.
 *
 * <p>Subclasses register their workflow instances, typically via
 * {@link #registerWorkflow(Object)}:
 * <pre>{@code
 * @Override
 * protected void registerWorkflows() {
 *     registerWorkflow(new MyAnnotatedWorkflow());
 *     registerWorkflow(new AnotherAnnotatedWorkflow());
 * }
 * }</pre>
 */
@Slf4j
public abstract class AnnotationWorkflowTest extends WorkflowTestBase {

    /**
     * Builds a workflow engine wired entirely with in-memory repositories,
     * sized for small deterministic tests.
     */
    @Override
    protected WorkflowEngine createEngine() {
        log.debug("Creating workflow engine for annotation-based workflows");
        InMemoryWorkflowStateRepository stateRepository = new InMemoryWorkflowStateRepository();
        WorkflowEngineConfig engineConfig = WorkflowEngineConfig.builder()
            .coreThreads(1)
            .maxThreads(2)
            .queueCapacity(1000)
            .scheduledThreads(2)
            .defaultStepTimeoutMs(300000) // 5 minutes per step
            .stateRepository(stateRepository)
            .progressTracker(new InMemoryProgressTracker())
            .chatSessionRepository(new InMemoryChatSessionRepository())
            .chatStore(new InMemoryChatStore(new SimpleTextTokenizer()))
            .asyncStepStateRepository(new InMemoryAsyncStepStateRepository())
            .suspensionDataRepository(new InMemorySuspensionDataRepository())
            .build();
        return new WorkflowEngine(engineConfig);
    }

    /**
     * Registers an annotation-based workflow instance with the engine.
     *
     * @param workflowInstance the workflow instance to register (must not be null)
     */
    protected void registerWorkflow(Object workflowInstance) {
        Objects.requireNonNull(workflowInstance, "workflowInstance cannot be null");
        log.debug("Registering annotation-based workflow: {}", workflowInstance.getClass().getSimpleName());
        engine.register(workflowInstance);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/ExecutionTracker.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
/**
 * Tracks workflow and step executions for testing purposes.
 * Thread-safe implementation for concurrent test scenarios.
 *
 * <p>Events are appended to an in-memory log; per-step execution counts are
 * kept under keys of the form {@code "workflowId.stepId"} and are bumped only
 * on STARTED step events.
 */
@Slf4j
public class ExecutionTracker {
    // Append-only event log; copy-on-write keeps iteration safe while tests record concurrently.
    private final List<ExecutionRecord> executions = new CopyOnWriteArrayList<>();
    // Execution counts keyed by "workflowId.stepId" (see createKey below).
    private final Map<String, Integer> executionCounts = new ConcurrentHashMap<>();
    /**
     * Records a workflow-level execution event.
     *
     * @param instance the workflow instance (must not be null)
     * @param status the event status (must not be null)
     * @param data optional payload (input for STARTED, result for COMPLETED, error for FAILED)
     */
    public void recordWorkflowEvent(WorkflowInstance instance, ExecutionStatus status, Object data) {
        Objects.requireNonNull(instance, "instance cannot be null");
        Objects.requireNonNull(status, "status cannot be null");
        ExecutionRecord record = new ExecutionRecord(
            RecordType.WORKFLOW,
            status,
            instance.getWorkflowId(),
            instance.getInstanceId(),
            null, // no stepId for workflow-level events
            data,
            System.currentTimeMillis()
        );
        executions.add(record);
        log.debug("Recorded workflow event: {}", record);
    }
    /**
     * Records a step-level execution event.
     * Only STARTED events increment the per-step execution count.
     *
     * @param context the step context (must not be null)
     * @param status the event status (must not be null)
     * @param data optional payload (input for STARTED, result for COMPLETED, error for FAILED)
     */
    public void recordStepEvent(StepContext context, ExecutionStatus status, Object data) {
        Objects.requireNonNull(context, "context cannot be null");
        Objects.requireNonNull(status, "status cannot be null");
        ExecutionRecord record = new ExecutionRecord(
            RecordType.STEP,
            status,
            context.getWorkflowId(),
            context.getRunId(),
            context.getStepId(),
            data,
            System.currentTimeMillis()
        );
        executions.add(record);
        if (status == ExecutionStatus.STARTED) {
            // NOTE(review): assumes StepContext.createKey() yields the same
            // "workflowId.stepId" format produced by createKey(...) below — confirm.
            incrementExecutionCount(context.createKey());
        }
        log.debug("Recorded step event: {}", record);
    }
    // Convenience methods used by WorkflowTestInterceptor.
    public void recordWorkflowStart(WorkflowInstance instance, Object input) {
        recordWorkflowEvent(instance, ExecutionStatus.STARTED, input);
    }
    public void recordWorkflowComplete(WorkflowInstance instance, Object result) {
        recordWorkflowEvent(instance, ExecutionStatus.COMPLETED, result);
    }
    public void recordWorkflowError(WorkflowInstance instance, Throwable error) {
        recordWorkflowEvent(instance, ExecutionStatus.FAILED, error);
    }
    public void recordStepStart(StepContext context) {
        recordStepEvent(context, ExecutionStatus.STARTED, context.getInput());
    }
    public void recordStepComplete(StepContext context, StepResult<?> result) {
        recordStepEvent(context, ExecutionStatus.COMPLETED, result);
    }
    public void recordStepError(StepContext context, Throwable error) {
        recordStepEvent(context, ExecutionStatus.FAILED, error);
    }
    /**
     * Gets an immutable snapshot of the execution history for analysis.
     *
     * @return immutable execution history
     */
    public ExecutionHistory getHistory() {
        return new ExecutionHistory(
            List.copyOf(executions),
            Map.copyOf(executionCounts)
        );
    }
    /**
     * Gets the execution count for a specific step.
     * Supports both exact matches and partial matches for steps with prefixes.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @return execution count (0 if the step never started)
     */
    public int getExecutionCount(String workflowId, String stepId) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        Objects.requireNonNull(stepId, "stepId cannot be null");
        // First try exact match
        String key = createKey(workflowId, stepId);
        Integer exactCount = executionCounts.get(key);
        if (exactCount != null) {
            return exactCount;
        }
        // Then try to find steps that contain the stepId (for branch-prefixed steps).
        // Note: this sums across all recorded IDs that contain stepId.
        String prefix = workflowId + ".";
        return executionCounts.entrySet().stream()
            .filter(entry ->
                entry.getKey().startsWith(prefix) &&
                entry.getKey().contains(stepId)
            )
            .mapToInt(Map.Entry::getValue)
            .sum();
    }
    /**
     * Checks if a step was executed.
     * Supports both exact matches and partial matches for steps with prefixes.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @return true if the step was executed at least once
     */
    public boolean wasExecuted(String workflowId, String stepId) {
        return getExecutionCount(workflowId, stepId) > 0;
    }
    /**
     * Gets all step executions for a workflow (any status).
     *
     * @param workflowId the workflow ID
     * @return list of step execution records
     */
    public List<ExecutionRecord> getStepExecutions(String workflowId) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        return executions.stream()
            .filter(exec -> exec.getWorkflowId().equals(workflowId))
            .filter(exec -> exec.getType() == RecordType.STEP)
            .collect(Collectors.toList());
    }
    /**
     * Gets all step IDs that COMPLETED for a workflow.
     * (Contrast with {@link #getExecutedSteps(String)}, which is based on STARTED events.)
     *
     * @param workflowId the workflow ID
     * @return set of executed step IDs
     */
    public Set<String> getExecutedStepIds(String workflowId) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        return getStepExecutions(workflowId).stream()
            .filter(exec -> exec.getStatus() == ExecutionStatus.COMPLETED)
            .map(ExecutionRecord::getStepId)
            .collect(Collectors.toSet());
    }
    /**
     * Gets the distinct step IDs that STARTED for a workflow, in first-start order.
     *
     * @param workflowId the workflow ID
     * @return list of step IDs that were executed
     */
    public List<String> getExecutedSteps(String workflowId) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        return executions.stream()
            .filter(exec -> exec.getWorkflowId().equals(workflowId))
            .filter(exec -> exec.getType() == RecordType.STEP)
            .filter(exec -> exec.getStatus() == ExecutionStatus.STARTED)
            .map(ExecutionRecord::getStepId)
            .distinct()
            .collect(Collectors.toList());
    }
    /**
     * Increments the execution count for a specific step.
     * This is used to track retry attempts.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     */
    public void incrementStepExecution(String workflowId, String stepId) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        Objects.requireNonNull(stepId, "stepId cannot be null");
        String key = createKey(workflowId, stepId);
        incrementExecutionCount(key);
        log.debug("Incremented execution count for {}.{} to {}",
            workflowId, stepId, executionCounts.get(key));
    }
    /**
     * Clears all tracked executions and counts.
     */
    public void clear() {
        log.debug("Clearing execution tracker");
        executions.clear();
        executionCounts.clear();
    }
    // Atomically bumps the count stored under the given key.
    private void incrementExecutionCount(String key) {
        executionCounts.merge(key, 1, Integer::sum);
    }
    /**
     * Creates a unique key for workflow and step combination.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @return unique key in format "workflowId.stepId"
     */
    private String createKey(String workflowId, String stepId) {
        return workflowId + "." + stepId;
    }
    /**
     * Immutable snapshot of execution history.
     */
    @Getter
    @RequiredArgsConstructor
    public static class ExecutionHistory {
        private final List<ExecutionRecord> executions;
        private final Map<String, Integer> executionCounts;
        /**
         * Gets all execution records in insertion order.
         *
         * @return list of all records
         */
        public List<ExecutionRecord> getRecords() {
            return executions;
        }
        /**
         * Gets executions sorted by timestamp.
         * Stream.sorted is stable, so records sharing a millisecond keep insertion order.
         *
         * @return ordered list of executions
         */
        public List<ExecutionRecord> getOrderedExecutions() {
            return executions.stream()
                .sorted(Comparator.comparing(ExecutionRecord::getTimestamp))
                .collect(Collectors.toList());
        }
        /**
         * Gets only step executions.
         *
         * @return list of step executions
         */
        public List<ExecutionRecord> getStepExecutions() {
            return executions.stream()
                .filter(exec -> exec.getType() == RecordType.STEP)
                .collect(Collectors.toList());
        }
        /**
         * Gets only workflow executions.
         *
         * @return list of workflow executions
         */
        public List<ExecutionRecord> getWorkflowExecutions() {
            return executions.stream()
                .filter(exec -> exec.getType() == RecordType.WORKFLOW)
                .collect(Collectors.toList());
        }
    }
    /**
     * Universal execution record: one workflow-level or step-level event.
     */
    @Getter
    @RequiredArgsConstructor
    public static class ExecutionRecord {
        private final RecordType type;
        private final ExecutionStatus status;
        private final String workflowId;
        // Instance ID for WORKFLOW records; run ID for STEP records.
        private final String runId;
        // Null for WORKFLOW records.
        private final String stepId;
        // Payload: input for STARTED, result for COMPLETED, Throwable for FAILED.
        private final Object data;
        private final long timestamp;
        @Override
        public String toString() {
            return String.format("ExecutionRecord{type=%s, status=%s, workflow=%s, step=%s, timestamp=%d}",
                type, status, workflowId, stepId, timestamp);
        }
    }
    /**
     * Type of record.
     */
    public enum RecordType {
        WORKFLOW,
        STEP
    }
    /**
     * Execution status of a recorded event.
     */
    public enum ExecutionStatus {
        STARTED,
        COMPLETED,
        FAILED
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/FluentWorkflowTest.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.builder.WorkflowBuilder;
import ai.driftkit.workflow.engine.graph.WorkflowGraph;
import ai.driftkit.workflow.engine.core.WorkflowEngine;
import ai.driftkit.workflow.engine.domain.WorkflowEngineConfig;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemoryAsyncStepStateRepository;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemoryChatSessionRepository;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemorySuspensionDataRepository;
import ai.driftkit.workflow.engine.persistence.inmemory.InMemoryWorkflowStateRepository;
import ai.driftkit.workflow.engine.async.InMemoryProgressTracker;
import ai.driftkit.common.service.impl.InMemoryChatStore;
import ai.driftkit.common.service.impl.SimpleTextTokenizer;
import lombok.extern.slf4j.Slf4j;
import java.util.Objects;
/**
 * Base class for testing workflows defined through the fluent builder API.
 * Provides an in-memory engine configuration for programmatic definitions.
 *
 * <p>Subclasses register their workflows, typically via
 * {@link #registerWorkflow(WorkflowBuilder)}:
 * <pre>{@code
 * @Override
 * protected void registerWorkflows() {
 *     registerWorkflow(
 *         WorkflowBuilder.define("order-workflow", OrderRequest.class, OrderResult.class)
 *             .then("validate", this::validateOrder)
 *             .then("process", this::processOrder)
 *     );
 * }
 * }</pre>
 */
@Slf4j
public abstract class FluentWorkflowTest extends WorkflowTestBase {

    /**
     * Builds a workflow engine wired entirely with in-memory repositories,
     * sized for small deterministic tests.
     */
    @Override
    protected WorkflowEngine createEngine() {
        log.debug("Creating workflow engine for fluent API workflows");
        InMemoryWorkflowStateRepository stateRepository = new InMemoryWorkflowStateRepository();
        WorkflowEngineConfig engineConfig = WorkflowEngineConfig.builder()
            .coreThreads(1)
            .maxThreads(2)
            .queueCapacity(1000)
            .scheduledThreads(2)
            .defaultStepTimeoutMs(300000) // 5 minutes per step
            .stateRepository(stateRepository)
            .progressTracker(new InMemoryProgressTracker())
            .chatSessionRepository(new InMemoryChatSessionRepository())
            .chatStore(new InMemoryChatStore(new SimpleTextTokenizer()))
            .asyncStepStateRepository(new InMemoryAsyncStepStateRepository())
            .suspensionDataRepository(new InMemorySuspensionDataRepository())
            .build();
        return new WorkflowEngine(engineConfig);
    }

    /**
     * Builds the graph from a fluent builder and registers it with the engine.
     *
     * @param builder the workflow builder (must not be null)
     * @param <T> input type
     * @param <R> result type
     */
    protected <T, R> void registerWorkflow(WorkflowBuilder<T, R> builder) {
        Objects.requireNonNull(builder, "builder cannot be null");
        WorkflowGraph<T, R> graph = builder.build();
        log.debug("Registering fluent API workflow: {}", graph.id());
        engine.register(graph);
    }

    /**
     * Registers an already-built workflow graph with the engine.
     *
     * @param graph the workflow graph (must not be null)
     */
    protected void registerWorkflow(WorkflowGraph<?, ?> graph) {
        Objects.requireNonNull(graph, "graph cannot be null");
        log.debug("Registering workflow graph: {}", graph.id());
        engine.register(graph);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/MockBuilder.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.StepResult;
import lombok.RequiredArgsConstructor;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
/**
 * Fluent builder for creating mock definitions.
 * Provides a clean API for configuring workflow step mocks;
 * entry point is {@link #workflow(String)}.
 */
public class MockBuilder {
    // Registry that receives every mock definition built through this API.
    private final MockRegistry registry;
    // Package-private: instances are created by the test framework.
    MockBuilder(MockRegistry registry) {
        this.registry = registry;
    }
    /**
     * Starts building a mock for a workflow.
     *
     * @param workflowId the workflow ID (must not be null)
     * @return workflow mock builder
     */
    public WorkflowMockBuilder workflow(String workflowId) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        return new WorkflowMockBuilder(registry, workflowId);
    }
/**
 * Builder scoped to a single workflow.
 */
@RequiredArgsConstructor
public static class WorkflowMockBuilder {
    private final MockRegistry registry;
    private final String workflowId;
    /**
     * Starts building a mock for a specific step of this workflow.
     *
     * @param stepId the step ID (must not be null)
     * @return step mock builder
     */
    public StepMockBuilder step(String stepId) {
        Objects.requireNonNull(stepId, "stepId cannot be null");
        return new StepMockBuilder(registry, workflowId, stepId);
    }
}
/**
 * Builder scoped to a single step; chooses how the mock is triggered.
 */
@RequiredArgsConstructor
public static class StepMockBuilder {
    private final MockRegistry registry;
    private final String workflowId;
    private final String stepId;
    /**
     * Creates a mock that always executes (no input condition).
     *
     * @return behavior builder
     */
    public BehaviorBuilder always() {
        return new BehaviorBuilder(registry, workflowId, stepId, null);
    }
    /**
     * Creates a conditional mock that only applies when the input matches.
     *
     * @param inputType the expected input type
     * @param condition the condition to evaluate against the input
     * @param <I> input type
     * @return behavior builder
     */
    public <I> BehaviorBuilder when(Class<I> inputType, Predicate<I> condition) {
        Objects.requireNonNull(inputType, "inputType cannot be null");
        Objects.requireNonNull(condition, "condition cannot be null");
        return new BehaviorBuilder(registry, workflowId, stepId, new TypedCondition<>(inputType, condition));
    }
    /**
     * Creates a mock that fails a specific number of times before succeeding.
     *
     * @param times number of times to fail (must be positive)
     * @return timed behavior builder
     * @throws IllegalArgumentException if times is not positive
     */
    public TimedBehaviorBuilder times(int times) {
        if (times <= 0) {
            throw new IllegalArgumentException("times must be positive");
        }
        return new TimedBehaviorBuilder(registry, workflowId, stepId, times);
    }
    /**
     * Creates a mock backed by an existing mock object (e.g. a Mockito mock).
     *
     * @param mockObject the mock object to use
     * @return registration builder
     */
    public MockRegistrationBuilder mockWith(Object mockObject) {
        Objects.requireNonNull(mockObject, "mockObject cannot be null");
        return new MockRegistrationBuilder(registry, workflowId, stepId, mockObject);
    }
}
/**
 * Builder for a mock's behavior: what the mocked step returns, throws,
 * or continues with. Registration is routed through the optional input
 * condition captured at construction time.
 */
@RequiredArgsConstructor
public static class BehaviorBuilder {
    private final MockRegistry registry;
    private final String workflowId;
    private final String stepId;
    private final TypedCondition<?> condition;

    /**
     * Mock computes its result from the step input.
     *
     * @param inputType the expected input type (must not be null)
     * @param resultProvider function producing the result (must not be null)
     * @param <I> input type
     * @param <O> output type
     */
    public <I, O> void thenReturn(Class<I> inputType, Function<I, StepResult<O>> resultProvider) {
        Objects.requireNonNull(inputType, "inputType cannot be null");
        Objects.requireNonNull(resultProvider, "resultProvider cannot be null");
        register(MockDefinition.of(workflowId, stepId, inputType, resultProvider));
    }

    /**
     * Mock returns a fixed result regardless of input.
     *
     * @param inputType the expected input type
     * @param result the result to return (must not be null)
     * @param <I> input type
     * @param <O> output type
     */
    public <I, O> void thenReturn(Class<I> inputType, StepResult<O> result) {
        Objects.requireNonNull(result, "result cannot be null");
        thenReturn(inputType, input -> result);
    }

    /**
     * Mock throws the given exception when the step executes.
     *
     * @param exception the exception to throw (must not be null)
     */
    public void thenFail(Exception exception) {
        Objects.requireNonNull(exception, "exception cannot be null");
        register(MockDefinition.throwing(workflowId, stepId, Object.class, exception));
    }

    /**
     * Mock succeeds, continuing the workflow with the given value.
     *
     * @param value the value to continue with
     * @param <O> output type
     */
    public <O> void thenSucceed(O value) {
        register(MockDefinition.returning(
            workflowId, stepId, Object.class, StepResult.continueWith(value)
        ));
    }

    // Single registration path: conditional when a condition was supplied,
    // unconditional otherwise.
    private void register(MockDefinition<?> mock) {
        if (condition == null) {
            registry.register(mock);
        } else {
            condition.registerConditionalMock(registry, workflowId, stepId, mock);
        }
    }
}
/**
 * Builder for timed mock behavior: fail a fixed number of times,
 * then hand off to an {@link AfterFailureBuilder} for the success phase.
 */
@RequiredArgsConstructor
public static class TimedBehaviorBuilder {
    private final MockRegistry registry;
    private final String workflowId;
    private final String stepId;
    private final int times;
    // Exception thrown during each failing invocation; replaceable via thenFail(...).
    private Exception failureException = new RuntimeException("Mock failure");
    /**
     * Sets the exception to throw during the failing invocations.
     *
     * @param exception the exception to throw (must not be null)
     * @return this builder
     */
    public TimedBehaviorBuilder thenFail(Exception exception) {
        Objects.requireNonNull(exception, "exception cannot be null");
        this.failureException = exception;
        return this;
    }
    /**
     * Finishes the failure phase and configures what happens afterwards.
     *
     * @return behavior builder for the success case
     */
    public AfterFailureBuilder afterwards() {
        return new AfterFailureBuilder(registry, workflowId, stepId, times, failureException);
    }
}
/**
 * Builder for the success behavior that follows a configured run of failures.
 */
@RequiredArgsConstructor
public static class AfterFailureBuilder {
    private final MockRegistry registry;
    private final String workflowId;
    private final String stepId;
    private final int failTimes;
    private final Exception failureException;

    /**
     * After the failures, the mock succeeds with the given value.
     *
     * @param value the value to return on success
     * @param <O> output type
     */
    public <O> void thenSucceed(O value) {
        registry.register(new FailureThenSuccessMock(
            workflowId, stepId, failTimes, failureException, StepResult.continueWith(value)
        ));
    }

    /**
     * After the failures, the mock computes its result from the typed input.
     *
     * @param inputType the expected input type
     * @param resultProvider function to create the result
     * @param <I> input type
     * @param <O> output type
     */
    public <I, O> void thenReturn(Class<I> inputType, Function<I, StepResult<O>> resultProvider) {
        Objects.requireNonNull(inputType, "inputType cannot be null");
        Objects.requireNonNull(resultProvider, "resultProvider cannot be null");
        // A StepResult<O> is a StepResult<?>, so the method reference upcasts safely
        // without an intermediate wrapper lambda.
        registry.register(new FailureThenSuccessMock(
            workflowId, stepId, failTimes, failureException,
            new DynamicResultProvider<I>(inputType, resultProvider::apply)
        ));
    }
}
/**
 * Pairs an input type with a predicate so conditional mocks can be
 * registered without losing type safety.
 */
private static class TypedCondition<I> {
    private final Class<I> expectedType;
    private final Predicate<I> predicate;

    TypedCondition(Class<I> inputType, Predicate<I> predicate) {
        this.expectedType = inputType;
        this.predicate = predicate;
    }

    /**
     * Registers {@code mock} as conditional on this condition's type and predicate.
     */
    void registerConditionalMock(MockRegistry registry, String workflowId, String stepId, MockDefinition<?> mock) {
        registry.registerConditional(workflowId, stepId, expectedType, predicate, mock);
    }
}
/**
 * Mock that fails a fixed number of times per workflow instance, then succeeds.
 * Attempts are counted per run ID so concurrent test executions of the same
 * step do not interfere with each other.
 */
private static class FailureThenSuccessMock extends MockDefinition<Object> {
    // Use a map to track attempts per workflow instance to handle concurrent tests
    private final Map<String, Integer> attemptCounts = new ConcurrentHashMap<>();
    // Number of attempts that must fail before the first success.
    private final int failTimes;
    // Exception reported (as StepResult.fail) on each failing attempt.
    private final Exception failureException;
    // Either a StepResult, a DynamicResultProvider, or a plain value wrapped in continueWith.
    private final Object successResult;
    // Track completed executions for cleanup
    private final Set<String> completedExecutions = ConcurrentHashMap.newKeySet();
    // Max entries before forced cleanup
    private static final int MAX_TRACKED_EXECUTIONS = 1000;

    FailureThenSuccessMock(String workflowId, String stepId, int failTimes,
                           Exception failureException, Object successResult) {
        // behavior is null because execute() is overridden below; the base behavior is never invoked.
        super(workflowId, stepId, Object.class, null);
        this.failTimes = failTimes;
        this.failureException = failureException;
        this.successResult = successResult;
    }

    /**
     * Increments the attempt count for this workflow instance and returns a
     * failure until {@code failTimes} attempts have been made, then the
     * configured success result.
     */
    @Override
    public StepResult<?> execute(Object input, StepContext context) {
        // Perform cleanup if too many entries
        if (attemptCounts.size() > MAX_TRACKED_EXECUTIONS) {
            cleanupCompletedExecutions();
        }
        // Use workflow instance ID to track attempts per execution
        String instanceKey = context.getRunId();
        // compute() is atomic per key, so concurrent attempts on the same run are counted correctly.
        int attemptCount = attemptCounts.compute(instanceKey, (k, v) -> v == null ? 1 : v + 1);
        if (attemptCount <= failTimes) {
            // Return StepResult.Fail instead of throwing exception
            // This ensures the retry mechanism sees the failure
            return StepResult.fail(failureException);
        }
        // Clean up the count after success to avoid memory leak
        attemptCounts.remove(instanceKey);
        // NOTE(review): completedExecutions only shrinks when cleanupCompletedExecutions()
        // fires, which is triggered by attemptCounts growth — presumably acceptable for
        // test runs, but confirm long suites don't accumulate entries here.
        completedExecutions.add(instanceKey);
        if (successResult instanceof StepResult) {
            return (StepResult<?>) successResult;
        } else if (successResult instanceof DynamicResultProvider) {
            return ((DynamicResultProvider<?>) successResult).provide(input);
        } else {
            return StepResult.continueWith(successResult);
        }
    }

    /**
     * Clean up completed executions from the attempt counts map.
     */
    private void cleanupCompletedExecutions() {
        completedExecutions.forEach(attemptCounts::remove);
        completedExecutions.clear();
    }

    /**
     * Clean up all tracked state (attempt counts and completion markers).
     */
    public void cleanup() {
        attemptCounts.clear();
        completedExecutions.clear();
    }
}
/**
 * Computes a step result from a runtime-typed input, validating the
 * input's type before invoking the underlying function.
 */
private static class DynamicResultProvider<I> {
    private final Class<I> inputType;
    private final Function<I, StepResult<?>> provider;

    DynamicResultProvider(Class<I> inputType, Function<I, StepResult<?>> provider) {
        this.inputType = inputType;
        this.provider = provider;
    }

    /**
     * Provides a result for the given input with safe type casting.
     *
     * @param input the input object to process
     * @return the step result
     * @throws IllegalArgumentException if the input is not of the expected type
     */
    StepResult<?> provide(Object input) {
        if (!inputType.isInstance(input)) {
            throw new IllegalArgumentException(
                "Expected input of type " + inputType.getName() + " but got " +
                (input != null ? input.getClass().getName() : "null")
            );
        }
        try {
            // isInstance already passed, so Class.cast cannot fail here;
            // a CCE can only originate inside the provider function itself.
            return provider.apply(inputType.cast(input));
        } catch (ClassCastException e) {
            throw new IllegalStateException(
                "Type casting failed: expected " + inputType.getName() +
                " but got " + (input != null ? input.getClass().getName() : "null"), e
            );
        }
    }
}
/**
 * Builder that adapts an arbitrary, pre-built mock object into the registry.
 */
@RequiredArgsConstructor
public static class MockRegistrationBuilder {
    private final MockRegistry registry;
    private final String workflowId;
    private final String stepId;
    private final Object mockObject;

    /**
     * Wraps the mock object in a delegating definition and registers it.
     */
    public void register() {
        registry.register(new DelegatingMockDefinition(workflowId, stepId, mockObject));
    }
}
/**
 * Mock definition that delegates to an external mock object via reflection.
 * It first looks for a public single-argument {@code process}/{@code execute}/{@code apply}
 * method compatible with the input, then falls back to a public zero-argument
 * {@code process}/{@code execute}/{@code get} method.
 */
private static class DelegatingMockDefinition extends MockDefinition<Object> {
    private final Object mockObject;

    DelegatingMockDefinition(String workflowId, String stepId, Object mockObject) {
        // behavior is null because execute() is overridden below.
        super(workflowId, stepId, Object.class, null);
        this.mockObject = mockObject;
    }

    @Override
    public StepResult<?> execute(Object input, StepContext context) {
        try {
            // Prefer a single-argument method that accepts the actual input.
            Method unary = findUnaryMethod(input, "process", "execute", "apply");
            if (unary != null) {
                return toStepResult(unary.invoke(mockObject, input));
            }
            // Fallback: a no-argument supplier-style method.
            Method nullary = findNullaryMethod("process", "execute", "get");
            if (nullary != null) {
                return toStepResult(nullary.invoke(mockObject));
            }
            // Caught below and surfaced as a failed step result, matching the
            // existing contract that this method does not throw.
            throw new IllegalStateException(
                "Mock object does not have a suitable method: " + mockObject.getClass()
            );
        } catch (java.lang.reflect.InvocationTargetException e) {
            // Unwrap so tests see the mock's own exception, not the reflection wrapper.
            Throwable cause = e.getCause();
            return StepResult.fail(cause instanceof Exception actual ? actual : e);
        } catch (Exception e) {
            return StepResult.fail(e);
        }
    }

    /** Finds a public method with one of the given names taking exactly one parameter compatible with input. */
    private Method findUnaryMethod(Object input, String... names) {
        for (Method method : mockObject.getClass().getMethods()) {
            if (!isNamed(method, names)) {
                continue;
            }
            Class<?>[] paramTypes = method.getParameterTypes();
            if (paramTypes.length == 1 && paramTypes[0].isInstance(input)) {
                return method;
            }
        }
        return null;
    }

    /** Finds a public no-argument method with one of the given names. */
    private Method findNullaryMethod(String... names) {
        for (Method method : mockObject.getClass().getMethods()) {
            if (isNamed(method, names) && method.getParameterCount() == 0) {
                return method;
            }
        }
        return null;
    }

    /** True if the method's name matches any of the candidate names. */
    private static boolean isNamed(Method method, String... names) {
        for (String name : names) {
            if (method.getName().equals(name)) {
                return true;
            }
        }
        return false;
    }

    /** Wraps a raw invocation result in a StepResult if it is not one already. */
    private static StepResult<?> toStepResult(Object result) {
        return result instanceof StepResult ? (StepResult<?>) result : StepResult.continueWith(result);
    }
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/MockDefinition.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.StepResult;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Function;
/**
 * Definition of a mock behavior for a workflow step.
 * Type-safe container for mock configuration: it pairs a workflow/step key
 * with an expected input type and the behavior to run in place of the step.
 *
 * @param <I> input type
 */
@Getter
@RequiredArgsConstructor
public class MockDefinition<I> {
    protected final String workflowId;
    protected final String stepId;
    protected final Class<I> inputType;
    protected final MockBehavior<I> behavior;

    /**
     * Executes the mock behavior after validating the runtime input type.
     *
     * @param input the step input; must be an instance of {@code inputType}
     * @param context the step context
     * @return the mocked step result
     * @throws IllegalArgumentException if {@code input} is {@code null} or not an
     *         instance of {@code inputType} (note: {@code isInstance} is false for null)
     */
    public StepResult<?> execute(Object input, StepContext context) {
        if (!inputType.isInstance(input)) {
            throw new IllegalArgumentException(
                "Mock expects input of type " + inputType.getName() +
                " but got " + (input != null ? input.getClass().getName() : "null")
            );
        }
        @SuppressWarnings("unchecked") // guarded by the isInstance check above
        I typedInput = (I) input;
        return behavior.execute(typedInput, context);
    }

    /**
     * Creates a mock definition from a simple function.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @param inputType the expected input type
     * @param behavior the mock behavior function
     * @param <I> input type
     * @param <O> output type
     * @return mock definition
     */
    public static <I, O> MockDefinition<I> of(String workflowId, String stepId,
                                              Class<I> inputType,
                                              Function<I, StepResult<O>> behavior) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        Objects.requireNonNull(stepId, "stepId cannot be null");
        Objects.requireNonNull(inputType, "inputType cannot be null");
        Objects.requireNonNull(behavior, "behavior cannot be null");
        return new MockDefinition<>(
            workflowId,
            stepId,
            inputType,
            (input, context) -> behavior.apply(input)
        );
    }

    /**
     * Creates a mock definition that returns any type.
     * Used when the exact return type is not known at compile time.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @param inputType the expected input type
     * @param behavior the mock behavior function
     * @param <I> input type
     * @return mock definition
     */
    public static <I> MockDefinition<I> ofAny(String workflowId, String stepId,
                                              Class<I> inputType,
                                              Function<I, StepResult<?>> behavior) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        Objects.requireNonNull(stepId, "stepId cannot be null");
        Objects.requireNonNull(inputType, "inputType cannot be null");
        Objects.requireNonNull(behavior, "behavior cannot be null");
        return new MockDefinition<>(
            workflowId,
            stepId,
            inputType,
            (input, context) -> behavior.apply(input)
        );
    }

    /**
     * Creates a mock definition whose behavior also receives the step context.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @param inputType the expected input type
     * @param behavior the mock behavior function with context
     * @param <I> input type
     * @param <O> output type
     * @return mock definition
     */
    public static <I, O> MockDefinition<I> ofWithContext(String workflowId, String stepId,
                                                         Class<I> inputType,
                                                         BiFunction<I, StepContext, StepResult<O>> behavior) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        Objects.requireNonNull(stepId, "stepId cannot be null");
        Objects.requireNonNull(inputType, "inputType cannot be null");
        Objects.requireNonNull(behavior, "behavior cannot be null");
        return new MockDefinition<>(
            workflowId,
            stepId,
            inputType,
            behavior::apply
        );
    }

    /**
     * Creates a mock that always returns a specific result.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @param inputType the expected input type
     * @param result the result to return
     * @param <I> input type
     * @param <O> output type
     * @return mock definition
     */
    public static <I, O> MockDefinition<I> returning(String workflowId, String stepId,
                                                     Class<I> inputType,
                                                     StepResult<O> result) {
        Objects.requireNonNull(result, "result cannot be null");
        return of(workflowId, stepId, inputType, input -> result);
    }

    /**
     * Creates a mock that always throws an exception.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @param inputType the expected input type
     * @param exception the exception to throw
     * @param <I> input type
     * @return mock definition
     */
    public static <I> MockDefinition<I> throwing(String workflowId, String stepId,
                                                 Class<I> inputType,
                                                 Exception exception) {
        Objects.requireNonNull(exception, "exception cannot be null");
        return of(workflowId, stepId, inputType, input -> {
            // Rethrow runtime exceptions as-is so callers (e.g. retry-by-exception-type
            // logic) observe the original type instead of a RuntimeException wrapper.
            // Checked exceptions must be wrapped to escape the lambda.
            if (exception instanceof RuntimeException runtime) {
                throw runtime;
            }
            throw new RuntimeException(exception);
        });
    }

    /**
     * Interface for mock behavior implementation.
     *
     * @param <I> input type
     */
    @FunctionalInterface
    public interface MockBehavior<I> {
        /**
         * Executes the mock behavior.
         *
         * @param input the step input
         * @param context the step context
         * @return the mocked step result
         */
        StepResult<?> execute(I input, StepContext context);
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/MockRegistry.java
|
package ai.driftkit.workflow.test.core;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;
/**
 * Registry for managing mock definitions.
 * Thread-safe implementation for concurrent test scenarios.
 *
 * <p>Lookup precedence in {@link #findMock(StepContext)}: conditional mocks are
 * consulted before unconditional ones, and an exact "workflowId.stepId" key match
 * is preferred over a partial match (used for steps whose IDs carry branch
 * prefixes). For unconditional mocks, the most recently registered one wins.
 */
@Slf4j
public class MockRegistry {
    // Key format: "workflowId.stepId" (see createKey). Lists are append-only;
    // the last element of a regular-mock list is the active mock.
    private final Map<String, List<MockDefinition<?>>> mocks = new ConcurrentHashMap<>();
    private final Map<String, List<ConditionalMock<?>>> conditionalMocks = new ConcurrentHashMap<>();
    // NOTE(review): the lists are plain ArrayLists; writes go through compute()
    // (atomic per key) but reads in findMock are unsynchronized — presumably
    // register-then-run test usage makes this safe; confirm if mocks are
    // registered while a workflow is executing.

    /**
     * Registers a mock for a specific workflow step.
     * Later registrations for the same step take precedence over earlier ones.
     *
     * @param mock the mock definition to register
     */
    public void register(MockDefinition<?> mock) {
        Objects.requireNonNull(mock, "mock cannot be null");
        String key = createKey(mock.getWorkflowId(), mock.getStepId());
        log.debug("Registering mock for {}", key);
        // compute() gives per-key atomicity for the list creation + append.
        mocks.compute(key, (k, list) -> {
            if (list == null) {
                list = new ArrayList<>();
            }
            list.add(mock);
            return list;
        });
    }

    /**
     * Registers a conditional mock for a specific workflow step.
     * The mock is only used when the input matches both the type and predicate.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @param inputType the expected input type for type safety
     * @param condition the condition to evaluate
     * @param mock the mock to use when condition is true
     * @param <I> input type
     */
    public <I> void registerConditional(String workflowId, String stepId,
                                        Class<I> inputType, Predicate<I> condition, MockDefinition<?> mock) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        Objects.requireNonNull(stepId, "stepId cannot be null");
        Objects.requireNonNull(inputType, "inputType cannot be null");
        Objects.requireNonNull(condition, "condition cannot be null");
        Objects.requireNonNull(mock, "mock cannot be null");
        String key = createKey(workflowId, stepId);
        log.debug("Registering conditional mock for {}", key);
        ConditionalMock<I> conditionalMock = new ConditionalMock<>(inputType, condition, mock);
        conditionalMocks.compute(key, (k, list) -> {
            if (list == null) {
                list = new ArrayList<>();
            }
            list.add(conditionalMock);
            return list;
        });
    }

    /**
     * Finds a mock for the given step context.
     * Supports both exact matches and partial matches for steps with branch prefixes.
     *
     * @param context the step context
     * @return the mock definition or empty if not found
     */
    public Optional<MockDefinition<?>> findMock(StepContext context) {
        Objects.requireNonNull(context, "context cannot be null");
        String key = context.createKey();
        String workflowId = context.getWorkflowId();
        String stepId = context.getStepId();
        log.debug("Looking for mock with key: {} (Available keys: regular={}, conditional={})",
            key, mocks.keySet(), conditionalMocks.keySet());
        // Try exact match first
        Optional<MockDefinition<?>> exactMatch = findExactMatch(key, context.getInput());
        if (exactMatch.isPresent()) {
            return exactMatch;
        }
        // If no exact match, try partial matching for branch-prefixed steps
        return findPartialMatch(workflowId, stepId, context.getInput());
    }

    /**
     * Find exact match for the given key.
     * Conditional mocks take precedence over regular mocks; among regular mocks
     * the most recently registered one wins.
     */
    private Optional<MockDefinition<?>> findExactMatch(String key, Object input) {
        // First try conditional mocks
        List<ConditionalMock<?>> conditionals = conditionalMocks.get(key);
        if (conditionals != null) {
            for (ConditionalMock<?> conditional : conditionals) {
                if (conditional.matches(input)) {
                    log.debug("Found matching conditional mock for {}", key);
                    return Optional.of(conditional.getMock());
                }
            }
        }
        // Then try regular mocks
        List<MockDefinition<?>> regularMocks = mocks.get(key);
        if (regularMocks != null && !regularMocks.isEmpty()) {
            // Return the most recent mock (last added)
            MockDefinition<?> mock = regularMocks.get(regularMocks.size() - 1);
            log.debug("Found regular mock for {}", key);
            return Optional.of(mock);
        }
        return Optional.empty();
    }

    /**
     * Find partial match for branch-prefixed steps: a registered step ID matches
     * when the executing step ID contains it as a substring (same workflow only).
     */
    private Optional<MockDefinition<?>> findPartialMatch(String workflowId, String stepId, Object input) {
        String workflowPrefix = workflowId + ".";
        // Check conditional mocks with partial matching
        Optional<MockDefinition<?>> conditionalMatch = findPartialConditionalMatch(workflowPrefix, stepId, input);
        if (conditionalMatch.isPresent()) {
            return conditionalMatch;
        }
        // Check regular mocks with partial matching
        return findPartialRegularMatch(workflowPrefix, stepId);
    }

    /**
     * Find partial match in conditional mocks.
     * Note: map iteration order is unspecified, so if multiple registered step
     * IDs partially match, which one wins is not deterministic.
     */
    private Optional<MockDefinition<?>> findPartialConditionalMatch(String workflowPrefix, String stepId, Object input) {
        for (Map.Entry<String, List<ConditionalMock<?>>> entry : conditionalMocks.entrySet()) {
            String entryKey = entry.getKey();
            if (!entryKey.startsWith(workflowPrefix)) {
                continue;
            }
            String registeredStepId = entryKey.substring(workflowPrefix.length());
            if (!stepId.contains(registeredStepId)) {
                continue;
            }
            log.debug("Trying partial match for conditional mocks: {} contains {}", stepId, registeredStepId);
            for (ConditionalMock<?> conditional : entry.getValue()) {
                if (conditional.matches(input)) {
                    log.debug("Found matching conditional mock via partial match");
                    return Optional.of(conditional.getMock());
                }
            }
        }
        return Optional.empty();
    }

    /**
     * Find partial match in regular mocks (same substring rule as conditional
     * partial matching; last-registered mock of the matching entry wins).
     */
    private Optional<MockDefinition<?>> findPartialRegularMatch(String workflowPrefix, String stepId) {
        for (Map.Entry<String, List<MockDefinition<?>>> entry : mocks.entrySet()) {
            String entryKey = entry.getKey();
            if (!entryKey.startsWith(workflowPrefix)) {
                continue;
            }
            String registeredStepId = entryKey.substring(workflowPrefix.length());
            if (!stepId.contains(registeredStepId)) {
                continue;
            }
            log.debug("Found partial match: {} contains {}", stepId, registeredStepId);
            List<MockDefinition<?>> mockList = entry.getValue();
            if (mockList != null && !mockList.isEmpty()) {
                MockDefinition<?> mock = mockList.get(mockList.size() - 1);
                log.debug("Found regular mock via partial match");
                return Optional.of(mock);
            }
        }
        log.debug("No mock found for workflow {} step {}", workflowPrefix, stepId);
        return Optional.empty();
    }

    /**
     * Removes all mocks (regular and conditional) for a specific workflow step.
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     */
    public void remove(String workflowId, String stepId) {
        Objects.requireNonNull(workflowId, "workflowId cannot be null");
        Objects.requireNonNull(stepId, "stepId cannot be null");
        String key = createKey(workflowId, stepId);
        mocks.remove(key);
        conditionalMocks.remove(key);
        log.debug("Removed all mocks for {}", key);
    }

    /**
     * Clears all registered mocks.
     */
    public void clear() {
        log.debug("Clearing all mocks");
        mocks.clear();
        conditionalMocks.clear();
    }

    /**
     * Gets the number of registered mocks (regular plus conditional).
     *
     * @return total mock count
     */
    public int size() {
        int regularCount = mocks.values().stream()
            .mapToInt(List::size)
            .sum();
        int conditionalCount = conditionalMocks.values().stream()
            .mapToInt(List::size)
            .sum();
        return regularCount + conditionalCount;
    }

    /**
     * Creates a unique key for workflow and step in the form "workflowId.stepId".
     *
     * @param workflowId the workflow ID
     * @param stepId the step ID
     * @return unique key
     */
    private String createKey(String workflowId, String stepId) {
        return new StringBuilder(workflowId.length() + stepId.length() + 1)
            .append(workflowId)
            .append('.')
            .append(stepId)
            .toString();
    }

    /**
     * Container for conditional mocks with type safety.
     * Holds the expected input type so the predicate is only ever invoked
     * with a correctly-typed argument.
     */
    private static class ConditionalMock<I> {
        private final Class<I> inputType;
        private final Predicate<I> condition;
        private final MockDefinition<?> mock;

        ConditionalMock(Class<I> inputType, Predicate<I> condition, MockDefinition<?> mock) {
            this.inputType = inputType;
            this.condition = condition;
            this.mock = mock;
        }

        /**
         * Safely checks if the input matches the condition.
         * Type mismatches and predicate exceptions are treated as "no match"
         * rather than propagated, so one bad conditional cannot break lookup.
         *
         * @param input the input object to test
         * @return true if input matches condition, false otherwise
         */
        boolean matches(Object input) {
            if (!inputType.isInstance(input)) {
                log.debug("Conditional mock type mismatch: expected {}, got {}",
                    inputType.getSimpleName(),
                    input != null ? input.getClass().getSimpleName() : "null");
                return false;
            }
            try {
                I typedInput = inputType.cast(input);
                boolean result = condition.test(typedInput);
                log.debug("Conditional mock evaluated: input={}, result={}", input, result);
                return result;
            } catch (Exception e) {
                log.warn("Conditional mock predicate threw exception", e);
                return false;
            }
        }

        MockDefinition<?> getMock() {
            return mock;
        }
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/StepContext.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.graph.StepNode;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
/**
 * Context information for a workflow step execution.
 * Immutable container bundling the workflow instance, the step node,
 * and the input passed to the step.
 */
@Getter
@RequiredArgsConstructor
public class StepContext {
    private final WorkflowInstance instance;
    private final StepNode step;
    private final Object input;

    /**
     * Gets the workflow ID of the owning instance.
     *
     * @return the workflow ID
     */
    public String getWorkflowId() {
        return instance.getWorkflowId();
    }

    /**
     * Gets the workflow run ID (the instance ID of this execution).
     *
     * @return the workflow run ID
     */
    public String getRunId() {
        return instance.getInstanceId();
    }

    /**
     * Gets the step ID.
     *
     * @return the step ID
     */
    public String getStepId() {
        return step.id();
    }

    /**
     * Gets the runtime class of the step input.
     *
     * @return the input's class, or null when no input was supplied
     */
    public Class<?> getInputType() {
        return input == null ? null : input.getClass();
    }

    /**
     * Creates a unique key for this step context.
     *
     * @return unique key in format "workflowId.stepId"
     */
    public String createKey() {
        return getWorkflowId() + "." + getStepId();
    }

    @Override
    public String toString() {
        return String.format("StepContext{workflowId='%s', runId='%s', stepId='%s', inputType=%s}",
            getWorkflowId(), getRunId(), getStepId(), getInputType());
    }
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/TestInternalStepListener.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.InternalStepListener;
import ai.driftkit.workflow.engine.core.StepResult;
import ai.driftkit.workflow.engine.core.WorkflowContext;
import ai.driftkit.workflow.engine.graph.StepNode;
import ai.driftkit.workflow.engine.persistence.WorkflowInstance;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.Optional;
/**
* Test implementation of InternalStepListener that tracks and mocks internal step executions.
*/
@Slf4j
@RequiredArgsConstructor
public class TestInternalStepListener implements InternalStepListener {
private final WorkflowInstance workflowInstance;
private final ExecutionTracker executionTracker;
private final MockRegistry mockRegistry;
@Override
public void beforeInternalStep(String stepId, Object input, WorkflowContext context) {
log.debug("Before internal step: {}.{}", workflowInstance.getWorkflowId(), stepId);
if (executionTracker != null) {
// Create virtual node for tracking
StepNode virtualNode = StepNode.fromFunction(stepId,
obj -> StepResult.continueWith(obj), Object.class, Object.class);
StepContext stepContext = new StepContext(workflowInstance, virtualNode, input);
executionTracker.recordStepStart(stepContext);
}
}
@Override
public void afterInternalStep(String stepId, StepResult<?> result, WorkflowContext context) {
log.debug("After internal step: {}.{} with result: {}", workflowInstance.getWorkflowId(), stepId, result);
if (executionTracker != null) {
StepNode virtualNode = StepNode.fromFunction(stepId,
obj -> StepResult.continueWith(obj), Object.class, Object.class);
StepContext stepContext = new StepContext(workflowInstance, virtualNode, null);
executionTracker.recordStepComplete(stepContext, result);
}
}
@Override
public void onInternalStepError(String stepId, Exception error, WorkflowContext context) {
log.error("Internal step error: {}.{}", workflowInstance.getWorkflowId(), stepId, error);
if (executionTracker != null) {
StepNode virtualNode = StepNode.fromFunction(stepId,
obj -> StepResult.continueWith(obj), Object.class, Object.class);
StepContext stepContext = new StepContext(workflowInstance, virtualNode, null);
executionTracker.recordStepError(stepContext, error);
}
}
@Override
public Optional<StepResult<?>> interceptInternalStep(String stepId, Object input, WorkflowContext context) {
if (mockRegistry != null) {
// Create virtual node for mock lookup
StepNode virtualNode = StepNode.fromFunction(stepId,
obj -> StepResult.continueWith(obj), Object.class, Object.class);
StepContext stepContext = new StepContext(workflowInstance, virtualNode, input);
// Check if there's a mock for this step
var mockOpt = mockRegistry.findMock(stepContext);
if (mockOpt.isPresent()) {
log.debug("Found mock for internal step: {}.{}", workflowInstance.getWorkflowId(), stepId);
try {
StepResult<?> result = mockOpt.get().execute(input, stepContext);
// Return the mock result as-is, including StepResult.Fail
// The workflow builder's executeStepWithRetry will handle fail results properly
return Optional.of(result);
} catch (RuntimeException e) {
// For retry testing, we need to let the original exception through
log.debug("Mock execution threw exception for internal step {}.{}",
workflowInstance.getWorkflowId(), stepId, e);
throw e;
} catch (Exception e) {
log.error("Mock execution failed for internal step: {}.{}",
workflowInstance.getWorkflowId(), stepId, e);
throw new RuntimeException("Mock execution failed", e);
}
}
}
return Optional.empty();
}
}
|
0
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test
|
java-sources/ai/driftkit/driftkit-workflow-test-framework/0.8.1/ai/driftkit/workflow/test/core/TypeSafeFunction.java
|
package ai.driftkit.workflow.test.core;
import ai.driftkit.workflow.engine.core.StepResult;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.function.Function;
/**
 * Type-safe wrapper for mock functions that performs runtime type checking.
 * Prevents ClassCastException by validating types before execution.
 *
 * @param <I> input type
 * @param <O> output type
 */
@Slf4j
@RequiredArgsConstructor
public class TypeSafeFunction<I, O> implements Function<I, StepResult<O>> {
    private final Object mockBehavior;
    private final Class<I> inputType;
    private final Class<O> outputType;

    @Override
    public StepResult<O> apply(I input) {
        // Guard clause: reject inputs of the wrong runtime type (null is allowed).
        if (input != null && !inputType.isInstance(input)) {
            throw new IllegalArgumentException(
                "Mock expects input of type " + inputType.getName() +
                " but received " + input.getClass().getName()
            );
        }
        // Guard clause: the untyped behavior must actually be a Function.
        if (!(mockBehavior instanceof Function)) {
            throw new IllegalStateException(
                "Mock behavior must be a Function but was " +
                (mockBehavior != null ? mockBehavior.getClass().getName() : "null")
            );
        }
        try {
            @SuppressWarnings("unchecked")
            Function<I, StepResult<O>> delegate = (Function<I, StepResult<O>>) mockBehavior;
            StepResult<O> result = delegate.apply(input);
            if (result == null) {
                throw new IllegalStateException("Mock function returned null result");
            }
            warnOnOutputMismatch(result);
            return result;
        } catch (ClassCastException e) {
            throw new IllegalStateException(
                "Type mismatch in mock execution. Check that your mock function has correct type parameters.", e
            );
        } catch (RuntimeException e) {
            // Runtime failures from the mock propagate unchanged.
            throw e;
        } catch (Exception e) {
            throw new RuntimeException("Mock execution failed", e);
        }
    }

    /**
     * Logs a warning when a Continue result carries data of an unexpected type.
     * Best-effort check only — the result is still returned as-is.
     */
    private void warnOnOutputMismatch(StepResult<O> result) {
        if (result instanceof StepResult.Continue<O> continued && continued.data() != null) {
            Object data = continued.data();
            if (!outputType.isInstance(data)) {
                log.warn("Mock returned data of type {} but expected {}",
                    data.getClass().getName(), outputType.getName());
            }
        }
    }

    /**
     * Creates a type-safe function from an untyped object.
     *
     * @param mockBehavior the mock behavior object
     * @param inputType expected input type
     * @param outputType expected output type
     * @param <I> input type
     * @param <O> output type
     * @return type-safe function
     */
    public static <I, O> TypeSafeFunction<I, O> wrap(Object mockBehavior,
                                                     Class<I> inputType,
                                                     Class<O> outputType) {
        return new TypeSafeFunction<>(mockBehavior, inputType, outputType);
    }

    /**
     * Creates a type-safe function that validates the input type only.
     * Use when the output type cannot be determined at compile time.
     *
     * @param mockBehavior the mock behavior object
     * @param inputType expected input type
     * @param <I> input type
     * @return type-safe function
     */
    public static <I> TypeSafeFunction<I, Object> wrapInput(Object mockBehavior, Class<I> inputType) {
        return new TypeSafeFunction<>(mockBehavior, inputType, Object.class);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.