| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/parser/StringToSingleTextParser.java
|
package ai.knowly.langtorch.preprocessing.parser;
import ai.knowly.langtorch.schema.text.SingleText;
/**
* The StringToSingleTextParser class is a Java parser that converts a string input into a
* SingleText object.
*/
public class StringToSingleTextParser implements Parser<String, SingleText> {
private StringToSingleTextParser() {
super();
}
public static StringToSingleTextParser create() {
return new StringToSingleTextParser();
}
@Override
public SingleText parse(String input) {
return SingleText.of(input);
}
}
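A minimal usage sketch (illustrative only, not part of the source file above):
// Usage sketch: parse a raw string into a SingleText.
SingleText parsed = StringToSingleTextParser.create().parse("Hello, Langtorch!");
// parsed.getText() returns "Hello, Langtorch!"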
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter/text/SplitterOption.java
|
package ai.knowly.langtorch.preprocessing.splitter.text;
/** The SplitterOption class is the abstract base class for text splitter options. */
public abstract class SplitterOption {
String text;
protected SplitterOption(String text) {
this.text = text;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter/text/TextSplitter.java
|
package ai.knowly.langtorch.preprocessing.splitter.text;
import java.util.List;
/** The TextSplitter interface represents a text splitter. */
public interface TextSplitter<S extends SplitterOption> {
List<String> splitText(S option);
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter/text
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter/text/word/WordSplitter.java
|
package ai.knowly.langtorch.preprocessing.splitter.text.word;
import ai.knowly.langtorch.preprocessing.splitter.text.TextSplitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import java.util.List;
/** Splits text into chunks of words. */
public class WordSplitter implements TextSplitter<WordSplitterOption> {
public static WordSplitter create() {
return new WordSplitter();
}
@Override
public List<String> splitText(WordSplitterOption option) {
int maxLengthPerChunk = option.getMaxLengthPerChunk();
String text = option.getText();
Builder<String> chunks = ImmutableList.builder();
// Validate the maxLengthPerChunk
if (maxLengthPerChunk < 1) {
throw new IllegalArgumentException("maxLengthPerChunk should be greater than 0");
}
String[] words = text.split("\\s+");
int minLengthOfWord = words[0].length();
for (String word : words) {
minLengthOfWord = Math.min(minLengthOfWord, word.length());
}
if (maxLengthPerChunk < minLengthOfWord) {
throw new IllegalArgumentException(
"maxLengthPerChunk is smaller than the smallest word in the string");
}
StringBuilder chunk = new StringBuilder();
int wordsLength = words.length;
for (int i = 0; i < wordsLength; i++) {
String word = words[i];
boolean isLastWord = i == wordsLength - 1;
if ((chunk.length() + word.length() + (isLastWord ? 0 : 1))
<= maxLengthPerChunk) { // '+1' accounts for spaces, except for the last word
chunk.append(word);
if (!isLastWord) {
chunk.append(" ");
}
} else {
chunks.add(chunk.toString().trim());
chunk = new StringBuilder();
chunk.append(word).append(" ");
}
}
// Add remaining chunk if any
if (chunk.length() > 0) {
chunks.add(chunk.toString().trim());
}
return chunks.build();
}
}
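A short usage sketch based only on the API shown above; the chunks in the comment follow from the splitting logic:
// Usage sketch: split a sentence into chunks of at most 10 characters each.
WordSplitter splitter = WordSplitter.create();
List<String> chunks = splitter.splitText(WordSplitterOption.of("the quick brown fox jumps", 10));
// chunks: ["the quick", "brown fox", "jumps"]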
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter/text
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/preprocessing/splitter/text/word/WordSplitterOption.java
|
package ai.knowly.langtorch.preprocessing.splitter.text.word;
import ai.knowly.langtorch.preprocessing.splitter.text.SplitterOption;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
/** Options for {@link WordSplitter}. */
@EqualsAndHashCode(callSuper = true)
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class WordSplitterOption extends SplitterOption {
// Unprocessed text.
private final String text;
// The max length of a chunk.
private final int maxLengthPerChunk;
private WordSplitterOption(String text, int maxLengthPerChunk) {
super(text);
this.text = text;
this.maxLengthPerChunk = maxLengthPerChunk;
}
public static WordSplitterOption of(String text, int totalLengthOfChunk) {
return new WordSplitterOption(text, totalLengthOfChunk);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/EmbeddingProcessor.java
|
package ai.knowly.langtorch.processor;
import ai.knowly.langtorch.schema.embeddings.EmbeddingInput;
import ai.knowly.langtorch.schema.embeddings.EmbeddingOutput;
/** EmbeddingProcessor is a shared interface for processors that produce embedding output. */
public interface EmbeddingProcessor extends Processor<EmbeddingInput, EmbeddingOutput> {}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/Processor.java
|
package ai.knowly.langtorch.processor;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
import com.google.common.util.concurrent.ListenableFuture;
/**
* A Processor represents an LLM's capability to consume and generate data of different modalities or types.
*/
public interface Processor<I extends Input, O extends Output> {
O run(I inputData);
ListenableFuture<O> runAsync(I inputData);
}
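As an illustration of the contract (not part of the library), a trivial echo processor could look like the sketch below, assuming SingleText implements both Input and Output as its use elsewhere in this package suggests (imports omitted):
// Illustrative echo processor: returns its input unchanged.
public class EchoProcessor implements Processor<SingleText, SingleText> {
  @Override
  public SingleText run(SingleText inputData) {
    return inputData;
  }
  @Override
  public ListenableFuture<SingleText> runAsync(SingleText inputData) {
    return Futures.immediateFuture(inputData);
  }
}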
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/ProcessorConfig.java
|
package ai.knowly.langtorch.processor;
/** The ProcessorConfig interface represents a processor configuration. */
public interface ProcessorConfig {}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/ProcessorExecutionException.java
|
package ai.knowly.langtorch.processor;
/**
* The ProcessorExecutionException class represents an error raised during processor execution.
*/
public class ProcessorExecutionException extends RuntimeException {
public ProcessorExecutionException(String message) {
super(message);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere/CohereProcessorModule.java
|
package ai.knowly.langtorch.processor.cohere;
import ai.knowly.langtorch.llm.cohere.CohereAIService;
import ai.knowly.langtorch.llm.cohere.schema.config.CohereAIServiceConfig;
import ai.knowly.langtorch.processor.cohere.generate.CohereGenerateProcessorConfig;
import ai.knowly.langtorch.utils.Environment;
import ai.knowly.langtorch.utils.api.key.CohereKeyUtil;
import com.google.common.flogger.FluentLogger;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
public final class CohereProcessorModule extends AbstractModule {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
@Provides
public CohereAIService providesCohereAPI() {
return new CohereAIService(
CohereAIServiceConfig.builder()
.setApiKey(CohereKeyUtil.getKey(logger, Environment.PRODUCTION))
.build());
}
@Provides
public CohereGenerateProcessorConfig providesCohereGenerateProcessorConfig() {
return CohereGenerateProcessorConfig.builder().build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere/generate/CohereGenerateProcessor.java
|
package ai.knowly.langtorch.processor.cohere.generate;
import ai.knowly.langtorch.llm.cohere.CohereAIService;
import ai.knowly.langtorch.llm.cohere.schema.CohereGenerateRequest;
import ai.knowly.langtorch.llm.cohere.schema.CohereGenerateResponse;
import ai.knowly.langtorch.processor.ProcessorExecutionException;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.text.SingleText;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import javax.inject.Inject;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
/** Processor for Cohere.ai text generation service. */
public class CohereGenerateProcessor implements Processor<SingleText, SingleText> {
private final CohereAIService cohereAIService;
@Inject
CohereGenerateProcessor(CohereAIService cohereAIService) {
this.cohereAIService = cohereAIService;
}
@Override
public SingleText run(SingleText inputData) {
CohereGenerateResponse response =
cohereAIService.generate(
CohereGenerateRequest.builder().prompt(inputData.getText()).build());
if (response.getGenerations().isEmpty()) {
throw new ProcessorExecutionException("Receive empty generations from cohere.ai.");
}
return SingleText.of(response.getGenerations().get(0).getText());
}
@Override
public ListenableFuture<SingleText> runAsync(SingleText inputData) {
ListenableFuture<CohereGenerateResponse> responseFuture =
cohereAIService.generateAsync(
CohereGenerateRequest.builder().prompt(inputData.getText()).build());
return Futures.transform(
responseFuture,
response -> {
if (response.getGenerations().isEmpty()) {
throw new ProcessorExecutionException("Receive empty generations from cohere.ai.");
}
return SingleText.of(response.getGenerations().get(0).getText());
},
directExecutor());
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere/generate/CohereGenerateProcessorConfig.java
|
package ai.knowly.langtorch.processor.cohere.generate;
import ai.knowly.langtorch.processor.ProcessorConfig;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Optional;
@AutoValue
public abstract class CohereGenerateProcessorConfig implements ProcessorConfig {
private static final String DEFAULT_MODEL = "command";
public static Builder builder() {
return new AutoValue_CohereGenerateProcessorConfig.Builder()
.setModel(DEFAULT_MODEL)
.setEndSequences(ImmutableList.of())
.setStopSequences(ImmutableList.of())
.setLogitBias(ImmutableMap.of());
}
public abstract Builder toBuilder();
// Abstract methods for configuration properties
public abstract String getModel();
public abstract Optional<String> getPresent();
public abstract Optional<Double> getTemperature();
public abstract Optional<Double> getP();
public abstract Optional<Integer> getK();
public abstract Optional<Integer> getMaxTokens();
public abstract Optional<Integer> getNumGenerations();
public abstract Optional<Double> getPresencePenalty();
public abstract Optional<Double> getFrequencyPenalty();
public abstract ImmutableMap<String, Float> getLogitBias();
public abstract List<String> getEndSequences();
public abstract List<String> getStopSequences();
public abstract Optional<CohereGenerateReturnLikelihoods> getReturnLikelihoods();
public abstract Optional<CohereGenerateTruncate> getTruncate();
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder setModel(String newModel);
public abstract Builder setPresent(String newPresent);
public abstract Builder setTemperature(double newTemperature);
public abstract Builder setP(double newP);
public abstract Builder setK(int newK);
public abstract Builder setMaxTokens(int newMaxTokens);
public abstract Builder setNumGenerations(int newNumGenerations);
public abstract Builder setPresencePenalty(double newPresencePenalty);
public abstract Builder setFrequencyPenalty(double newFrequencyPenalty);
public abstract Builder setLogitBias(ImmutableMap<String, Float> newLogitBias);
public abstract Builder setEndSequences(List<String> newEndSequences);
public abstract Builder setStopSequences(List<String> newStopSequences);
public abstract Builder setReturnLikelihoods(
CohereGenerateReturnLikelihoods newReturnLikelihoods);
public abstract Builder setTruncate(CohereGenerateTruncate newTruncate);
public abstract CohereGenerateProcessorConfig build();
}
}
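A builder usage sketch with arbitrary illustrative values, using only setters declared above:
CohereGenerateProcessorConfig config =
    CohereGenerateProcessorConfig.builder()
        .setTemperature(0.7)
        .setMaxTokens(256)
        .setNumGenerations(1)
        .build();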
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere/generate/CohereGenerateRequestConverter.java
|
package ai.knowly.langtorch.processor.cohere.generate;
import ai.knowly.langtorch.llm.cohere.schema.CohereGenerateRequest;
public class CohereGenerateRequestConverter {
private CohereGenerateRequestConverter() {}
public static CohereGenerateRequest convert(
String prompt, CohereGenerateProcessorConfig cohereGenerateProcessorConfig) {
CohereGenerateRequest.Builder cohereGenerateRequestBuilder =
CohereGenerateRequest.builder().prompt(prompt);
// Set optional configuration properties
cohereGenerateProcessorConfig
.getTemperature()
.ifPresent(cohereGenerateRequestBuilder::temperature);
cohereGenerateProcessorConfig.getP().ifPresent(cohereGenerateRequestBuilder::p);
cohereGenerateProcessorConfig.getK().ifPresent(cohereGenerateRequestBuilder::k);
cohereGenerateProcessorConfig.getPresent().ifPresent(cohereGenerateRequestBuilder::preset);
cohereGenerateProcessorConfig
.getNumGenerations()
.ifPresent(cohereGenerateRequestBuilder::numGenerations);
if (!cohereGenerateProcessorConfig.getEndSequences().isEmpty()) {
cohereGenerateRequestBuilder.endSequences(cohereGenerateProcessorConfig.getEndSequences());
}
if (!cohereGenerateProcessorConfig.getStopSequences().isEmpty()) {
cohereGenerateRequestBuilder.stopSequences(cohereGenerateProcessorConfig.getStopSequences());
}
cohereGenerateProcessorConfig.getMaxTokens().ifPresent(cohereGenerateRequestBuilder::maxTokens);
cohereGenerateProcessorConfig
.getPresencePenalty()
.ifPresent(cohereGenerateRequestBuilder::presencePenalty);
cohereGenerateProcessorConfig
.getFrequencyPenalty()
.ifPresent(cohereGenerateRequestBuilder::frequencyPenalty);
cohereGenerateRequestBuilder.logitBias(cohereGenerateProcessorConfig.getLogitBias());
cohereGenerateProcessorConfig
.getReturnLikelihoods()
.ifPresent(
likelihoods -> cohereGenerateRequestBuilder.returnLikelihoods(likelihoods.toString()));
cohereGenerateProcessorConfig
.getTruncate()
.ifPresent(truncate -> cohereGenerateRequestBuilder.truncate(truncate.toString()));
return cohereGenerateRequestBuilder.build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere/generate/CohereGenerateReturnLikelihoods.java
|
package ai.knowly.langtorch.processor.cohere.generate;
/** Specifies whether and how token likelihoods are returned with the response. */
public enum CohereGenerateReturnLikelihoods {
NONE("NONE"),
ALL("ALL"),
GENERATION("GENERATION");
private final String returnLikelihoods;
CohereGenerateReturnLikelihoods(String returnLikelihoods) {
this.returnLikelihoods = returnLikelihoods;
}
@Override
public String toString() {
return returnLikelihoods;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/cohere/generate/CohereGenerateTruncate.java
|
package ai.knowly.langtorch.processor.cohere.generate;
/** Specifies how the API will handle inputs longer than the maximum token length. */
public enum CohereGenerateTruncate {
NONE("NONE"),
END("END"),
START("START");
private final String truncate;
CohereGenerateTruncate(String truncate) {
this.truncate = truncate;
}
@Override
public String toString() {
return truncate;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax/MiniMaxServiceProvider.java
|
package ai.knowly.langtorch.processor.minimax;
import ai.knowly.langtorch.llm.minimax.MiniMaxService;
import ai.knowly.langtorch.llm.minimax.schema.config.MiniMaxServiceConfig;
import ai.knowly.langtorch.utils.Environment;
import ai.knowly.langtorch.utils.api.key.MiniMaxKeyUtil;
import com.google.common.flogger.FluentLogger;
/**
* @author maxiao
* @date 2023/06/07
*/
public final class MiniMaxServiceProvider {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private MiniMaxServiceProvider() {}
public static MiniMaxService createMiniMaxService(String groupId, String apiKey) {
return new MiniMaxService(
MiniMaxServiceConfig.builder().setGroupId(groupId).setApiKey(apiKey).build());
}
public static MiniMaxService createMiniMaxService() {
return createMiniMaxService(
MiniMaxKeyUtil.getGroupId(logger, Environment.PRODUCTION),
MiniMaxKeyUtil.getKey(logger, Environment.PRODUCTION));
}
}
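A usage sketch with placeholder credentials:
// Usage sketch: build a MiniMaxService from an explicit group id and API key (placeholders).
MiniMaxService miniMaxService =
    MiniMaxServiceProvider.createMiniMaxService("YOUR_GROUP_ID", "YOUR_MINIMAX_API_KEY");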
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax/chat/MiniMaxChatProcessor.java
|
package ai.knowly.langtorch.processor.minimax.chat;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.llm.minimax.MiniMaxService;
import ai.knowly.langtorch.llm.minimax.schema.dto.completion.ChatCompletionRequest;
import ai.knowly.langtorch.llm.minimax.schema.dto.completion.ChatCompletionResult;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.chat.*;
import ai.knowly.langtorch.schema.text.MultiChatMessage;
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.List;
import javax.inject.Inject;
/**
* MiniMax chat module implementation. Handles chat input and output for the MiniMax Language Model.
*
* @author maxiao
* @date 2023/06/08
*/
public class MiniMaxChatProcessor implements Processor<MultiChatMessage, ChatMessage> {
// MiniMaxService instance used for making requests
private final MiniMaxService miniMaxService;
// Configuration for the MiniMax Chat Processor
private MiniMaxChatProcessorConfig miniMaxChatProcessorConfig;
@Inject
public MiniMaxChatProcessor(
MiniMaxService miniMaxService, MiniMaxChatProcessorConfig miniMaxChatProcessorConfig) {
this.miniMaxService = miniMaxService;
this.miniMaxChatProcessorConfig = miniMaxChatProcessorConfig;
}
// Method to run the module with the given input and return the output chat message
@Override
public ChatMessage run(MultiChatMessage inputData) {
ChatCompletionRequest chatCompletionRequest =
MiniMaxChatProcessorRequestConverter.convert(miniMaxChatProcessorConfig, inputData);
ChatCompletionResult chatCompletion =
miniMaxService.createChatCompletion(chatCompletionRequest);
List<ChatCompletionResult.Choices> choices = chatCompletion.getChoices();
ChatCompletionResult.Choices choicesResult = choices.get(0);
return MiniMaxBotMessage.of(choicesResult.getText());
}
@Override
public ListenableFuture<ChatMessage> runAsync(MultiChatMessage inputData) {
ChatCompletionRequest chatCompletionRequest =
MiniMaxChatProcessorRequestConverter.convert(miniMaxChatProcessorConfig, inputData);
ListenableFuture<ChatCompletionResult> chatCompletionAsync =
miniMaxService.createChatCompletionAsync(chatCompletionRequest);
return FluentFuture.from(chatCompletionAsync)
.transform(
chatCompletion -> {
miniMaxService.checkResp(chatCompletion.getBaseResp());
List<ChatCompletionResult.Choices> choices = chatCompletion.getChoices();
ChatCompletionResult.Choices choicesResult = choices.get(0);
return MiniMaxBotMessage.of(choicesResult.getText());
},
directExecutor());
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax/chat/MiniMaxChatProcessorConfig.java
|
package ai.knowly.langtorch.processor.minimax.chat;
import ai.knowly.langtorch.llm.minimax.schema.dto.completion.ChatCompletionRequest;
import ai.knowly.langtorch.processor.ProcessorConfig;
import com.google.auto.value.AutoValue;
import java.util.Optional;
/**
* Configuration class for MiniMaxChatProcessor with various options
*
* @author maxiao
* @date 2023/06/08
*/
@AutoValue
public abstract class MiniMaxChatProcessorConfig implements ProcessorConfig {
private static final String DEFAULT_MODEL = "abab5-chat";
public static MiniMaxChatProcessorConfig getDefaultInstance() {
return builder().build();
}
public static Builder builder() {
return new AutoValue_MiniMaxChatProcessorConfig.Builder().setModel(DEFAULT_MODEL);
}
// Method to create a builder from the current instance
public abstract Builder toBuilder();
// Abstract methods for configuration properties
public abstract String getModel();
public abstract Optional<Boolean> getWithEmotion();
public abstract Optional<Boolean> getStream();
public abstract Optional<Boolean> getUseStandardSse();
public abstract Optional<Integer> getBeamWidth();
public abstract Optional<String> getPrompt();
public abstract Optional<ChatCompletionRequest.RoleMeta> getRoleMeta();
public abstract Optional<Boolean> getContinueLastMessage();
public abstract Optional<Long> getTokensToGenerate();
public abstract Optional<Float> getTemperature();
public abstract Optional<Float> getTopP();
public abstract Optional<Boolean> getSkipInfoMask();
// Builder class for constructing MiniMaxChatProcessorConfig instances
@AutoValue.Builder
public abstract static class Builder {
// Builder methods for setting configuration properties
public abstract Builder setModel(String model);
public abstract Builder setWithEmotion(Boolean withEmotion);
public abstract Builder setStream(Boolean stream);
public abstract Builder setUseStandardSse(Boolean useStandardSse);
public abstract Builder setBeamWidth(Integer beamWidth);
public abstract Builder setPrompt(String prompt);
public abstract Builder setRoleMeta(ChatCompletionRequest.RoleMeta roleMeta);
public abstract Builder setContinueLastMessage(Boolean continueLastMessage);
public abstract Builder setTokensToGenerate(Long tokensToGenerate);
public abstract Builder setTemperature(Float temperature);
public abstract Builder setTopP(Float topP);
public abstract Builder setSkipInfoMask(Boolean skipInfoMask);
// Method to build an instance of MiniMaxChatProcessorConfig
public abstract MiniMaxChatProcessorConfig build();
}
}
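A builder usage sketch with arbitrary illustrative values:
MiniMaxChatProcessorConfig config =
    MiniMaxChatProcessorConfig.builder()
        .setTemperature(0.9f)
        .setTokensToGenerate(512L)
        .build();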
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax/chat/MiniMaxChatProcessorRequestConverter.java
|
package ai.knowly.langtorch.processor.minimax.chat;
import ai.knowly.langtorch.llm.minimax.schema.dto.completion.ChatCompletionRequest;
import ai.knowly.langtorch.schema.text.MultiChatMessage;
import java.util.List;
import java.util.stream.Collectors;
/**
* Converter class to convert MiniMaxChatProcessorConfig and a list of chat messages to a
* ChatCompletionRequest
*
* @author maxiao
* @date 2023/06/08
*/
public class MiniMaxChatProcessorRequestConverter {
private MiniMaxChatProcessorRequestConverter() {}
// Method to convert MiniMaxChatProcessorConfig and a list of chat messages
// to a ChatCompletionRequest
public static ChatCompletionRequest convert(
MiniMaxChatProcessorConfig miniMaxChatProcessorConfig, MultiChatMessage messages) {
List<ChatCompletionRequest.Message> messageList =
messages.getMessages().stream()
.map(
message ->
ChatCompletionRequest.Message.builder()
.setSenderType(message.getRole().toString().toUpperCase())
.setText(message.getContent())
.build())
.collect(Collectors.toList());
ChatCompletionRequest.ChatCompletionRequestBuilder completionRequestBuilder =
ChatCompletionRequest.builder()
.setModel(miniMaxChatProcessorConfig.getModel())
.setMessages(messageList);
// Set optional configuration properties
miniMaxChatProcessorConfig.getWithEmotion().ifPresent(completionRequestBuilder::setWithEmotion);
miniMaxChatProcessorConfig.getStream().ifPresent(completionRequestBuilder::setStream);
miniMaxChatProcessorConfig
.getUseStandardSse()
.ifPresent(completionRequestBuilder::setUseStandardSse);
miniMaxChatProcessorConfig.getBeamWidth().ifPresent(completionRequestBuilder::setBeamWidth);
miniMaxChatProcessorConfig.getPrompt().ifPresent(completionRequestBuilder::setPrompt);
miniMaxChatProcessorConfig.getRoleMeta().ifPresent(completionRequestBuilder::setRoleMeta);
miniMaxChatProcessorConfig
.getContinueLastMessage()
.ifPresent(completionRequestBuilder::setContinueLastMessage);
miniMaxChatProcessorConfig
.getTokensToGenerate()
.ifPresent(completionRequestBuilder::setTokensToGenerate);
miniMaxChatProcessorConfig.getTemperature().ifPresent(completionRequestBuilder::setTemperature);
miniMaxChatProcessorConfig.getTopP().ifPresent(completionRequestBuilder::setTopP);
miniMaxChatProcessorConfig
.getSkipInfoMask()
.ifPresent(completionRequestBuilder::setSkipInfoMask);
// Build and return the ChatCompletionRequest
return completionRequestBuilder.build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax/embeddings/MiniMaxEmbeddingsProcessor.java
|
package ai.knowly.langtorch.processor.minimax.embeddings;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.llm.minimax.MiniMaxService;
import ai.knowly.langtorch.llm.minimax.schema.dto.embedding.EmbeddingResult;
import ai.knowly.langtorch.processor.EmbeddingProcessor;
import ai.knowly.langtorch.schema.embeddings.*;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
public class MiniMaxEmbeddingsProcessor implements EmbeddingProcessor {
private final MiniMaxService miniMaxService;
private final MiniMaxEmbeddingsProcessorConfig miniMaxEmbeddingsProcessorConfig;
public MiniMaxEmbeddingsProcessor(
MiniMaxService miniMaxService,
MiniMaxEmbeddingsProcessorConfig miniMaxEmbeddingsProcessorConfig) {
this.miniMaxService = miniMaxService;
this.miniMaxEmbeddingsProcessorConfig = miniMaxEmbeddingsProcessorConfig;
}
@Override
public EmbeddingOutput run(EmbeddingInput inputData) {
EmbeddingResult embeddingResult =
miniMaxService.createEmbeddings(
MiniMaxEmbeddingsProcessorRequestConverter.convert(
inputData.getModel(),
inputData.getInput(),
MiniMaxEmbeddingTypeScene.DB.toString()));
return EmbeddingOutput.of(
EmbeddingType.MINI_MAX,
embeddingResult.getVectors().stream()
.map(Embedding::ofFloatVector)
.collect(toImmutableList()));
}
@Override
public ListenableFuture<EmbeddingOutput> runAsync(EmbeddingInput inputData) {
ListenableFuture<EmbeddingResult> embeddingResult =
miniMaxService.createEmbeddingsAsync(
MiniMaxEmbeddingsProcessorRequestConverter.convert(
inputData.getModel(),
inputData.getInput(),
MiniMaxEmbeddingTypeScene.DB.toString()));
return Futures.transform(
embeddingResult,
result -> {
miniMaxService.checkResp(result.getBaseResp());
return EmbeddingOutput.of(
EmbeddingType.MINI_MAX,
result.getVectors().stream()
.map(Embedding::ofFloatVector)
.collect(toImmutableList()));
},
directExecutor());
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax/embeddings/MiniMaxEmbeddingsProcessorConfig.java
|
package ai.knowly.langtorch.processor.minimax.embeddings;
import ai.knowly.langtorch.processor.ProcessorConfig;
import com.google.auto.value.AutoValue;
@AutoValue
public abstract class MiniMaxEmbeddingsProcessorConfig implements ProcessorConfig {
public static MiniMaxEmbeddingsProcessorConfig getDefaultInstance() {
return builder().build();
}
public static MiniMaxEmbeddingsProcessorConfig.Builder builder() {
return new AutoValue_MiniMaxEmbeddingsProcessorConfig.Builder();
}
@AutoValue.Builder
public abstract static class Builder {
public abstract MiniMaxEmbeddingsProcessorConfig build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/minimax/embeddings/MiniMaxEmbeddingsProcessorRequestConverter.java
|
package ai.knowly.langtorch.processor.minimax.embeddings;
import ai.knowly.langtorch.llm.minimax.schema.dto.embedding.EmbeddingRequest;
import java.util.List;
public final class MiniMaxEmbeddingsProcessorRequestConverter {
private MiniMaxEmbeddingsProcessorRequestConverter() {}
public static EmbeddingRequest convert(String model, List<String> texts, String type) {
EmbeddingRequest embeddingRequest = new EmbeddingRequest();
embeddingRequest.setModel(model);
embeddingRequest.setTexts(texts);
embeddingRequest.setType(type);
return embeddingRequest;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/OpenAIServiceProvider.java
|
package ai.knowly.langtorch.processor.openai;
import ai.knowly.langtorch.llm.openai.OpenAIService;
import ai.knowly.langtorch.llm.openai.schema.config.OpenAIServiceConfig;
import ai.knowly.langtorch.utils.Environment;
import ai.knowly.langtorch.utils.api.key.OpenAIKeyUtil;
import com.google.common.flogger.FluentLogger;
public final class OpenAIServiceProvider {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private OpenAIServiceProvider() {}
public static OpenAIService createOpenAIService(String apiKey) {
return new OpenAIService(OpenAIServiceConfig.builder().setApiKey(apiKey).build());
}
public static OpenAIService createOpenAIService() {
return createOpenAIService(OpenAIKeyUtil.getKey(logger, Environment.PRODUCTION));
}
}
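A usage sketch with a placeholder key:
// Usage sketch: build an OpenAIService from an explicit API key (placeholder value).
OpenAIService openAIService = OpenAIServiceProvider.createOpenAIService("YOUR_OPENAI_API_KEY");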
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/chat/OpenAIChatProcessor.java
|
package ai.knowly.langtorch.processor.openai.chat;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.llm.openai.OpenAIService;
import ai.knowly.langtorch.llm.openai.schema.dto.completion.chat.ChatCompletionRequest;
import ai.knowly.langtorch.llm.openai.schema.dto.completion.chat.ChatCompletionResult;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.chat.AssistantMessage;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.schema.chat.Role;
import ai.knowly.langtorch.schema.chat.SystemMessage;
import ai.knowly.langtorch.schema.chat.UserMessage;
import ai.knowly.langtorch.schema.text.MultiChatMessage;
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.ListenableFuture;
import javax.inject.Inject;
/**
* OpenAI chat module implementation. Handles chat input and output for the OpenAI Language Model.
*/
public class OpenAIChatProcessor implements Processor<MultiChatMessage, ChatMessage> {
// OpenAIService instance used for making requests
private final OpenAIService openAIService;
// Configuration for the OpenAI Chat Processor
private final OpenAIChatProcessorConfig openAIChatProcessorConfig;
@Inject
public OpenAIChatProcessor(
OpenAIService openAIService, OpenAIChatProcessorConfig openAIChatProcessorConfig) {
this.openAIService = openAIService;
this.openAIChatProcessorConfig = openAIChatProcessorConfig;
}
// Method to run the module with the given input and return the output chat message
@Override
public ChatMessage run(MultiChatMessage inputData) {
ChatCompletionRequest chatCompletionRequest =
OpenAIChatProcessorRequestConverter.convert(
openAIChatProcessorConfig, inputData.getMessages());
ChatCompletionResult chatCompletion = openAIService.createChatCompletion(chatCompletionRequest);
ChatMessage chatMessage = chatCompletion.getChoices().get(0).getMessage();
if (Role.USER == chatMessage.getRole()) {
return UserMessage.of(chatMessage.getContent());
}
if (Role.SYSTEM == chatMessage.getRole()) {
return SystemMessage.of(chatMessage.getContent());
}
if (Role.ASSISTANT == chatMessage.getRole()) {
return AssistantMessage.of(chatMessage.getContent());
}
throw new UnknownMessageException(
String.format(
"Unknown role %s with message: %s ", chatMessage.getRole(), chatMessage.getContent()));
}
@Override
public ListenableFuture<ChatMessage> runAsync(MultiChatMessage inputData) {
ChatCompletionRequest chatCompletionRequest =
OpenAIChatProcessorRequestConverter.convert(
openAIChatProcessorConfig, inputData.getMessages());
ListenableFuture<ChatCompletionResult> chatCompletionAsync =
openAIService.createChatCompletionAsync(chatCompletionRequest);
return FluentFuture.from(chatCompletionAsync)
.transform(
chatCompletion -> {
ChatMessage chatMessage = chatCompletion.getChoices().get(0).getMessage();
if (chatMessage.getRole() == Role.USER) {
return UserMessage.of(chatMessage.getContent());
}
if (chatMessage.getRole() == Role.SYSTEM) {
return SystemMessage.of(chatMessage.getContent());
}
if (chatMessage.getRole() == Role.ASSISTANT) {
return AssistantMessage.of(chatMessage.getContent());
}
throw new UnknownMessageException(
String.format(
"Unknown role %s with message: %s ",
chatMessage.getRole(), chatMessage.getContent()));
},
directExecutor());
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/chat/OpenAIChatProcessorConfig.java
|
package ai.knowly.langtorch.processor.openai.chat;
import ai.knowly.langtorch.processor.ProcessorConfig;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
// Configuration class for OpenAIChatProcessor with various options
@AutoValue
public abstract class OpenAIChatProcessorConfig implements ProcessorConfig {
private static final String DEFAULT_MODEL = "gpt-3.5-turbo";
private static final int DEFAULT_MAX_TOKEN = 2048;
public static OpenAIChatProcessorConfig getDefaultInstance() {
return builder().build();
}
public static Builder builder() {
return new AutoValue_OpenAIChatProcessorConfig.Builder()
.setModel(DEFAULT_MODEL)
.setMaxTokens(DEFAULT_MAX_TOKEN)
.setStop(new ArrayList<>())
.setLogitBias(new HashMap<>());
}
// Method to create a builder from the current instance
public abstract Builder toBuilder();
// Abstract methods for configuration properties
public abstract String getModel();
public abstract Optional<Double> getTemperature();
public abstract Optional<Double> getTopP();
public abstract Optional<Integer> getN();
public abstract Optional<Boolean> getStream();
public abstract ImmutableList<String> getStop();
public abstract Optional<Integer> getMaxTokens();
public abstract Optional<Double> getPresencePenalty();
public abstract Optional<Double> getFrequencyPenalty();
public abstract ImmutableMap<String, Integer> getLogitBias();
public abstract Optional<String> getUser();
// Builder class for constructing OpenAIChatProcessorConfig instances
@AutoValue.Builder
public abstract static class Builder {
// Builder methods for setting configuration properties
public abstract Builder setModel(String model);
public abstract Builder setTemperature(Double temperature);
public abstract Builder setTopP(Double topP);
public abstract Builder setN(Integer n);
public abstract Builder setStream(Boolean stream);
public abstract Builder setStop(List<String> stop);
public abstract Builder setMaxTokens(Integer maxTokens);
public abstract Builder setPresencePenalty(Double presencePenalty);
public abstract Builder setFrequencyPenalty(Double frequencyPenalty);
public abstract Builder setLogitBias(Map<String, Integer> logitBias);
public abstract Builder setUser(String user);
// Method to build an instance of OpenAIChatProcessorConfig
public abstract OpenAIChatProcessorConfig build();
}
}
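A builder usage sketch with arbitrary illustrative values:
OpenAIChatProcessorConfig config =
    OpenAIChatProcessorConfig.getDefaultInstance().toBuilder()
        .setTemperature(0.2)
        .setUser("example-user")
        .build();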
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/chat/OpenAIChatProcessorRequestConverter.java
|
package ai.knowly.langtorch.processor.openai.chat;
import ai.knowly.langtorch.llm.openai.schema.dto.completion.chat.ChatCompletionRequest;
import java.util.List;
// Converter class to convert OpenAIChatProcessorConfig and a list of chat messages
// to a ChatCompletionRequest
public final class OpenAIChatProcessorRequestConverter {
private OpenAIChatProcessorRequestConverter() {}
// Method to convert OpenAIChatProcessorConfig and a list of chat messages
// to a ChatCompletionRequest
public static ChatCompletionRequest convert(
OpenAIChatProcessorConfig openAIChatProcessorConfig,
List<ai.knowly.langtorch.schema.chat.ChatMessage> messages) {
ChatCompletionRequest.ChatCompletionRequestBuilder completionRequestBuilder =
ChatCompletionRequest.builder()
.setModel(openAIChatProcessorConfig.getModel())
.setMessages(messages);
// Set optional configuration properties
openAIChatProcessorConfig.getTemperature().ifPresent(completionRequestBuilder::setTemperature);
openAIChatProcessorConfig.getTopP().ifPresent(completionRequestBuilder::setTopP);
openAIChatProcessorConfig.getN().ifPresent(completionRequestBuilder::setN);
openAIChatProcessorConfig.getStream().ifPresent(completionRequestBuilder::setStream);
if (!openAIChatProcessorConfig.getStop().isEmpty()) {
completionRequestBuilder.setStop(openAIChatProcessorConfig.getStop());
}
openAIChatProcessorConfig.getMaxTokens().ifPresent(completionRequestBuilder::setMaxTokens);
openAIChatProcessorConfig
.getPresencePenalty()
.ifPresent(completionRequestBuilder::setPresencePenalty);
openAIChatProcessorConfig
.getFrequencyPenalty()
.ifPresent(completionRequestBuilder::setFrequencyPenalty);
completionRequestBuilder.setLogitBias(openAIChatProcessorConfig.getLogitBias());
openAIChatProcessorConfig.getUser().ifPresent(completionRequestBuilder::setUser);
// Build and return the ChatCompletionRequest
return completionRequestBuilder.build();
}
}
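A usage sketch of the converter (imports omitted; the message text is illustrative):
ChatCompletionRequest request =
    OpenAIChatProcessorRequestConverter.convert(
        OpenAIChatProcessorConfig.getDefaultInstance(),
        ImmutableList.<ChatMessage>of(UserMessage.of("What does Langtorch do?")));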
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/chat/UnknownMessageException.java
|
package ai.knowly.langtorch.processor.openai.chat;
public class UnknownMessageException extends RuntimeException {
public UnknownMessageException(String message) {
super(message);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/embedding/OpenAIEmbeddingProcessor.java
|
package ai.knowly.langtorch.processor.openai.embedding;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.llm.openai.OpenAIService;
import ai.knowly.langtorch.llm.openai.schema.dto.embedding.EmbeddingResult;
import ai.knowly.langtorch.processor.EmbeddingProcessor;
import ai.knowly.langtorch.schema.embeddings.Embedding;
import ai.knowly.langtorch.schema.embeddings.EmbeddingInput;
import ai.knowly.langtorch.schema.embeddings.EmbeddingOutput;
import ai.knowly.langtorch.schema.embeddings.EmbeddingType;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import javax.inject.Inject;
/** Embeddings processor for OpenAI. */
public class OpenAIEmbeddingProcessor implements EmbeddingProcessor {
private final OpenAIService openAIService;
private final OpenAIEmbeddingsProcessorConfig openAIEmbeddingsProcessorConfig;
@Inject
public OpenAIEmbeddingProcessor(
OpenAIService openAiApi, OpenAIEmbeddingsProcessorConfig openAIEmbeddingsProcessorConfig) {
this.openAIService = openAiApi;
this.openAIEmbeddingsProcessorConfig = openAIEmbeddingsProcessorConfig;
}
@Override
public EmbeddingOutput run(EmbeddingInput inputData) {
EmbeddingResult embeddingResult =
openAIService.createEmbeddings(
OpenAIEmbeddingsProcessorRequestConverter.convert(
openAIEmbeddingsProcessorConfig, inputData.getModel(), inputData.getInput()));
return EmbeddingOutput.of(
EmbeddingType.OPEN_AI,
embeddingResult.getData().stream()
.map(embedding -> Embedding.of(embedding.getValue()))
.collect(toImmutableList()));
}
@Override
public ListenableFuture<EmbeddingOutput> runAsync(EmbeddingInput inputData) {
ListenableFuture<EmbeddingResult> embeddingResult =
openAIService.createEmbeddingsAsync(
OpenAIEmbeddingsProcessorRequestConverter.convert(
openAIEmbeddingsProcessorConfig, inputData.getModel(), inputData.getInput()));
return Futures.transform(
embeddingResult,
result ->
EmbeddingOutput.of(
EmbeddingType.OPEN_AI,
result.getData().stream()
.map(embedding -> Embedding.of(embedding.getValue()))
.collect(toImmutableList())),
directExecutor());
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/embedding/OpenAIEmbeddingsProcessorConfig.java
|
package ai.knowly.langtorch.processor.openai.embedding;
import ai.knowly.langtorch.processor.ProcessorConfig;
import com.google.auto.value.AutoValue;
import java.util.Optional;
@AutoValue
public abstract class OpenAIEmbeddingsProcessorConfig implements ProcessorConfig {
public static OpenAIEmbeddingsProcessorConfig getDefaultInstance() {
return builder().build();
}
public static OpenAIEmbeddingsProcessorConfig.Builder builder() {
return new AutoValue_OpenAIEmbeddingsProcessorConfig.Builder();
}
public abstract Optional<String> getUser();
@AutoValue.Builder
public abstract static class Builder {
public abstract OpenAIEmbeddingsProcessorConfig.Builder setUser(String user);
public abstract OpenAIEmbeddingsProcessorConfig build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/embedding/OpenAIEmbeddingsProcessorRequestConverter.java
|
package ai.knowly.langtorch.processor.openai.embedding;
import ai.knowly.langtorch.llm.openai.schema.dto.embedding.EmbeddingRequest;
import java.util.List;
public final class OpenAIEmbeddingsProcessorRequestConverter {
private OpenAIEmbeddingsProcessorRequestConverter() {}
public static EmbeddingRequest convert(
OpenAIEmbeddingsProcessorConfig openAIEmbeddingsProcessorConfig,
String model,
List<String> input) {
EmbeddingRequest embeddingRequest = new EmbeddingRequest();
embeddingRequest.setModel(model);
embeddingRequest.setInput(input);
openAIEmbeddingsProcessorConfig.getUser().ifPresent(embeddingRequest::setUser);
return embeddingRequest;
}
}
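A usage sketch of the converter; the model name and input text are illustrative only:
EmbeddingRequest request =
    OpenAIEmbeddingsProcessorRequestConverter.convert(
        OpenAIEmbeddingsProcessorConfig.getDefaultInstance(),
        "text-embedding-ada-002",
        ImmutableList.of("The quick brown fox"));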
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/image/OpenAIImageProcessor.java
|
package ai.knowly.langtorch.processor.openai.image;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.llm.openai.OpenAIService;
import ai.knowly.langtorch.llm.openai.schema.dto.image.CreateImageRequest;
import ai.knowly.langtorch.llm.openai.schema.dto.image.ImageResult;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.image.Image;
import ai.knowly.langtorch.schema.image.Images;
import ai.knowly.langtorch.schema.text.SingleText;
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.ListenableFuture;
import javax.inject.Inject;
public class OpenAIImageProcessor implements Processor<SingleText, Images> {
private final OpenAIService openAIService;
private final OpenAIImageProcessorConfig openAIImageProcessorConfig;
@Inject
public OpenAIImageProcessor(
OpenAIService openAIService, OpenAIImageProcessorConfig openAIImageProcessorConfig) {
this.openAIService = openAIService;
this.openAIImageProcessorConfig = openAIImageProcessorConfig;
}
// Method to run the module with the given input and return the generated images
@Override
public Images run(SingleText inputData) {
CreateImageRequest createImageRequest =
OpenAIImageProcessorRequestConverter.convert(
openAIImageProcessorConfig, inputData.getText());
ImageResult result = openAIService.createImage(createImageRequest);
return Images.of(
result.getCreated(),
result.getData().stream()
.map(image -> Image.of(image.getUrl()))
.collect(toImmutableList()));
}
@Override
public ListenableFuture<Images> runAsync(SingleText inputData) {
CreateImageRequest createImageRequest =
OpenAIImageProcessorRequestConverter.convert(
openAIImageProcessorConfig, inputData.getText());
return FluentFuture.from(openAIService.createImageAsync(createImageRequest))
.transform(
result ->
Images.of(
result.getCreated(),
result.getData().stream()
.map(image -> Image.of(image.getUrl()))
.collect(toImmutableList())),
directExecutor());
}
}
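A usage sketch wiring the processor by hand instead of via injection (imports omitted; the prompt and size are illustrative):
OpenAIImageProcessor imageProcessor =
    new OpenAIImageProcessor(
        OpenAIServiceProvider.createOpenAIService(),
        OpenAIImageProcessorConfig.builder().setSize("512x512").build());
Images images = imageProcessor.run(SingleText.of("a watercolor painting of a lighthouse"));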
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/image/OpenAIImageProcessorConfig.java
|
package ai.knowly.langtorch.processor.openai.image;
import ai.knowly.langtorch.processor.ProcessorConfig;
import com.google.auto.value.AutoValue;
import java.util.Optional;
@AutoValue
public abstract class OpenAIImageProcessorConfig implements ProcessorConfig {
public static OpenAIImageProcessorConfig getDefaultInstance() {
return builder().build();
}
public static OpenAIImageProcessorConfig.Builder builder() {
return new AutoValue_OpenAIImageProcessorConfig.Builder();
}
// Method to create a builder from the current instance
abstract OpenAIImageProcessorConfig.Builder toBuilder();
public abstract Optional<Integer> getN();
// The size of the generated images. Must be one of "256x256", "512x512", or "1024x1024".
// Defaults to "1024x1024"
public abstract Optional<String> getSize();
public abstract Optional<String> getUser();
@AutoValue.Builder
public abstract static class Builder {
public abstract OpenAIImageProcessorConfig.Builder setSize(String size);
public abstract OpenAIImageProcessorConfig.Builder setN(Integer n);
public abstract OpenAIImageProcessorConfig.Builder setUser(String user);
public abstract OpenAIImageProcessorConfig build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/image/OpenAIImageProcessorRequestConverter.java
|
package ai.knowly.langtorch.processor.openai.image;
import ai.knowly.langtorch.llm.openai.schema.dto.image.CreateImageRequest;
public final class OpenAIImageProcessorRequestConverter {
private OpenAIImageProcessorRequestConverter() {}
public static CreateImageRequest convert(
OpenAIImageProcessorConfig openAIImageProcessorConfig, String prompt) {
CreateImageRequest createImageRequest = new CreateImageRequest();
// Set required configuration properties
createImageRequest.setPrompt(prompt);
// Set optional configuration properties
openAIImageProcessorConfig.getN().ifPresent(createImageRequest::setN);
openAIImageProcessorConfig.getSize().ifPresent(createImageRequest::setSize);
openAIImageProcessorConfig.getUser().ifPresent(createImageRequest::setUser);
return createImageRequest;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/text/OpenAITextProcessor.java
|
package ai.knowly.langtorch.processor.openai.text;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.llm.openai.OpenAIService;
import ai.knowly.langtorch.llm.openai.schema.dto.completion.CompletionRequest;
import ai.knowly.langtorch.llm.openai.schema.dto.completion.CompletionResult;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.text.SingleText;
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.ListenableFuture;
import javax.inject.Inject;
/**
* OpenAI text module implementation. Handles single text input and output for the OpenAI Language
* Model.
*/
public class OpenAITextProcessor implements Processor<SingleText, SingleText> {
private final OpenAIService openAIService;
// Configuration for the OpenAI Text Processor
private final OpenAITextProcessorConfig openAITextProcessorConfig;
@Inject
public OpenAITextProcessor(
OpenAIService openAIService, OpenAITextProcessorConfig openAITextProcessorConfig) {
this.openAIService = openAIService;
this.openAITextProcessorConfig = openAITextProcessorConfig;
}
@Override
public SingleText run(SingleText inputData) {
CompletionRequest completionRequest =
OpenAITextProcessorRequestConverter.convert(openAITextProcessorConfig, inputData.getText());
CompletionResult completion = openAIService.createCompletion(completionRequest);
return SingleText.of(completion.getChoices().get(0).getText());
}
@Override
public ListenableFuture<SingleText> runAsync(SingleText inputData) {
CompletionRequest completionRequest =
OpenAITextProcessorRequestConverter.convert(openAITextProcessorConfig, inputData.getText());
return FluentFuture.from(openAIService.createCompletionAsync(completionRequest))
.transform(
(CompletionResult completion) ->
SingleText.of(completion.getChoices().get(0).getText()),
directExecutor());
}
}
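A usage sketch wiring the processor by hand instead of via injection (imports omitted; the prompt is illustrative):
OpenAITextProcessor textProcessor =
    new OpenAITextProcessor(
        OpenAIServiceProvider.createOpenAIService(),
        OpenAITextProcessorConfig.getDefaultInstance());
SingleText completion = textProcessor.run(SingleText.of("Write a haiku about autumn."));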
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/text/OpenAITextProcessorConfig.java
|
package ai.knowly.langtorch.processor.openai.text;
import ai.knowly.langtorch.processor.ProcessorConfig;
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
// Configuration class for OpenAITextProcessor with various options
@AutoValue
public abstract class OpenAITextProcessorConfig implements ProcessorConfig {
@VisibleForTesting static final String DEFAULT_MODEL = "text-davinci-003";
private static final int DEFAULT_MAX_TOKENS = 2048;
public static OpenAITextProcessorConfig getDefaultInstance() {
return builder().build();
}
public static Builder builder() {
return new AutoValue_OpenAITextProcessorConfig.Builder()
.setModel(DEFAULT_MODEL)
.setMaxTokens(DEFAULT_MAX_TOKENS)
.setLogitBias(new HashMap<>())
.setStop(new ArrayList<>());
}
// Method to create a builder from the current instance
abstract Builder toBuilder();
// Abstract methods for configuration properties
public abstract String getModel();
public abstract Optional<String> getSuffix();
public abstract Optional<Integer> getMaxTokens();
public abstract Optional<Double> getTemperature();
public abstract Optional<Double> getTopP();
public abstract Optional<Integer> getN();
public abstract Optional<Boolean> getStream();
public abstract Optional<Integer> getLogprobs();
public abstract Optional<Boolean> getEcho();
public abstract ImmutableList<String> getStop();
public abstract Optional<Double> getPresencePenalty();
public abstract Optional<Double> getFrequencyPenalty();
public abstract Optional<Integer> getBestOf();
public abstract ImmutableMap<String, Integer> getLogitBias();
public abstract Optional<String> getUser();
// Builder class for constructing OpenAITextProcessorConfig instances
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder setModel(String model);
public abstract Builder setSuffix(String suffix);
public abstract Builder setMaxTokens(Integer maxTokens);
public abstract Builder setTemperature(Double temperature);
public abstract Builder setTopP(Double topP);
public abstract Builder setN(Integer n);
public abstract Builder setStream(Boolean stream);
public abstract Builder setLogprobs(Integer logprobs);
public abstract Builder setEcho(Boolean echo);
public abstract Builder setStop(List<String> stop);
public abstract Builder setPresencePenalty(Double presencePenalty);
public abstract Builder setFrequencyPenalty(Double frequencyPenalty);
public abstract Builder setBestOf(Integer bestOf);
public abstract Builder setLogitBias(Map<String, Integer> logitBias);
public abstract Builder setUser(String user);
// Method to build an instance of OpenAITextProcessorConfig
public abstract OpenAITextProcessorConfig build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/processor/openai/text/OpenAITextProcessorRequestConverter.java
|
package ai.knowly.langtorch.processor.openai.text;
import ai.knowly.langtorch.llm.openai.schema.dto.completion.CompletionRequest;
// Converter class to convert OpenAITextProcessorConfig and a prompt string
// to a CompletionRequest
public final class OpenAITextProcessorRequestConverter {
private OpenAITextProcessorRequestConverter() {}
// Method to convert OpenAITextProcessorConfig and a prompt string
// to a CompletionRequest
public static CompletionRequest convert(
OpenAITextProcessorConfig openAITextProcessorConfig, String prompt) {
CompletionRequest completionRequest = new CompletionRequest();
// Set required configuration properties
completionRequest.setModel(openAITextProcessorConfig.getModel());
completionRequest.setPrompt(prompt);
// Set optional configuration properties
openAITextProcessorConfig.getSuffix().ifPresent(completionRequest::setSuffix);
openAITextProcessorConfig.getMaxTokens().ifPresent(completionRequest::setMaxTokens);
openAITextProcessorConfig.getTemperature().ifPresent(completionRequest::setTemperature);
openAITextProcessorConfig.getTopP().ifPresent(completionRequest::setTopP);
openAITextProcessorConfig.getN().ifPresent(completionRequest::setN);
openAITextProcessorConfig.getStream().ifPresent(completionRequest::setStream);
openAITextProcessorConfig.getLogprobs().ifPresent(completionRequest::setLogprobs);
openAITextProcessorConfig.getEcho().ifPresent(completionRequest::setEcho);
if (!openAITextProcessorConfig.getStop().isEmpty()) {
completionRequest.setStop(openAITextProcessorConfig.getStop());
}
openAITextProcessorConfig.getPresencePenalty().ifPresent(completionRequest::setPresencePenalty);
openAITextProcessorConfig
.getFrequencyPenalty()
.ifPresent(completionRequest::setFrequencyPenalty);
openAITextProcessorConfig.getBestOf().ifPresent(completionRequest::setBestOf);
completionRequest.setLogitBias(openAITextProcessorConfig.getLogitBias());
openAITextProcessorConfig.getUser().ifPresent(completionRequest::setUser);
return completionRequest;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/annotation/Prompt.java
|
package ai.knowly.langtorch.prompt.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Repeatable;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* The Prompt annotation is used to define a prompt template with variables. It contains a template
* string, an optional list of variable names, and an optional name for the prompt.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Repeatable(Prompts.class)
public @interface Prompt {
String template();
String[] variables() default {};
// The name of the prompt. This is only required when there are multiple Prompt annotations on a
// single class.
String name() default "";
// The examples for the prompt. This is used for few-shot prompting.
String[] examples() default {};
// The header for the examples. Optional.
String exampleHeader() default "";
}
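An illustrative annotated class (the placeholder syntax inside the template and the field names are assumptions, not taken from the source):
// Hypothetical prompt class; variable values are read from fields by PromptProcessor.
@Prompt(template = "Translate {{$text}} into {{$language}}.", variables = {"text", "language"})
public class TranslationPrompt {
  private final String text = "Good morning";
  private final String language = "French";
}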
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/annotation/PromptProcessor.java
|
package ai.knowly.langtorch.prompt.annotation;
import ai.knowly.langtorch.prompt.template.PromptTemplate;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* The PromptProcessor is responsible for processing Prompt and Prompts annotations on a class. It
* can create a PromptTemplate based on the annotations and the fields of the annotated class.
*/
public class PromptProcessor {
private PromptProcessor() {}
/**
* Create a PromptTemplate using the single Prompt annotation on the class. This method should be
* used when there is only one Prompt annotation on the class.
*
* @param clazz The annotated class.
* @param instance An instance of the annotated class.
* @return A PromptTemplate based on the annotation and the fields of the class.
*/
public static PromptTemplate createPromptTemplate(Class<?> clazz, Object instance) {
validateAnnotatedClass(clazz);
if (clazz.isAnnotationPresent(Prompts.class)
|| clazz.getAnnotationsByType(Prompt.class).length > 1) {
throw new IllegalArgumentException(
"Ambiguous prompt annotations. Please specify a prompt name.");
}
Prompt promptAnnotation = clazz.getAnnotation(Prompt.class);
return createPromptTemplateFromClassAndInstance(clazz, instance, promptAnnotation);
}
/**
* Create a PromptTemplate using a specific Prompt annotation on the class. This method should be
* used when there are multiple Prompt annotations on the class.
*
* @param clazz The annotated class.
* @param instance An instance of the annotated class.
* @param promptName The name of the Prompt annotation to use.
* @return A PromptTemplate based on the specified annotation and the fields of the class.
*/
public static PromptTemplate createPromptTemplate(
Class<?> clazz, Object instance, String promptName) {
validateAnnotatedClass(clazz);
Prompt[] prompts = getPrompts(clazz);
Prompt promptAnnotation = findPromptByName(promptName, prompts);
return createPromptTemplateFromClassAndInstance(clazz, instance, promptAnnotation);
}
private static PromptTemplate createPromptTemplateFromClassAndInstance(
Class<?> clazz, Object instance, Prompt promptAnnotation) {
String template = promptAnnotation.template();
String[] variableNames = promptAnnotation.variables();
String[] examples = promptAnnotation.examples();
String exampleHeader = promptAnnotation.exampleHeader();
Map<String, String> variableValues = extractVariableValues(clazz, instance, variableNames);
PromptTemplate.Builder builder =
PromptTemplate.builder().setTemplate(template).addAllVariableValuePairs(variableValues);
if (examples.length > 0) {
builder.setExamples(Arrays.asList(examples));
if (!exampleHeader.isEmpty()) {
builder.setExampleHeader(exampleHeader);
}
}
return builder.build();
}
/**
* Validates that the class has either a Prompt or Prompts annotation.
*
* @param clazz The class to validate.
*/
private static void validateAnnotatedClass(Class<?> clazz) {
if (!clazz.isAnnotationPresent(Prompt.class) && !clazz.isAnnotationPresent(Prompts.class)) {
throw new IllegalArgumentException("Class should be annotated with @Prompt or @Prompts");
}
}
/**
* Retrieves an array of Prompt annotations from the class. If the class has a Prompts annotation,
* it returns the array of Prompt annotations from it. If the class has a single Prompt
* annotation, it returns an array containing that annotation.
*
* @param clazz The class to get the Prompt annotations from.
* @return An array of Prompt annotations.
*/
private static Prompt[] getPrompts(Class<?> clazz) {
if (clazz.isAnnotationPresent(Prompts.class)) {
return clazz.getAnnotation(Prompts.class).value();
} else {
return new Prompt[] {clazz.getAnnotation(Prompt.class)};
}
}
/**
* Finds a Prompt annotation with the specified name in the array of Prompt annotations.
*
* @param promptName The name of the Prompt annotation to find.
* @param prompts The array of Prompt annotations to search in.
* @return The found Prompt annotation.
* @throws IllegalArgumentException if no Prompt annotation with the specified name is found.
*/
private static Prompt findPromptByName(String promptName, Prompt[] prompts) {
for (Prompt prompt : prompts) {
if (prompt.name().equals(promptName)) {
return prompt;
}
}
throw new IllegalArgumentException("No prompt found with the specified name.");
}
/**
* Extracts variable values from the fields of the class based on the variable names provided.
*
* @param clazz The class containing the fields.
* @param instance An instance of the class.
* @param variableNames An array of variable names to extract values for.
* @return A map containing variable names and their corresponding values.
* @throws IllegalArgumentException if a field with the specified name is not found or is
* inaccessible.
*/
private static Map<String, String> extractVariableValues(
Class<?> clazz, Object instance, String[] variableNames) {
Map<String, String> variableValues = new HashMap<>();
for (String variableName : variableNames) {
try {
Field field = clazz.getDeclaredField(variableName);
String fieldValue = (String) field.get(instance);
variableValues.put(variableName, fieldValue);
} catch (NoSuchFieldException | IllegalAccessException e) {
throw new IllegalArgumentException("Unable to extract variable value", e);
}
}
return variableValues;
}
}
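// Illustrative sketch (not part of the library source): a hedged example of turning an annotated
// class into a formatted prompt. The GreetingPrompt class, its template, and its field values are
// assumptions made only for this example.
@Prompt(template = "Say {{$greeting}} to {{$name}}.", variables = {"greeting", "name"})
class GreetingPrompt {
  // Package-private fields so PromptProcessor can read them reflectively from the same package.
  String greeting = "hello";
  String name = "Langtorch";
}
class PromptProcessorUsageSketch {
  public static void main(String[] args) {
    // Builds a PromptTemplate from the single @Prompt annotation and the instance's fields,
    // then renders it as "Say hello to Langtorch."
    PromptTemplate template =
        PromptProcessor.createPromptTemplate(GreetingPrompt.class, new GreetingPrompt());
    System.out.println(template.format());
  }
}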
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/annotation/Prompts.java
|
package ai.knowly.langtorch.prompt.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/** The Prompts annotation is a container for multiple Prompt annotations. */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Prompts {
Prompt[] value();
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/manager/FileLoadingException.java
|
package ai.knowly.langtorch.prompt.manager;
import java.io.IOException;
public class FileLoadingException extends RuntimeException {
public FileLoadingException(IOException e) {
super(e);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/manager/FileSaveException.java
|
package ai.knowly.langtorch.prompt.manager;
public class FileSaveException extends RuntimeException {
public FileSaveException(Exception e) {
super(e);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/manager/OptionalTypeAdapter.java
|
package ai.knowly.langtorch.prompt.manager;
import com.google.gson.Gson;
import com.google.gson.TypeAdapter;
import com.google.gson.TypeAdapterFactory;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Optional;
public class OptionalTypeAdapter<T> extends TypeAdapter<Optional<T>> {
public static final TypeAdapterFactory FACTORY =
new TypeAdapterFactory() {
@SuppressWarnings("unchecked")
@Override
public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> typeToken) {
Class<T> rawType = (Class<T>) typeToken.getRawType();
if (rawType != Optional.class) {
return null;
}
final Type[] typeArgs =
((ParameterizedType) typeToken.getType()).getActualTypeArguments();
TypeAdapter<?> adapter = gson.getAdapter(TypeToken.get(typeArgs[0]));
return (TypeAdapter<T>) new OptionalTypeAdapter<>(adapter);
}
};
private final TypeAdapter<T> delegate;
public OptionalTypeAdapter(TypeAdapter<T> delegate) {
this.delegate = delegate;
}
@Override
public Optional<T> read(JsonReader in) throws IOException {
if (in.peek() == JsonToken.NULL) {
in.nextNull();
return Optional.empty();
}
T value = delegate.read(in);
return Optional.ofNullable(value);
}
@Override
public void write(JsonWriter out, Optional<T> value) throws IOException {
if (value.isPresent()) {
delegate.write(out, value.get());
} else {
out.nullValue();
}
}
}
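// Illustrative sketch (not part of the library source): a hedged example of registering the
// factory with Gson so Optional-typed fields serialize as their wrapped value. The Note class
// and its field are assumptions made only for this example.
class OptionalTypeAdapterUsageSketch {
  static class Note {
    Optional<String> remark = Optional.of("hello");
  }
  public static void main(String[] args) {
    Gson gson =
        new com.google.gson.GsonBuilder()
            .registerTypeAdapterFactory(OptionalTypeAdapter.FACTORY)
            .create();
    // Prints something like {"remark":"hello"}; an empty Optional maps to a JSON null,
    // which Gson's default settings then omit from the output.
    System.out.println(gson.toJson(new Note()));
  }
}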
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/manager/PromptManager.java
|
package ai.knowly.langtorch.prompt.manager;
import ai.knowly.langtorch.prompt.template.PromptTemplate;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.io.IOUtils;
/** A class to manage prompt templates with multiple versions. */
public final class PromptManager {
private static final Gson gson =
new GsonBuilder()
.registerTypeAdapter(PromptTemplate.class, new PromptTemplateTypeAdapter())
.registerTypeAdapterFactory(OptionalTypeAdapter.FACTORY)
.create();
private static final String DEFAULT_FILE_NAME = "prompt-manager.json";
private final Map<Long, PromptTemplate> promptTemplateVersions;
/**
* Private constructor for PromptManager.
*
* @param promptTemplateVersions A map containing prompt templates and their version numbers.
*/
private PromptManager(Map<Long, PromptTemplate> promptTemplateVersions) {
this.promptTemplateVersions = promptTemplateVersions;
}
/**
* Creates a new instance of PromptManager.
*
* @return A new instance of PromptManager.
*/
public static PromptManager create() {
return new PromptManager(new HashMap<>());
}
/**
* Creates an instance of PromptManager from a JSON string.
*
* @param json The JSON string.
* @return The instance of PromptManager.
*/
private static PromptManager fromJson(String json) {
PromptManagerConfig config = gson.fromJson(json, PromptManagerConfig.class);
return new PromptManager(config.getPromptTemplates());
}
/**
* Loads a PromptManager from a file with the default file name.
*
* @param folderName The folder name.
* @return An instance of PromptManager.
*/
public static PromptManager fromFile(String folderName) {
return fromFile(folderName, DEFAULT_FILE_NAME);
}
/**
* Loads a PromptManager from a file with a specified file name.
*
* @param folderName The folder name.
* @param fileName The file name.
* @return An instance of PromptManager.
*/
public static PromptManager fromFile(String folderName, String fileName) {
String path = String.format("%s/%s", folderName, fileName);
try (FileInputStream inputStream = new FileInputStream(path)) {
String json = IOUtils.toString(inputStream, Charset.defaultCharset());
return fromJson(json);
} catch (IOException e) {
throw new FileLoadingException(e);
}
}
/**
* Saves the PromptManager to a file with the default file name.
*
* @param folderName The folder name.
*/
public void toFile(String folderName) {
toFile(folderName, DEFAULT_FILE_NAME);
}
/**
* Saves the PromptManager to a file with a specified file name.
*
* @param folderName The folder name.
* @param fileName The file name.
*/
public void toFile(String folderName, String fileName) {
    // Ensure the file name ends with the .json extension.
    String toWriteFileName = fileName.endsWith(".json") ? fileName : (fileName + ".json");
try (FileWriter fileWriter = new FileWriter(folderName + "/" + toWriteFileName)) {
fileWriter.write(toJson());
} catch (IOException e) {
throw new FileSaveException(e);
}
}
/**
* Converts the PromptManager to a JSON string.
*
* @return The JSON string.
*/
private String toJson() {
return gson.toJson(PromptManagerConfig.create(promptTemplateVersions));
}
/**
* Returns the prompt template for a specific version.
*
* @param version The version number.
* @return The PromptTemplate.
*/
public PromptTemplate getPromptTemplate(long version) {
return promptTemplateVersions.get(version);
}
/**
* Checks if the PromptManager contains a specific version.
*
* @param version The version number.
* @return A boolean indicating whether the version exists.
*/
public boolean containsVersion(long version) {
return promptTemplateVersions.containsKey(version);
}
/**
* Adds a new prompt template with the specified version.
*
* @param version The version number.
* @param promptTemplate The PromptTemplate to add.
* @return The updated PromptManager instance.
*/
public PromptManager addPromptTemplate(long version, PromptTemplate promptTemplate) {
promptTemplateVersions.put(version, promptTemplate);
return this;
}
/**
* Removes a prompt template with the specified version.
*
* @param version The version number.
*/
public void removePromptTemplate(long version) {
promptTemplateVersions.remove(version);
}
/**
* Updates a prompt template with the specified version.
*
* @param version The version number.
* @param promptTemplate The updated PromptTemplate.
*/
public void updatePromptTemplate(long version, PromptTemplate promptTemplate) {
promptTemplateVersions.put(version, promptTemplate);
}
}
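// Illustrative sketch (not part of the library source): a hedged example of versioning a template
// and persisting it. The folder path and template text are assumptions made for this example, and
// the target folder must already exist before toFile is called.
class PromptManagerUsageSketch {
  public static void main(String[] args) {
    PromptTemplate v1 =
        PromptTemplate.builder()
            .setTemplate("Summarize: {{$text}}")
            .addVariableValuePair("text", "Langtorch is a Java LLM framework.")
            .build();
    PromptManager manager = PromptManager.create().addPromptTemplate(1L, v1);
    // Writes <folder>/prompt-manager.json, then loads it back and looks up version 1.
    manager.toFile("/tmp/prompts");
    PromptTemplate reloaded = PromptManager.fromFile("/tmp/prompts").getPromptTemplate(1L);
    System.out.println(reloaded.format());
  }
}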
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/manager/PromptManagerConfig.java
|
package ai.knowly.langtorch.prompt.manager;
import ai.knowly.langtorch.prompt.template.PromptTemplate;
import com.google.gson.annotations.SerializedName;
import java.util.Map;
public class PromptManagerConfig {
@SerializedName("promptTemplates")
private Map<Long, PromptTemplate> promptTemplates;
private PromptManagerConfig(Map<Long, PromptTemplate> promptTemplates) {
this.promptTemplates = promptTemplates;
}
public static PromptManagerConfig create(Map<Long, PromptTemplate> promptTemplates) {
return new PromptManagerConfig(promptTemplates);
}
public Map<Long, PromptTemplate> getPromptTemplates() {
return promptTemplates;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/manager/PromptTemplateTypeAdapter.java
|
package ai.knowly.langtorch.prompt.manager;
import ai.knowly.langtorch.prompt.template.PromptTemplate;
import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class PromptTemplateTypeAdapter extends TypeAdapter<PromptTemplate> {
@Override
public void write(JsonWriter out, PromptTemplate promptTemplate) throws IOException {
out.beginObject();
out.name("template").value(promptTemplate.template().orElse(null));
out.name("exampleHeader").value(promptTemplate.exampleHeader().orElse(null));
out.name("examples").beginArray();
for (String example : promptTemplate.examples()) {
out.value(example);
}
out.endArray();
out.name("variables").beginObject();
for (Map.Entry<String, String> entry : promptTemplate.variables().entrySet()) {
out.name(entry.getKey()).value(entry.getValue());
}
out.endObject();
out.endObject();
}
@Override
public PromptTemplate read(JsonReader in) throws IOException {
PromptTemplate.Builder builder = PromptTemplate.builder();
in.beginObject();
while (in.hasNext()) {
String name = in.nextName();
switch (name) {
case "template":
builder.setTemplate(in.nextString());
break;
case "exampleHeader":
builder.setExampleHeader(in.nextString());
break;
case "examples":
in.beginArray();
List<String> examples = new ArrayList<>();
while (in.hasNext()) {
examples.add(in.nextString());
}
in.endArray();
builder.setExamples(examples);
break;
case "variables":
in.beginObject();
Map<String, String> variables = new HashMap<>();
while (in.hasNext()) {
String variableName = in.nextName();
String variableValue = in.nextString();
variables.put(variableName, variableValue);
}
in.endObject();
builder.addAllVariableValuePairs(variables);
break;
default:
in.skipValue();
break;
}
}
in.endObject();
return builder.build();
}
}
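// Illustrative sketch (not part of the library source): a hedged example of the JSON shape this
// adapter produces when registered directly with Gson. The template text below is an assumption.
class PromptTemplateTypeAdapterUsageSketch {
  public static void main(String[] args) {
    com.google.gson.Gson gson =
        new com.google.gson.GsonBuilder()
            .registerTypeAdapter(PromptTemplate.class, new PromptTemplateTypeAdapter())
            .create();
    PromptTemplate template =
        PromptTemplate.builder()
            .setTemplate("Hello {{$name}}")
            .addVariableValuePair("name", "Langtorch")
            .build();
    // Prints roughly {"template":"Hello {{$name}}","examples":[],"variables":{"name":"Langtorch"}}
    // (the null exampleHeader is omitted under Gson's default settings).
    System.out.println(gson.toJson(template, PromptTemplate.class));
  }
}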
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/prompt/template/PromptTemplate.java
|
package ai.knowly.langtorch.prompt.template;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A class representing a prompt template with variables.
*
* <p>The template is a string with variables in the form of {{$var}}. The variables are replaced
* with the values in the variables map.
*
* <p>Note: variables must be one or more word characters (letters, digits, or underscores).
*/
@AutoValue
public abstract class PromptTemplate implements Input, Output {
public static final String VARIABLE_TEMPLATE_PATTERN = "\\{\\{\\$([a-zA-Z0-9_]+)\\}\\}";
private static final String DEFAULT_EXAMPLE_HEADER = "Here's examples:\n";
public static Builder builder() {
return new AutoValue_PromptTemplate.Builder().setExamples(ImmutableList.of());
}
public static ImmutableList<String> extractVariableNames(String template) {
ImmutableList.Builder<String> builder = ImmutableList.builder();
Pattern compiledPattern = Pattern.compile(VARIABLE_TEMPLATE_PATTERN);
Matcher matcher = compiledPattern.matcher(template);
while (matcher.find()) {
builder.add(matcher.group(1));
}
return builder.build();
}
private static Optional<String> formatExamples(
List<String> examples, Optional<String> exampleHeader) {
if (examples.isEmpty()) {
return Optional.empty();
}
StringBuilder builder = new StringBuilder();
if (exampleHeader.isPresent()) {
if (!exampleHeader.get().endsWith("\n")) {
builder.append(exampleHeader.get()).append("\n");
} else {
builder.append(exampleHeader.get());
}
} else {
builder.append(DEFAULT_EXAMPLE_HEADER);
}
for (String example : examples) {
builder.append(example).append("\n");
}
return Optional.of(builder.toString());
}
public abstract Builder toBuilder();
public abstract Optional<String> template();
// Example header is a string that can be used to describe the examples.
public abstract Optional<String> exampleHeader();
// Examples are a list of strings that can be used for few-shot prompting by providing examples of
// the prompt.
public abstract ImmutableList<String> examples();
public abstract ImmutableMap<String, String> variables();
// Public methods
/**
* Validates the template and the variables map. <br>
* 1. Template is not empty. <br>
* 2. Number of variables in the template must match the number of variables in the map. <br>
* 3. All variables in the template must be present in the variables map.
*/
private void validate() {
if (!template().isPresent()) {
throw new IllegalArgumentException("Template is not present.");
}
ImmutableList<String> variableNamesFromTemplate = extractVariableNames(template().get());
ImmutableMap<String, String> variablesInMap = variables();
if (variableNamesFromTemplate.size() != variablesInMap.size()) {
throw new IllegalArgumentException(
"Number of variables in the template must match the number of variables in the map.");
}
variableNamesFromTemplate.forEach(
variableName -> {
if (!variablesInMap.containsKey(variableName)) {
throw new IllegalArgumentException(
String.format("Variable %s is not present in the variables map.", variableName));
}
});
}
/**
* Formats the template by replacing the variables with their values.
*
* @return The formatted template.
*/
public String format() {
validate();
Optional<String> formattedExample = formatExamples(examples(), exampleHeader());
if (variables().isEmpty()) {
if (formattedExample.isPresent()) {
return String.format("%s\n%s", template().get(), formattedExample.get());
}
return template().get();
}
Pattern compiledPattern = Pattern.compile(VARIABLE_TEMPLATE_PATTERN);
Matcher matcher = compiledPattern.matcher(template().get());
StringBuffer outputBuffer = new StringBuffer();
while (matcher.find()) {
String variableName = matcher.group(1);
String replacement = variables().getOrDefault(variableName, "");
matcher.appendReplacement(outputBuffer, Matcher.quoteReplacement(replacement));
}
matcher.appendTail(outputBuffer);
if (formattedExample.isPresent()) {
return String.format("%s\n%s", outputBuffer.toString(), formattedExample.get());
}
return outputBuffer.toString();
}
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder setTemplate(String template);
public abstract Builder setExamples(List<String> examples);
public abstract Builder setExampleHeader(String exampleHeader);
abstract ImmutableMap.Builder<String, String> variablesBuilder();
public Builder addVariableValuePair(String variableName, String value) {
variablesBuilder().put(variableName, value);
return this;
}
public Builder addAllVariableValuePairs(Map<String, String> variables) {
variablesBuilder().putAll(variables);
return this;
}
public abstract PromptTemplate build();
}
}
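// Illustrative sketch (not part of the library source): a hedged example of the builder and
// format() flow. The template text, variable value, and example strings are assumptions.
class PromptTemplateUsageSketch {
  public static void main(String[] args) {
    PromptTemplate template =
        PromptTemplate.builder()
            .setTemplate("Classify the sentiment of: {{$review}}")
            .addVariableValuePair("review", "The battery life is fantastic.")
            .setExamples(
                ImmutableList.of("\"I love it\" -> positive", "\"It broke in a day\" -> negative"))
            .setExampleHeader("Here are some labeled examples:")
            .build();
    // format() substitutes {{$review}} and appends the example header plus each example.
    System.out.println(template.format());
  }
}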
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/AssistantMessage.java
|
package ai.knowly.langtorch.schema.chat;
/** A message from the assistant. */
public final class AssistantMessage {
private AssistantMessage() {}
public static ChatMessage of(String content) {
return new ChatMessage(content, Role.ASSISTANT, null, null);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/ChatMessage.java
|
package ai.knowly.langtorch.schema.chat;
import ai.knowly.langtorch.llm.openai.schema.dto.completion.chat.FunctionCall;
import ai.knowly.langtorch.store.memory.MemoryValue;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class ChatMessage extends Message implements MemoryValue {
private final Role role;
private String name;
private FunctionCall functionCall;
@JsonCreator
public ChatMessage(
@JsonProperty("content") String content,
@JsonProperty("role") Role role,
@JsonProperty("name") String name,
@JsonProperty("function_call") FunctionCall functionCall) {
super(content);
this.role = role;
this.name = name;
this.functionCall = functionCall;
}
public Role getRole() {
return role;
}
public String getName() {
return name;
}
public FunctionCall getFunctionCall() {
return functionCall;
}
@Override
public String toString() {
return String.format("%s: %s", getRole(), getContent());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ChatMessage)) return false;
ChatMessage that = (ChatMessage) o;
if (getRole() != that.getRole()) return false;
return getContent() != null
? getContent().equals(that.getContent())
: that.getContent() == null;
  }
  @Override
  public int hashCode() {
    // equals() compares role and content, so hashCode must be derived from the same fields.
    int result = getRole() != null ? getRole().hashCode() : 0;
    return 31 * result + (getContent() != null ? getContent().hashCode() : 0);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/Message.java
|
package ai.knowly.langtorch.schema.chat;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class Message implements Input, Output {
private final String content;
@JsonCreator
public Message(@JsonProperty("content") String content) {
this.content = content;
}
public String getContent() {
return content;
}
@Override
public String toString() {
return String.format("Role: UNKNOWN(Base Message), Content: %s", getContent());
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/MiniMaxBotMessage.java
|
package ai.knowly.langtorch.schema.chat;
/**
* @author maxiao
* @date 2023/06/13
*/
public class MiniMaxBotMessage {
private MiniMaxBotMessage() {}
public static ChatMessage of(String content) {
return new ChatMessage(content, Role.MINIMAX_BOT, null, null);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/MiniMaxUserMessage.java
|
package ai.knowly.langtorch.schema.chat;
/**
* @author maxiao
* @date 2023/06/13
*/
public class MiniMaxUserMessage {
private MiniMaxUserMessage() {}
public static ChatMessage of(String content) {
return new ChatMessage(content, Role.MINIMAX_USER, null, null);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/Role.java
|
package ai.knowly.langtorch.schema.chat;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
/** An enum for the role of a message. */
public enum Role {
/** openai role */
SYSTEM("system"),
USER("user"),
ASSISTANT("assistant"),
FUNCTION("function"),
/** minimax role */
MINIMAX_USER("USER"),
MINIMAX_BOT("BOT");
@JsonValue
@JsonSerialize(using = ToStringSerializer.class)
private String value;
Role(String value) {
this.value = value;
}
@JsonCreator
public static Role fromString(String value) {
for (Role role : Role.values()) {
if (role.value.equals(value)) {
return role;
}
}
throw new IllegalArgumentException("Invalid value: " + value);
}
@Override
public String toString() {
return value;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/SystemMessage.java
|
package ai.knowly.langtorch.schema.chat;
/** A message from the system. */
public final class SystemMessage {
private SystemMessage() {}
public static ChatMessage of(String content) {
return new ChatMessage(content, Role.SYSTEM, null, null);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/chat/UserMessage.java
|
package ai.knowly.langtorch.schema.chat;
/** A message from the user. */
public final class UserMessage {
private UserMessage() {}
public static ChatMessage of(String content) {
return new ChatMessage(content, Role.USER, null, null);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/embeddings/Embedding.java
|
package ai.knowly.langtorch.schema.embeddings;
import static java.util.Collections.emptyList;
import ai.knowly.langtorch.schema.io.Output;
import java.util.List;
public class Embedding implements Output {
private final List<Double> vector;
private final List<Float> floatVector;
private Embedding(List<Double> vector, List<Float> floatVector) {
this.vector = vector;
this.floatVector = floatVector;
}
public static Embedding of(List<Double> vector) {
return new Embedding(vector, emptyList());
}
public static Embedding ofFloatVector(List<Float> floatVector) {
return new Embedding(emptyList(), floatVector);
}
public List<Double> getVector() {
return vector;
}
public List<Float> getFloatVector() {
return floatVector;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/embeddings/EmbeddingInput.java
|
package ai.knowly.langtorch.schema.embeddings;
import ai.knowly.langtorch.schema.io.Input;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class EmbeddingInput implements Input {
@Builder.Default private final List<String> input = new ArrayList<>();
@NonNull private String model;
private String user;
public Optional<String> getUser() {
return Optional.ofNullable(user);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/embeddings/EmbeddingOutput.java
|
package ai.knowly.langtorch.schema.embeddings;
import ai.knowly.langtorch.schema.io.Output;
import java.util.List;
public class EmbeddingOutput implements Output {
private final EmbeddingType type;
private final List<Embedding> value;
private EmbeddingOutput(EmbeddingType type, List<Embedding> value) {
this.type = type;
this.value = value;
}
public static EmbeddingOutput of(EmbeddingType type, List<Embedding> embeddings) {
return new EmbeddingOutput(type, embeddings);
}
public EmbeddingType getType() {
return type;
}
public List<Embedding> getValue() {
return value;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/embeddings/EmbeddingType.java
|
package ai.knowly.langtorch.schema.embeddings;
public enum EmbeddingType {
OPEN_AI,
MINI_MAX
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/embeddings/MiniMaxEmbeddingTypeScene.java
|
package ai.knowly.langtorch.schema.embeddings;
/**
* @author maxiao
* @date 2023/06/17
*/
public enum MiniMaxEmbeddingTypeScene {
  /** Used to generate vectors for the texts stored in the database (the retrieval corpus). */
  DB("db"),
  /** Used to generate vectors for the query text when retrieving. */
  QUERY("query"),
;
private String value;
MiniMaxEmbeddingTypeScene(String value) {
this.value = value;
}
@Override
public String toString() {
return value;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/image/Image.java
|
package ai.knowly.langtorch.schema.image;
import ai.knowly.langtorch.schema.io.Output;
public class Image implements Output {
private final String url;
private Image(String url) {
this.url = url;
}
public static Image of(String url) {
return new Image(url);
}
public String getUrl() {
return url;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/image/Images.java
|
package ai.knowly.langtorch.schema.image;
import ai.knowly.langtorch.schema.io.Output;
import java.util.List;
public class Images implements Output {
Long created;
List<Image> imageData;
private Images(Long created, List<Image> imageData) {
this.created = created;
this.imageData = imageData;
}
public static Images of(Long created, List<Image> images) {
return new Images(created, images);
}
public Long getCreated() {
return created;
}
public void setCreated(Long created) {
this.created = created;
}
public List<Image> getImageData() {
return imageData;
}
public void setImageData(List<Image> imageData) {
this.imageData = imageData;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/io/DomainDocument.java
|
package ai.knowly.langtorch.schema.io;
import java.util.Optional;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
@Data
@Builder(toBuilder = true, setterPrefix = "set")
@AllArgsConstructor(access = lombok.AccessLevel.PRIVATE)
public class DomainDocument implements Input, Output {
@NonNull private String pageContent;
private Metadata metadata;
private String id;
private Optional<Double> similarityScore;
public Optional<Metadata> getMetadata() {
return Optional.ofNullable(metadata);
}
public Optional<String> getId() {
return Optional.ofNullable(id);
}
public void setSimilarityScore(Optional<Double> similarityScore) {
this.similarityScore = similarityScore;
}
public Optional<Double> getSimilarityScore() {
return similarityScore;
}
}
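// Illustrative sketch (not part of the library source): a hedged example of building a document
// with the Lombok-generated builder. The page content, id, and metadata key are assumptions.
class DomainDocumentUsageSketch {
  public static void main(String[] args) {
    Metadata metadata = Metadata.builder().build();
    metadata.getValue().put("source", "faq.md");
    DomainDocument document =
        DomainDocument.builder()
            .setPageContent("Langtorch is a framework for building LLM applications in Java.")
            .setId("doc-1")
            .setMetadata(metadata)
            .build();
    System.out.println(document.getPageContent() + " (" + document.getId().orElse("?") + ")");
  }
}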
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/io/Input.java
|
package ai.knowly.langtorch.schema.io;
/** Input data to a model. */
public interface Input {}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/io/Metadata.java
|
package ai.knowly.langtorch.schema.io;
import java.util.HashMap;
import java.util.Map;
import lombok.*;
@Data
@Builder(toBuilder = true, setterPrefix = "set")
@AllArgsConstructor(access = AccessLevel.PRIVATE)
public class Metadata {
private static final Metadata DEFAULT_INSTANCE = Metadata.builder().build();
@Builder.Default private final Map<String, String> value = new HashMap<>();
public static Metadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/io/Output.java
|
package ai.knowly.langtorch.schema.io;
/** Output data from a model. */
public interface Output {}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/text/MiniMaxMultiChatMessage.java
|
package ai.knowly.langtorch.schema.text;
import ai.knowly.langtorch.llm.minimax.schema.dto.completion.ChatCompletionRequest;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
import ai.knowly.langtorch.store.memory.MemoryValue;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.stream.Collector;
import java.util.stream.Collectors;
/**
* @author maxiao
* @date 2023/06/11
*/
public class MiniMaxMultiChatMessage implements Input, Output, MemoryValue {
private final ImmutableList<ChatCompletionRequest.Message> messages;
private MiniMaxMultiChatMessage(Iterable<ChatCompletionRequest.Message> messages) {
this.messages = ImmutableList.copyOf(messages);
}
public static Collector<ChatCompletionRequest.Message, ?, MiniMaxMultiChatMessage>
toMultiChatMessage() {
return Collectors.collectingAndThen(
Collectors.toList(), list -> new MiniMaxMultiChatMessage(ImmutableList.copyOf(list)));
}
public static MiniMaxMultiChatMessage of(ChatCompletionRequest.Message... messages) {
return new MiniMaxMultiChatMessage(ImmutableList.copyOf(messages));
}
public static MiniMaxMultiChatMessage of(Iterable<ChatCompletionRequest.Message> messages) {
return new MiniMaxMultiChatMessage(ImmutableList.copyOf(messages));
}
public List<ChatCompletionRequest.Message> getMessages() {
return messages;
}
@Override
public String toString() {
return "MiniMaxMultiChatMessage{" + "messages=" + messages + '}';
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/text/MultiChatMessage.java
|
package ai.knowly.langtorch.schema.text;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
import ai.knowly.langtorch.store.memory.MemoryValue;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.stream.Collector;
import java.util.stream.Collectors;
public class MultiChatMessage implements Input, Output, MemoryValue {
private final ImmutableList<ChatMessage> messages;
private MultiChatMessage(Iterable<ChatMessage> messages) {
this.messages = ImmutableList.copyOf(messages);
}
public static Collector<ChatMessage, ?, MultiChatMessage> toMultiChatMessage() {
return Collectors.collectingAndThen(
Collectors.toList(), list -> new MultiChatMessage(ImmutableList.copyOf(list)));
}
public static MultiChatMessage copyOf(Iterable<ChatMessage> messages) {
return new MultiChatMessage(messages);
}
public static MultiChatMessage of(ChatMessage... messages) {
return new MultiChatMessage(ImmutableList.copyOf(messages));
}
public static MultiChatMessage of(Iterable<ChatMessage> messages) {
return new MultiChatMessage(ImmutableList.copyOf(messages));
}
public List<ChatMessage> getMessages() {
return messages;
}
@Override
public String toString() {
return "MultiChatMessage{" + "messages=" + messages + '}';
}
}
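// Illustrative sketch (not part of the library source): a hedged example of assembling a chat
// exchange. The message texts are assumptions made only for this example.
class MultiChatMessageUsageSketch {
  public static void main(String[] args) {
    MultiChatMessage conversation =
        MultiChatMessage.of(
            ai.knowly.langtorch.schema.chat.SystemMessage.of("You are a concise assistant."),
            ai.knowly.langtorch.schema.chat.UserMessage.of("What is a vector store?"));
    // Each entry is a ChatMessage whose toString() renders as "<role>: <content>".
    conversation.getMessages().forEach(System.out::println);
  }
}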
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/schema/text/SingleText.java
|
package ai.knowly.langtorch.schema.text;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
/** A model input/output that is a text string. */
public class SingleText implements Input, Output {
private final String text;
private SingleText(String text) {
this.text = text;
}
public static SingleText of(String text) {
return new SingleText(text);
}
public String getText() {
return text;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/memory/Memory.java
|
package ai.knowly.langtorch.store.memory;
import java.util.List;
/** Interface for a generic memory structure. */
public interface Memory<V extends MemoryValue, C extends MemoryContext> {
/**
* Adds a value to the memory.
*
* @param value the value
*/
void add(V value);
/** Retrieves all values added into the memory. */
List<V> getAll();
/** Removes all values from the memory. */
void clear();
/** Returns the context based on entries in the memory. */
C getMemoryContext();
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/memory/MemoryContext.java
|
package ai.knowly.langtorch.store.memory;
/** Interface for memory context generated by values stored in a {@link Memory}. */
public interface MemoryContext {
String get();
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/memory/MemoryValue.java
|
package ai.knowly.langtorch.store.memory;
/** Interface for the value stored in a {@link Memory}. */
public interface MemoryValue {}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/memory
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/memory/conversation/ConversationMemory.java
|
package ai.knowly.langtorch.store.memory.conversation;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.store.memory.Memory;
import java.util.ArrayList;
import java.util.List;
import lombok.Builder;
import lombok.Data;
/** Implementation of Memory for storing conversation-related key-value pairs. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class ConversationMemory implements Memory<ChatMessage, ConversationMemoryContext> {
@Builder.Default private List<ChatMessage> chatMessages = new ArrayList<>();
public static ConversationMemory getDefaultInstance() {
return ConversationMemory.builder().build();
}
@Override
public void add(ChatMessage value) {
chatMessages.add(value);
}
@Override
public List<ChatMessage> getAll() {
return chatMessages;
}
@Override
public void clear() {
chatMessages.clear();
}
@Override
public ConversationMemoryContext getMemoryContext() {
return ConversationMemoryContext.builder().setChatMessages(chatMessages).build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/memory
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/memory/conversation/ConversationMemoryContext.java
|
package ai.knowly.langtorch.store.memory.conversation;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.store.memory.MemoryContext;
import java.util.List;
import lombok.Builder;
import lombok.Data;
/** Implementation of MemoryContext for storing chat messages inside one conversation. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class ConversationMemoryContext implements MemoryContext {
private static final String DEFAULT_CONTEXT_HEADER = "Previous conversation:\n";
private static final String DEFAULT_FORMAT_FOR_EACH_MESSAGE = "%s: %s";
private final List<ChatMessage> chatMessages;
@Builder.Default private String contextHeader = DEFAULT_CONTEXT_HEADER;
@Builder.Default private String formatForEachMessage = DEFAULT_FORMAT_FOR_EACH_MESSAGE;
@Override
public String get() {
if (chatMessages.isEmpty()) {
return "";
}
StringBuilder context = new StringBuilder();
context.append(contextHeader).append("\n");
chatMessages.forEach(
chatMessage ->
context
.append(
String.format(
formatForEachMessage, chatMessage.getRole(), chatMessage.getContent()))
.append("\n"));
return context.toString();
}
}
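// Illustrative sketch (not part of the library source): a hedged example of accumulating chat
// messages in a ConversationMemory and rendering them as context. The message texts are assumptions.
class ConversationMemoryContextUsageSketch {
  public static void main(String[] args) {
    ConversationMemory memory = ConversationMemory.getDefaultInstance();
    memory.add(ai.knowly.langtorch.schema.chat.UserMessage.of("What is pgvector?"));
    memory.add(
        ai.knowly.langtorch.schema.chat.AssistantMessage.of(
            "A PostgreSQL extension for vector similarity search."));
    // Renders the context header followed by one "role: content" line per message.
    System.out.println(memory.getMemoryContext().get());
  }
}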
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/PGVectorStore.java
|
package ai.knowly.langtorch.store.vectordb;
import ai.knowly.langtorch.processor.EmbeddingProcessor;
import ai.knowly.langtorch.schema.embeddings.EmbeddingInput;
import ai.knowly.langtorch.schema.embeddings.EmbeddingOutput;
import ai.knowly.langtorch.schema.io.DomainDocument;
import ai.knowly.langtorch.schema.io.Metadata;
import ai.knowly.langtorch.store.vectordb.integration.VectorStore;
import ai.knowly.langtorch.store.vectordb.integration.pgvector.PGVectorService;
import ai.knowly.langtorch.store.vectordb.integration.pgvector.SqlCommandProvider;
import ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.PGVectorQueryParameters;
import ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.PGVectorStoreSpec;
import ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.PGVectorValues;
import ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.distance.DistanceStrategy;
import ai.knowly.langtorch.store.vectordb.integration.schema.SimilaritySearchQuery;
import com.google.common.flogger.FluentLogger;
import com.google.common.primitives.Floats;
import com.google.inject.Inject;
import com.pgvector.PGvector;
import lombok.NonNull;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
/** A vector store implementation using PostgreSQL and PGVector for storing and querying vectors. */
public class PGVectorStore implements VectorStore {
private static final int EMBEDDINGS_COLUMN_COUNT = 2;
private static final int EMBEDDINGS_INDEX_ID = 0;
private static final int EMBEDDINGS_INDEX_VECTOR = 1;
private static final int METADATA_COLUMN_COUNT = 4;
private static final int METADATA_INDEX_ID = 0;
private static final int METADATA_INDEX_KEY = 1;
private static final int METADATA_INDEX_VALUE = 2;
private static final int METADATA_INDEX_VECTOR_ID = 3;
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
@NonNull private final EmbeddingProcessor embeddingsProcessor;
private final PGVectorStoreSpec pgVectorStoreSpec;
private final SqlCommandProvider sqlCommandProvider;
@NonNull private final PGVectorService pgVectorService;
private final DistanceStrategy distanceStrategy;
@Inject
public PGVectorStore(
@NonNull EmbeddingProcessor embeddingsProcessor,
PGVectorStoreSpec pgVectorStoreSpec,
@NonNull PGVectorService pgVectorService,
DistanceStrategy distanceStrategy)
throws SQLException {
this.distanceStrategy = distanceStrategy;
this.pgVectorService = pgVectorService;
this.embeddingsProcessor = embeddingsProcessor;
this.pgVectorStoreSpec = pgVectorStoreSpec;
sqlCommandProvider =
new SqlCommandProvider(
pgVectorStoreSpec.getDatabaseName(), pgVectorStoreSpec.isOverwriteExistingTables());
createNecessaryTables();
}
private void createNecessaryTables() throws SQLException {
createEmbeddingsTable();
createMetadataTable();
}
/**
* Adds a list of documents to the PGVector database.
*
   * @param documents the documents to embed and insert
   * @return true if all vectors and metadata rows were inserted successfully, otherwise false
*/
@Override
public boolean addDocuments(List<DomainDocument> documents) {
if (documents.isEmpty()) {
return true;
}
PGVectorQueryParameters pgVectorQueryParameters = getVectorQueryParameters(documents);
List<PGVectorValues> vectorValues = pgVectorQueryParameters.getVectorValues();
    int result;
    int metadataResult;
    // Try-with-resources ensures both prepared statements are closed even on failure.
    try (PreparedStatement insertEmbeddingsStmt =
            pgVectorService.prepareStatement(
                sqlCommandProvider.getInsertEmbeddingsQuery(
                    pgVectorQueryParameters.getVectorParameters()));
        PreparedStatement insertMetadataStmt =
            pgVectorService.prepareStatement(
                sqlCommandProvider.getInsertMetadataQuery(
                    pgVectorQueryParameters.getMetadataParameters()))) {
      setQueryParameters(vectorValues, insertEmbeddingsStmt, insertMetadataStmt);
      result = insertEmbeddingsStmt.executeUpdate();
      metadataResult = insertMetadataStmt.executeUpdate();
    } catch (SQLException e) {
      logger.atSevere().withCause(e).log("SQL error while inserting embeddings and metadata");
      return false;
    }
return result == vectorValues.size()
&& metadataResult == pgVectorQueryParameters.getMetadataSize();
}
/**
   * Performs a similarity search using a vector query and returns the matching domain documents
   * with their similarity scores attached.
*/
@Override
public List<DomainDocument> similaritySearch(SimilaritySearchQuery similaritySearchQuery) {
float[] queryVectorValuesAsFloats = getFloatVectorValues(similaritySearchQuery.getQuery());
double[] queryVectorValuesAsDoubles = getDoubleVectorValues(queryVectorValuesAsFloats);
List<DomainDocument> documentsWithScores;
Map<String, DomainDocument> documentsWithScoresMap = new LinkedHashMap<>();
    // Try-with-resources ensures the prepared statement (and its result set) is closed.
    try (PreparedStatement neighborStmt =
        pgVectorService.prepareStatement(
            sqlCommandProvider.getSelectEmbeddingsQuery(
                distanceStrategy.getSyntax(), similaritySearchQuery.getTopK()))) {
      neighborStmt.setObject(1, new PGvector(queryVectorValuesAsFloats));
      ResultSet result = neighborStmt.executeQuery();
while (result.next()) {
String vectorId = (String) result.getObject(1);
PGvector pGvector = (PGvector) result.getObject(2);
String key = (String) result.getObject(3);
String value = (String) result.getObject(4);
double[] currentVector = getDoubleVectorValues(pGvector.toArray());
double score =
distanceStrategy.calculateDistance(queryVectorValuesAsDoubles, currentVector);
documentsWithScoresMap.computeIfAbsent(
vectorId,
s -> {
Metadata defaultMetadata = Metadata.builder().build();
return DomainDocument.builder()
.setId(vectorId)
.setPageContent("")
.setSimilarityScore(Optional.of(score))
.setMetadata(defaultMetadata)
.build();
});
DomainDocument documentWithScore = documentsWithScoresMap.get(vectorId);
saveValueToMetadataIfPresent(documentWithScore, key, value);
documentsWithScoresMap.put(
vectorId, getDocumentWithScoreWithPageContent(documentWithScore, key, value));
}
documentsWithScores = new ArrayList<>(documentsWithScoresMap.values());
} catch (SQLException e) {
      logger.atSevere().withCause(e).log("SQL error during similarity search");
return new ArrayList<>(documentsWithScoresMap.values());
}
return documentsWithScores;
}
private void createEmbeddingsTable() throws SQLException {
pgVectorService.executeUpdate(
sqlCommandProvider.getCreateEmbeddingsTableQuery(pgVectorStoreSpec.getVectorDimensions()));
}
private void createMetadataTable() throws SQLException {
pgVectorService.executeUpdate(sqlCommandProvider.getCreateMetadataTableQuery());
}
private PGVectorQueryParameters getVectorQueryParameters(List<DomainDocument> documents) {
List<PGVectorValues> vectorValues = new ArrayList<>();
StringBuilder vectorParameters = new StringBuilder();
StringBuilder metadataParameters = new StringBuilder();
int metadataSize = 0;
for (DomainDocument document : documents) {
List<Double> vector = createVector(document);
String id = document.getId().orElse(UUID.randomUUID().toString());
vectorValues.add(buildPGVectorValues(id, vector, document.getMetadata()));
vectorParameters.append(getVectorParameters());
metadataSize += processMetadata(metadataParameters, document.getMetadata());
}
trimStringBuilder(vectorParameters);
trimStringBuilder(metadataParameters);
return buildPGVectorQueryParameters(
vectorValues, vectorParameters.toString(), metadataParameters.toString(), metadataSize);
}
private PGVectorValues buildPGVectorValues(
String id, List<Double> vector, Optional<Metadata> metadata) {
return PGVectorValues.builder()
.setId(id)
.setValues(getFloatVectorValues(vector))
.setMetadata(metadata.orElse(Metadata.builder().build()))
.build();
}
private String getVectorParameters() {
return "(?, ?), "; // document id and vector
}
private int processMetadata(StringBuilder metadataParameters, Optional<Metadata> metadata) {
int metadataSize = 0;
if (!metadata.isPresent()) {
return metadataSize;
}
metadataSize += metadata.get().getValue().size();
for (int i = 0; i < metadata.get().getValue().entrySet().size(); i++) {
metadataParameters.append("(?, ?, ?, ?), "); // id, key, value, and document id
}
return metadataSize;
}
private void trimStringBuilder(StringBuilder stringBuilder) {
int index = stringBuilder.lastIndexOf(", ");
if (index > 0) {
stringBuilder.delete(index, stringBuilder.length());
}
}
private PGVectorQueryParameters buildPGVectorQueryParameters(
List<PGVectorValues> vectorValues,
String vectorParameters,
String metadataParameters,
int metadataSize) {
return PGVectorQueryParameters.builder()
.setVectorValues(vectorValues)
.setVectorParameters(vectorParameters)
.setMetadataParameters(metadataParameters)
.setMetadataSize(metadataSize)
.build();
}
private List<Double> createVector(DomainDocument document) {
EmbeddingOutput embeddingOutput =
embeddingsProcessor.run(
EmbeddingInput.builder()
.setModel(pgVectorStoreSpec.getModel())
.setInput(Collections.singletonList(document.getPageContent()))
.build());
return embeddingOutput.getValue().get(0).getVector();
}
private int setMetadataQueryParameters(
PGVectorValues values, int parameterIndex, PreparedStatement insertStmt) throws SQLException {
for (Map.Entry<String, String> entry : values.getMetadata().getValue().entrySet()) {
for (int j = 0; j < METADATA_COLUMN_COUNT; j++) {
switch (j) {
case METADATA_INDEX_ID:
String id = values.getId() + entry.getKey();
insertStmt.setString(parameterIndex, id);
break;
case METADATA_INDEX_KEY:
insertStmt.setString(parameterIndex, entry.getKey());
break;
case METADATA_INDEX_VALUE:
insertStmt.setString(parameterIndex, entry.getValue());
break;
case METADATA_INDEX_VECTOR_ID:
insertStmt.setString(parameterIndex, values.getId());
break;
default:
logger.atSevere().log("INVALID COLUM INDEX");
}
parameterIndex++;
}
}
return parameterIndex;
}
private int setVectorQueryParameters(
PGVectorValues values, int parameterIndex, PreparedStatement insertStmt) throws SQLException {
for (int i = 0; i < EMBEDDINGS_COLUMN_COUNT; i++) {
if (i == EMBEDDINGS_INDEX_ID) {
insertStmt.setString(parameterIndex, values.getId());
} else if (i == EMBEDDINGS_INDEX_VECTOR) {
insertStmt.setObject(parameterIndex, new PGvector(values.getValues()));
}
parameterIndex++;
}
return parameterIndex;
}
private void setQueryParameters(
List<PGVectorValues> vectorValues,
PreparedStatement insertEmbeddingsStmt,
PreparedStatement insertMetadataStmt)
throws SQLException {
int embeddingParameterIndex = 1;
int metadataParameterIndex = 1;
for (PGVectorValues values : vectorValues) {
embeddingParameterIndex =
setVectorQueryParameters(values, embeddingParameterIndex, insertEmbeddingsStmt);
metadataParameterIndex =
setMetadataQueryParameters(values, metadataParameterIndex, insertMetadataStmt);
}
}
private void saveValueToMetadataIfPresent(DomainDocument document, String key, String value) {
Optional<Metadata> metadata = document.getMetadata();
if (!metadata.isPresent() || key == null) return;
metadata.get().getValue().put(key, value);
}
private DomainDocument getDocumentWithScoreWithPageContent(
DomainDocument documentWithScore, String key, String value) {
if (key == null) return documentWithScore;
Optional<String> textKey = pgVectorStoreSpec.getTextKey();
if (!textKey.isPresent()) return documentWithScore;
boolean isTextKey = key.equals(textKey.get());
if (!isTextKey) return documentWithScore;
return documentWithScore.toBuilder().setPageContent(value).build();
}
private float[] getFloatVectorValues(List<Double> vectorValues) {
return Floats.toArray(vectorValues);
}
private double[] getDoubleVectorValues(float[] vectorValues) {
double[] doubles = new double[vectorValues.length];
for (int i = 0; i < vectorValues.length; i++) {
doubles[i] = vectorValues[i];
}
return doubles;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/EmbeddingProcessorType.java
|
package ai.knowly.langtorch.store.vectordb.integration;
/** The type of embedding processor to use. */
public enum EmbeddingProcessorType {
OPENAI,
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/EmbeddingProcessorTypeNotFound.java
|
package ai.knowly.langtorch.store.vectordb.integration;
/** Thrown when the embedding processor type is not found. */
public class EmbeddingProcessorTypeNotFound extends RuntimeException {
public EmbeddingProcessorTypeNotFound(String message) {
super(message);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/VectorStore.java
|
package ai.knowly.langtorch.store.vectordb.integration;
import ai.knowly.langtorch.schema.io.DomainDocument;
import ai.knowly.langtorch.store.vectordb.integration.schema.SimilaritySearchQuery;
import java.util.List;
/** A shared interface for all vector store databases. */
public interface VectorStore {
  // TODO: add updateDocuments and deleteDocuments methods
boolean addDocuments(List<DomainDocument> documents);
List<DomainDocument> similaritySearch(SimilaritySearchQuery similaritySearchQuery);
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/PGVectorSQLException.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector;
import java.sql.SQLException;
public class PGVectorSQLException extends RuntimeException {
public PGVectorSQLException(SQLException e) {
super(e);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/PGVectorService.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector;
import com.google.common.flogger.FluentLogger;
import com.pgvector.PGvector;
import javax.inject.Inject;
import java.sql.*;
/**
* The PGVectorService class provides methods for interacting with the PostgreSQL Vector extension.
* It allows executing SQL statements, preparing statements, and querying the database.
*/
public class PGVectorService {
private static final String CREATE_VECTOR_EXTENSION_QUERY =
"CREATE EXTENSION IF NOT EXISTS vector";
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final Connection connection;
private final Statement defaultStatement;
/**
* Constructs a new PGVectorService instance with the provided database connection.
*
* @param connection the database connection
*/
@Inject
public PGVectorService(Connection connection) {
this.connection = connection;
try {
PGvector.addVectorType(connection);
defaultStatement = connection.createStatement();
defaultStatement.executeUpdate(CREATE_VECTOR_EXTENSION_QUERY);
} catch (SQLException e) {
logger.atSevere().withCause(e).log("Error while initialising PGVectorService");
throw new PGVectorSQLException(e);
}
}
/**
* Executes the given SQL statement and returns the number of affected rows.
*
* @param sql the SQL statement to execute
* @return the number of affected rows or 0 for SQL statements that return nothing
* @throws SQLException if a database access error occurs or the SQL statement is invalid
*/
public int executeUpdate(String sql) throws SQLException {
return defaultStatement.executeUpdate(sql);
}
/**
* Creates a PreparedStatement object for sending parameterized SQL statements to the database.
*
* @param sql the SQL statement to prepare
* @return a new PreparedStatement object
* @throws SQLException if a database access error occurs or the SQL statement is invalid
*/
public PreparedStatement prepareStatement(String sql) throws SQLException {
return connection.prepareStatement(sql);
}
/**
* Executes the given SQL query and returns the ResultSet object generated by the query.
*
* @param sql the SQL query to execute
* @return the ResultSet object generated by the query
* @throws SQLException if a database access error occurs or the SQL statement is invalid
*/
public ResultSet query(String sql) throws SQLException {
return defaultStatement.executeQuery(sql);
}
}
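// Illustrative sketch (not part of the library source): a hedged example of wiring the service to
// a local PostgreSQL instance. The JDBC URL and credentials are placeholders, and the database
// must have the pgvector extension available for the constructor's CREATE EXTENSION call to succeed.
class PGVectorServiceUsageSketch {
  public static void main(String[] args) throws SQLException {
    Connection connection =
        DriverManager.getConnection(
            "jdbc:postgresql://localhost:5432/langtorch", "postgres", "postgres");
    PGVectorService service = new PGVectorService(connection);
    // Simple round trip: create a table, then count its rows.
    service.executeUpdate("CREATE TABLE IF NOT EXISTS demo_items (id TEXT PRIMARY KEY)");
    ResultSet rows = service.query("SELECT COUNT(*) FROM demo_items");
    if (rows.next()) {
      System.out.println("demo_items rows: " + rows.getLong(1));
    }
  }
}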
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/SqlCommandProvider.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.NonNull;
/**
* This class provides SQL commands for creating and querying the embeddings and metadata tables in
* a PostgreSQL database.
*/
@AllArgsConstructor(access = AccessLevel.PUBLIC)
public class SqlCommandProvider {
/** The name of the database that the embeddings and metadata tables will be created in. */
@NonNull private final String databaseName;
/**
* Whether or not to overwrite the existing embeddings and metadata tables if they already exist.
*/
private final boolean overwrite;
/**
* Returns a SQL query that will create the embeddings table.
*
* @param vectorDimensions The number of dimensions in the embeddings.
* @return The SQL query.
*/
public String getCreateEmbeddingsTableQuery(int vectorDimensions) {
if (vectorDimensions <= 0) {
throw new IllegalArgumentException(
"vectorDimensions must be greater than 0, was " + vectorDimensions);
}
String query = "";
if (overwrite) {
query += "DROP TABLE IF EXISTS " + getEmbeddingsTableName() + " CASCADE; ";
}
query += "CREATE TABLE IF NOT EXISTS ";
query +=
getEmbeddingsTableName()
+ " ("
+ "id TEXT PRIMARY KEY, "
+ "embedding vector("
+ vectorDimensions
+ ")"
+ ")";
return query;
}
/**
* Returns a SQL query that will create the metadata table.
*
* @return The SQL query.
*/
public String getCreateMetadataTableQuery() {
String query = "";
if (overwrite) {
query += "DROP TABLE IF EXISTS " + getMetadataTableName() + "; ";
}
query += "CREATE TABLE IF NOT EXISTS ";
query +=
getMetadataTableName()
+ " ("
+ "id TEXT PRIMARY KEY, "
+ // vectorId + key
"key TEXT, "
+ "value TEXT ,"
+ "vector_id TEXT ,"
+ "FOREIGN KEY (vector_id) REFERENCES "
+ getEmbeddingsTableName()
+ "(id)"
+ ")";
return query;
}
/**
* Returns a SQL query that will insert a new row into the embeddings table.
*
* @param parameters The parameters for the insert statement.
* @return The SQL query.
*/
public String getInsertEmbeddingsQuery(String parameters) {
return "INSERT INTO "
+ getEmbeddingsTableName()
+ " "
+ "(id, embedding) "
+ "VALUES "
+ parameters;
}
/**
* Returns a SQL query that will insert a new row into the metadata table.
*
* @param parameters The parameters for the insert statement.
* @return The SQL query.
*/
public String getInsertMetadataQuery(String parameters) {
return "INSERT INTO "
+ getMetadataTableName()
+ " "
+ "(id, key, value, vector_id) "
+ "VALUES "
+ parameters;
}
/**
* Returns a SQL query that will select a subset of the embeddings and metadata rows.
*
* @param distanceStrategy The distance strategy to use when ordering the results.
* @param limit The maximum number of rows to return.
* @return The SQL query.
*/
public String getSelectEmbeddingsQuery(String distanceStrategy, long limit) {
return "SELECT "
+ getEmbeddingsTableName()
+ ".id, embedding, key, value FROM "
+ "("
+ "SELECT "
+ getEmbeddingsTableName()
+ ".id, embedding "
+ "FROM "
+ getEmbeddingsTableName()
+ " "
+ "LIMIT "
+ limit
+ " "
+ ") AS "
+ getEmbeddingsTableName()
+ " "
+ "LEFT JOIN "
+ getMetadataTableName()
+ " ON "
+ getEmbeddingsTableName()
+ ".id = "
+ getMetadataTableName()
+ ".vector_id "
+ "ORDER BY embedding "
+ distanceStrategy
+ " ? ";
}
private String getEmbeddingsTableName() {
return databaseName + "_embeddings";
}
private String getMetadataTableName() {
return getEmbeddingsTableName() + "_metadata";
}
}
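// --- Usage sketch (not part of the original source) --------------------------
// A small illustration of the SQL this provider generates; the database-name
// prefix, dimension, limit, and placeholder strings are arbitrary example values.
class SqlCommandProviderUsageSketch {
  public static void main(String[] args) {
    SqlCommandProvider provider = new SqlCommandProvider("documents", /* overwrite= */ true);
    // CREATE statements for the "documents_embeddings" and
    // "documents_embeddings_metadata" tables.
    System.out.println(provider.getCreateEmbeddingsTableQuery(1536));
    System.out.println(provider.getCreateMetadataTableQuery());
    // INSERT statements receive a pre-built "(?, ?, ...)" parameter placeholder string.
    System.out.println(provider.getInsertEmbeddingsQuery("(?, ?)"));
    System.out.println(provider.getInsertMetadataQuery("(?, ?, ?, ?)"));
    // SELECT ordered by cosine distance ("<=>"), scanning at most 10 candidate rows.
    System.out.println(provider.getSelectEmbeddingsQuery("<=>", 10));
  }
}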
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/PGVectorQueryParameters.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema;
import lombok.*;
import java.util.List;
/** Represents the query parameters for executing a PGVector query. */
@Data
@AllArgsConstructor(access = AccessLevel.PRIVATE)
@Builder(toBuilder = true, setterPrefix = "set")
public class PGVectorQueryParameters {
@NonNull private final List<PGVectorValues> vectorValues;
@NonNull private final String vectorParameters;
@NonNull private final String metadataParameters;
private final int metadataSize;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/PGVectorStoreSpec.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
import java.util.Optional;
/** Represents the specification for a PGVector store. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class PGVectorStoreSpec {
@Builder.Default private final String model = "text-embedding-ada-002";
@NonNull private final String databaseName;
private final String textKey;
private final int vectorDimensions;
private final boolean overwriteExistingTables;
public Optional<String> getTextKey() {
return Optional.ofNullable(textKey);
}
}
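// --- Usage sketch (not part of the original source) --------------------------
// Building a spec with the lombok-generated builder (setterPrefix = "set"); the
// model defaults to "text-embedding-ada-002" when not set, and 1536 matches that
// model's embedding dimension. All values below are placeholders.
class PGVectorStoreSpecUsageSketch {
  static PGVectorStoreSpec example() {
    return PGVectorStoreSpec.builder()
        .setDatabaseName("documents")
        .setTextKey("text")
        .setVectorDimensions(1536)
        .setOverwriteExistingTables(false)
        .build();
  }
}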
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/PGVectorValues.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema;
import ai.knowly.langtorch.schema.io.Metadata;
import lombok.*;
/** Represents the values of a PGVector. */
@Data
@AllArgsConstructor(access = AccessLevel.PRIVATE)
@Builder(toBuilder = true, setterPrefix = "set")
public class PGVectorValues {
@NonNull private final String id;
private final float @NonNull [] values;
private final Metadata metadata;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/distance/CosineDistanceStrategy.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.distance;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
@AllArgsConstructor(access = AccessLevel.PACKAGE)
public class CosineDistanceStrategy implements DistanceStrategy {
@Override
public String getSyntax() {
return "<=>";
}
@Override
public double calculateDistance(double[] vector1, double[] vector2) {
if (vector1.length != vector2.length) {
throw new IllegalArgumentException("Vector dimensions do not match.");
}
double dotProduct = 0.0;
double normA = 0.0;
double normB = 0.0;
for (int i = 0; i < vector1.length; i++) {
dotProduct += vector1[i] * vector2[i];
normA += Math.pow(vector1[i], 2);
normB += Math.pow(vector2[i], 2);
}
// Note: this returns the cosine *similarity*; the SQL operator returned by
// getSyntax() ("<=>") computes cosine distance, which is 1 - similarity.
return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/distance/DistanceStrategies.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.distance;
import lombok.NonNull;
/** Utility class to get instances for vector distance calculating strategies. */
public class DistanceStrategies {
// Private constructor to hide the implicit public one
private DistanceStrategies() {
// Empty constructor
}
@NonNull
public static DistanceStrategy euclidean() {
return new EuclideanDistanceStrategy();
}
@NonNull
public static DistanceStrategy innerProduct() {
return new InnerProductDistanceStrategy();
}
@NonNull
public static DistanceStrategy cosine() {
return new CosineDistanceStrategy();
}
}
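// --- Usage sketch (not part of the original source) --------------------------
// Comparing the three strategies on a pair of toy unit vectors; getSyntax()
// exposes the pgvector operator each strategy maps to.
class DistanceStrategiesUsageSketch {
  public static void main(String[] args) {
    double[] a = {1.0, 0.0, 0.0};
    double[] b = {0.0, 1.0, 0.0};
    DistanceStrategy euclidean = DistanceStrategies.euclidean(); // "<->"
    DistanceStrategy innerProduct = DistanceStrategies.innerProduct(); // "<#>"
    DistanceStrategy cosine = DistanceStrategies.cosine(); // "<=>"
    System.out.println(euclidean.calculateDistance(a, b)); // sqrt(2) ~= 1.414
    System.out.println(innerProduct.calculateDistance(a, b)); // 0.0
    System.out.println(cosine.calculateDistance(a, b)); // cosine similarity = 0.0
  }
}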
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/distance/DistanceStrategy.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.distance;
public interface DistanceStrategy {
/** Returns the pgvector SQL operator (e.g. "<->", "<#>", "<=>") used to order results for this strategy. */
String getSyntax();
/**
* Calculates the distance between two vectors based on the specified distance strategy.
*
* @param vector1 The first vector.
* @param vector2 The second vector.
* @return The calculated distance.
* @throws IllegalArgumentException if the vector dimensions do not match.
*/
double calculateDistance(double[] vector1, double[] vector2);
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/distance/EuclideanDistanceStrategy.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.distance;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
@AllArgsConstructor(access = AccessLevel.PACKAGE)
public class EuclideanDistanceStrategy implements DistanceStrategy {
@Override
public String getSyntax() {
return "<->";
}
@Override
public double calculateDistance(double[] vector1, double[] vector2) {
if (vector1.length != vector2.length) {
throw new IllegalArgumentException("Vector dimensions do not match.");
}
double sumOfSquaredDifferences = 0.0;
for (int i = 0; i < vector1.length; i++) {
double difference = vector1[i] - vector2[i];
sumOfSquaredDifferences += difference * difference;
}
return Math.sqrt(sumOfSquaredDifferences);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pgvector/schema/distance/InnerProductDistanceStrategy.java
|
package ai.knowly.langtorch.store.vectordb.integration.pgvector.schema.distance;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
@AllArgsConstructor(access = AccessLevel.PACKAGE)
public class InnerProductDistanceStrategy implements DistanceStrategy {
@Override
public String getSyntax() {
return "<#>";
}
@Override
public double calculateDistance(double[] vector1, double[] vector2) {
if (vector1.length != vector2.length) {
throw new IllegalArgumentException("Vector dimensions do not match.");
}
double innerProduct = 0;
for (int i = 0; i < vector1.length; i++) {
innerProduct += vector1[i] * vector2[i];
}
// Note: pgvector's "<#>" operator returns the *negative* inner product (so
// ascending order yields the closest matches); this helper returns the raw
// positive inner product.
return innerProduct;
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/PineconeAPI.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.delete.DeleteRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.delete.DeleteResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.fetch.FetchResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.query.QueryRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.query.QueryResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.update.UpdateRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.update.UpdateResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.upsert.UpsertRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.upsert.UpsertResponse;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.List;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.POST;
import retrofit2.http.Query;
public interface PineconeAPI {
@POST("/vectors/upsert")
ListenableFuture<UpsertResponse> upsert(@Body UpsertRequest request);
@POST("/query")
ListenableFuture<QueryResponse> query(@Body QueryRequest request);
@POST("/vectors/delete")
ListenableFuture<DeleteResponse> delete(@Body DeleteRequest request);
@GET("/vectors/fetch")
ListenableFuture<FetchResponse> fetch(
@Query("namespace") String namespace, @Query("ids") List<String> ids);
@POST("/vectors/update")
ListenableFuture<UpdateResponse> update(@Body UpdateRequest request);
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/PineconeAuthenticationInterceptor.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone;
import java.io.IOException;
import java.util.Objects;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
/** OkHttp interceptor that attaches the Pinecone API key and JSON content-type headers to every request. */
public class PineconeAuthenticationInterceptor implements Interceptor {
private final String apiKey;
PineconeAuthenticationInterceptor(String apiKey) {
Objects.requireNonNull(apiKey, "Pinecone API key required");
this.apiKey = apiKey;
}
@Override
public Response intercept(Chain chain) throws IOException {
Request request =
chain
.request()
.newBuilder()
.header("accept", "application/json")
.header("content-type", "application/json")
.header("Api-Key", apiKey)
.build();
return chain.proceed(request);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/PineconeService.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.PineconeApiExecutionException;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.PineconeHttpParseException;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.PineconeInterruptedException;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.PineconeServiceConfig;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.delete.DeleteRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.delete.DeleteResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.fetch.FetchRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.fetch.FetchResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.query.QueryRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.query.QueryResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.update.UpdateRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.update.UpdateResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.upsert.UpsertRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.upsert.UpsertResponse;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.google.common.flogger.FluentLogger;
import com.google.common.util.concurrent.ListenableFuture;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import okhttp3.*;
import okhttp3.OkHttpClient.Builder;
import okhttp3.logging.HttpLoggingInterceptor;
import retrofit2.HttpException;
import retrofit2.Retrofit;
import retrofit2.adapter.guava.GuavaCallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;
/** Service for calling the Pinecone vector database REST API. */
public class PineconeService {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final PineconeAPI api;
private PineconeService(final PineconeServiceConfig pineconeServiceConfig) {
ObjectMapper mapper = defaultObjectMapper();
OkHttpClient client = buildClient(pineconeServiceConfig);
Retrofit retrofit = defaultRetrofit(pineconeServiceConfig.endpoint(), client, mapper);
this.api = retrofit.create(PineconeAPI.class);
}
private PineconeService(final PineconeAPI api) {
this.api = api;
}
public static PineconeService create(PineconeAPI api) {
return new PineconeService(api);
}
public static PineconeService create(PineconeServiceConfig pineconeServiceConfig) {
return new PineconeService(pineconeServiceConfig);
}
public static <T> T execute(ListenableFuture<T> apiCall) {
try {
return apiCall.get();
} catch (InterruptedException e) {
// Restore the interrupt status
Thread.currentThread().interrupt();
// Optionally, log or handle the exception here.
logger.atSevere().withCause(e).log("Thread was interrupted during API call.");
throw new PineconeInterruptedException(e);
} catch (ExecutionException e) {
if (e.getCause() instanceof HttpException) {
HttpException httpException = (HttpException) e.getCause();
try {
String errorBody = httpException.response().errorBody().string();
logger.atSevere().log("HTTP Error: %s", errorBody);
throw new PineconeHttpParseException(errorBody);
} catch (IOException ioException) {
logger.atSevere().withCause(ioException).log("Error while reading errorBody");
}
}
throw new PineconeApiExecutionException(e);
}
}
public static ObjectMapper defaultObjectMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
return mapper;
}
public static OkHttpClient buildClient(PineconeServiceConfig pineconeServiceConfig) {
logger.atInfo().log("Pinecone:" + pineconeServiceConfig.apiKey());
Builder builder =
new Builder()
.addInterceptor(new PineconeAuthenticationInterceptor(pineconeServiceConfig.apiKey()))
.connectionPool(new ConnectionPool(5, 1, TimeUnit.SECONDS))
.readTimeout(pineconeServiceConfig.timeoutDuration().toMillis(), TimeUnit.MILLISECONDS);
if (pineconeServiceConfig.enableLogging()) {
HttpLoggingInterceptor logging = new HttpLoggingInterceptor();
builder.addInterceptor(logging.setLevel(HttpLoggingInterceptor.Level.BODY));
}
return builder.build();
}
public static Retrofit defaultRetrofit(
String endpoint, OkHttpClient client, ObjectMapper mapper) {
return new Retrofit.Builder()
.baseUrl(endpoint.startsWith("https://") ? endpoint : "https://" + endpoint)
.client(client)
.addConverterFactory(JacksonConverterFactory.create(mapper))
.addCallAdapterFactory(GuavaCallAdapterFactory.create())
.build();
}
public UpsertResponse upsert(UpsertRequest request) {
return execute(api.upsert(request));
}
public ListenableFuture<UpsertResponse> upsertAsync(UpsertRequest request) {
return api.upsert(request);
}
public QueryResponse query(QueryRequest request) {
return execute(api.query(request));
}
public ListenableFuture<QueryResponse> queryAsync(QueryRequest request) {
return api.query(request);
}
public DeleteResponse delete(DeleteRequest request) {
return execute(api.delete(request));
}
public ListenableFuture<DeleteResponse> deleteAsync(DeleteRequest request) {
return api.delete(request);
}
public FetchResponse fetch(FetchRequest request) {
return execute(api.fetch(request.getNamespace(), request.getIds()));
}
public ListenableFuture<FetchResponse> fetchAsync(FetchRequest request) {
return api.fetch(request.getNamespace(), request.getIds());
}
public UpdateResponse update(UpdateRequest request) {
return execute(api.update(request));
}
public ListenableFuture<UpdateResponse> updateAsync(UpdateRequest request) {
return api.update(request);
}
}
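// --- Usage sketch (not part of the original source) --------------------------
// Wiring a PineconeService from its config and issuing one upsert and one query.
// The endpoint and API key are placeholders for a real Pinecone index, and the
// builder setter names mirror those used elsewhere in this module; setTopK is
// assumed to take a long and setVector a List<Double>.
class PineconeServiceUsageSketch {
  public static void main(String[] args) {
    PineconeService service =
        PineconeService.create(
            PineconeServiceConfig.builder()
                .setApiKey("YOUR_PINECONE_API_KEY")
                .setEndpoint("your-index-abc123.svc.us-east-1-aws.pinecone.io")
                .build());
    ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.Vector vector =
        ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.Vector.builder()
            .setId("doc-1")
            .setValues(java.util.Arrays.asList(0.1, 0.2, 0.3))
            .build();
    UpsertResponse upserted =
        service.upsert(
            UpsertRequest.builder()
                .setVectors(java.util.Collections.singletonList(vector))
                .build());
    System.out.println(upserted.getUpsertedCount());
    QueryResponse nearest =
        service.query(
            QueryRequest.builder()
                .setVector(java.util.Arrays.asList(0.1, 0.2, 0.3))
                .setTopK(3L)
                .setIncludeMetadata(true)
                .build());
    System.out.println(nearest.getMatches());
  }
}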
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/PineconeVectorStore.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone;
import ai.knowly.langtorch.processor.EmbeddingProcessor;
import ai.knowly.langtorch.schema.embeddings.EmbeddingInput;
import ai.knowly.langtorch.schema.embeddings.EmbeddingOutput;
import ai.knowly.langtorch.schema.io.DomainDocument;
import ai.knowly.langtorch.schema.io.Metadata;
import ai.knowly.langtorch.store.vectordb.integration.VectorStore;
import ai.knowly.langtorch.store.vectordb.integration.schema.SimilaritySearchQuery;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.PineconeVectorStoreSpec;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.Vector;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.query.Match;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.query.QueryRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.query.QueryResponse;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.upsert.UpsertRequest;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.upsert.UpsertResponse;
import com.google.common.collect.ImmutableList;
import java.util.*;
import javax.inject.Inject;
/**
* The PineconeVectorStore class is an implementation of the VectorStore interface, which provides
* integration with the Pinecone service for storing and querying vectors.
*/
public class PineconeVectorStore implements VectorStore {
// Dependencies injected via the constructor.
private final EmbeddingProcessor embeddingProcessor;
private final PineconeVectorStoreSpec pineconeVectorStoreSpec;
@Inject
public PineconeVectorStore(
EmbeddingProcessor embeddingProcessor, PineconeVectorStoreSpec pineconeVectorStoreSpec) {
this.embeddingProcessor = embeddingProcessor;
this.pineconeVectorStoreSpec = pineconeVectorStoreSpec;
}
/**
* Adds the specified documents to the Pinecone vector store database.
*
* @return true if documents added successfully, otherwise false
*/
@Override
public boolean addDocuments(List<DomainDocument> documents) {
if (documents.isEmpty()) return true;
return addVectors(
documents.stream().map(this::createVector).collect(ImmutableList.toImmutableList()));
}
/**
* Adds a list of vectors to the Pinecone vector store database.
*
* @return true if vectors added successfully, otherwise false
*/
private boolean addVectors(List<Vector> vectors) {
UpsertRequest.UpsertRequestBuilder upsertRequestBuilder =
UpsertRequest.builder().setVectors(vectors);
this.pineconeVectorStoreSpec.getNamespace().ifPresent(upsertRequestBuilder::setNamespace);
UpsertResponse response =
this.pineconeVectorStoreSpec.getPineconeService().upsert(upsertRequestBuilder.build());
return response.getUpsertedCount() == vectors.size();
}
/**
* Creates an instance of Vector from given DomainDocument
*
* @param document the document from which a Vector will be created
* @return an instance of {@link Vector}
*/
private Vector createVector(DomainDocument document) {
EmbeddingOutput embeddingOutput =
embeddingProcessor.run(
EmbeddingInput.builder()
.setModel(pineconeVectorStoreSpec.getModel())
.setInput(Collections.singletonList(document.getPageContent()))
.build());
return Vector.builder()
.setId(document.getId().orElse(UUID.randomUUID().toString()))
.setMetadata(document.getMetadata().orElse(Metadata.getDefaultInstance()).getValue())
.setValues(embeddingOutput.getValue().get(0).getVector())
.build();
}
/**
* Performs a similarity search using a vector query and returns the matching schema documents
* with their similarity scores attached.
*/
@Override
public List<DomainDocument> similaritySearch(SimilaritySearchQuery similaritySearchQuery) {
QueryRequest.QueryRequestBuilder requestBuilder =
QueryRequest.builder()
.setIncludeMetadata(true)
.setTopK(similaritySearchQuery.getTopK())
.setVector(similaritySearchQuery.getQuery())
.setFilter(similaritySearchQuery.getFilter());
pineconeVectorStoreSpec.getNamespace().ifPresent(requestBuilder::setNamespace);
QueryResponse response =
pineconeVectorStoreSpec.getPineconeService().query(requestBuilder.build());
List<DomainDocument> result = new ArrayList<>();
// Map Pinecone match metadata back onto schema documents.
if (response.getMatches() != null) {
for (Match match : response.getMatches()) {
if (!pineconeVectorStoreSpec.getTextKey().isPresent()) {
continue;
}
Metadata metadata =
match.getMetadata() == null
? Metadata.getDefaultInstance()
: Metadata.builder().setValue(match.getMetadata()).build();
if (match.getScore() != null) {
result.add(
DomainDocument.builder()
.setPageContent(
metadata.getValue().get(this.pineconeVectorStoreSpec.getTextKey().get()))
.setMetadata(metadata)
.setSimilarityScore(Optional.of(match.getScore()))
.build());
}
}
}
return result;
}
}
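// --- Usage sketch (not part of the original source) --------------------------
// Assembling the store from an already-constructed EmbeddingProcessor and
// PineconeService, indexing one document, and running a similarity search. The
// SimilaritySearchQuery builder setters (setQuery, setTopK) are assumed to follow
// the same lombok setterPrefix = "set" convention as the rest of this module,
// with topK as a long and the query embedding as a List<Double>.
class PineconeVectorStoreUsageSketch {
  static List<DomainDocument> indexAndSearch(
      EmbeddingProcessor embeddingProcessor,
      PineconeService pineconeService,
      List<Double> queryEmbedding) {
    PineconeVectorStore store =
        new PineconeVectorStore(
            embeddingProcessor,
            PineconeVectorStoreSpec.builder()
                .setPineconeService(pineconeService)
                .setNamespace("demo")
                .setTextKey("text")
                .build());
    // The page content is embedded via the EmbeddingProcessor and upserted into Pinecone;
    // the metadata "text" entry is what similaritySearch reads back as page content.
    store.addDocuments(
        Collections.singletonList(
            DomainDocument.builder()
                .setPageContent("Langtorch is a framework for building LLM applications in Java.")
                .setMetadata(
                    Metadata.builder()
                        .setValue(
                            Collections.singletonMap(
                                "text",
                                "Langtorch is a framework for building LLM applications in Java."))
                        .build())
                .build()));
    return store.similaritySearch(
        SimilaritySearchQuery.builder().setQuery(queryEmbedding).setTopK(3L).build());
  }
}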
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/PineconeApiExecutionException.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema;
import java.util.concurrent.ExecutionException;
public class PineconeApiExecutionException extends RuntimeException {
public PineconeApiExecutionException(ExecutionException e) {
super(e);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/PineconeHttpParseException.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema;
public class PineconeHttpParseException extends RuntimeException {
public PineconeHttpParseException(String msg) {
super(msg);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/PineconeInterruptedException.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema;
public class PineconeInterruptedException extends RuntimeException {
public PineconeInterruptedException(InterruptedException e) {
super(e);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/PineconeServiceConfig.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema;
import com.google.auto.value.AutoValue;
import java.time.Duration;
@AutoValue
public abstract class PineconeServiceConfig {
public static Builder builder() {
return new AutoValue_PineconeServiceConfig.Builder()
.setTimeoutDuration(Duration.ofSeconds(10))
.setEnableLogging(false);
}
public abstract String apiKey();
public abstract String endpoint();
public abstract Duration timeoutDuration();
public abstract boolean enableLogging();
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder setEndpoint(String endpoint);
public abstract Builder setApiKey(String newApiKey);
public abstract Builder setTimeoutDuration(Duration timeoutDuration);
public abstract Builder setEnableLogging(boolean enableLogging);
public abstract PineconeServiceConfig build();
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/PineconeVectorStoreSpec.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema;
import ai.knowly.langtorch.store.vectordb.integration.pinecone.PineconeService;
import java.util.Optional;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class PineconeVectorStoreSpec {
@NonNull private final PineconeService pineconeService;
private final String namespace;
private final String textKey;
@Builder.Default private final String model = "text-embedding-ada-002";
public Optional<String> getNamespace() {
return Optional.ofNullable(namespace);
}
public Optional<String> getTextKey() {
return Optional.ofNullable(textKey);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/dto/SparseValues.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder(toBuilder = true, setterPrefix = "set")
@NoArgsConstructor
@AllArgsConstructor
public class SparseValues {
@JsonProperty("indices")
private List<Integer> indices;
@JsonProperty("values")
private List<Double> values;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/dto/Vector.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import java.util.Map;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder(toBuilder = true, setterPrefix = "set")
@NoArgsConstructor
@AllArgsConstructor
public class Vector {
@JsonProperty("id")
private String id;
@JsonProperty("values")
private List<Double> values;
@JsonProperty("sparseValues")
private SparseValues sparseValues;
@JsonProperty("metadata")
private Map<String, String> metadata;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/dto
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/store/vectordb/integration/pinecone/schema/dto/delete/DeleteRequest.java
|
package ai.knowly.langtorch.store.vectordb.integration.pinecone.schema.dto.delete;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import java.util.Map;
import lombok.Builder;
import lombok.Data;
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class DeleteRequest {
@JsonProperty("ids")
private List<String> ids;
@JsonProperty("deleteAll")
private boolean deleteAll;
@JsonProperty("namespace")
private String namespace;
@JsonProperty("filter")
private Map<String, String> filter;
}
|