index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/PromptFeedback.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import java.util.ArrayList;
import java.util.List;
/** A data class representing the Gemini {@code PromptFeedback} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class PromptFeedback {

    private BlockedReason blockReason;
    private String blockReasonMessage;
    private List<SafetyRating> safetyRatings;

    PromptFeedback(Builder builder) {
        blockReason = builder.blockReason;
        blockReasonMessage = builder.blockReasonMessage;
        // Defensive copy: the builder accumulates into its own list (addSafetyRating),
        // so aliasing it here would let two instances built from the same builder --
        // or the builder itself after build() -- mutate this instance's state.
        safetyRatings = new ArrayList<>(builder.safetyRatings);
    }

    public BlockedReason getBlockReason() {
        return blockReason;
    }

    public String getBlockReasonMessage() {
        return blockReasonMessage;
    }

    /** Returns the safety ratings; the returned list is owned by this instance. */
    public List<SafetyRating> getSafetyRatings() {
        return safetyRatings;
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code PromptFeedback}. */
    public static final class Builder {

        BlockedReason blockReason;
        String blockReasonMessage;
        List<SafetyRating> safetyRatings = new ArrayList<>();

        public Builder blockReason(BlockedReason blockReason) {
            this.blockReason = blockReason;
            return this;
        }

        public Builder blockReasonMessage(String blockReasonMessage) {
            this.blockReasonMessage = blockReasonMessage;
            return this;
        }

        /** Replaces any accumulated safety ratings with the contents of the given list. */
        public Builder safetyRatings(List<SafetyRating> safetyRatings) {
            this.safetyRatings.clear();
            this.safetyRatings.addAll(safetyRatings);
            return this;
        }

        public Builder addSafetyRating(SafetyRating safetyRating) {
            this.safetyRatings.add(safetyRating);
            return this;
        }

        public Builder addSafetyRating(SafetyRating.Builder safetyRating) {
            this.safetyRatings.add(safetyRating.build());
            return this;
        }

        public PromptFeedback build() {
            return new PromptFeedback(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagChunk.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagChunk} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagChunk {

    private RagChunkPageSpan pageSpan;
    private String text;

    RagChunk(Builder builder) {
        this.pageSpan = builder.pageSpan;
        this.text = builder.text;
    }

    /** Returns a new {@code Builder} for {@code RagChunk}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the page span of this chunk, or {@code null} if unset. */
    public RagChunkPageSpan getPageSpan() {
        return pageSpan;
    }

    /** Returns the chunk text, or {@code null} if unset. */
    public String getText() {
        return text;
    }

    /** Builder class for {@code RagChunk}. */
    public static final class Builder {

        RagChunkPageSpan pageSpan;
        String text;

        public Builder pageSpan(RagChunkPageSpan pageSpan) {
            this.pageSpan = pageSpan;
            return this;
        }

        public Builder pageSpan(RagChunkPageSpan.Builder pageSpan) {
            this.pageSpan = pageSpan.build();
            return this;
        }

        public Builder text(String text) {
            this.text = text;
            return this;
        }

        public RagChunk build() {
            return new RagChunk(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagChunkPageSpan.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagChunkPageSpan} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagChunkPageSpan {

    private Integer firstPage;
    private Integer lastPage;

    RagChunkPageSpan(Builder builder) {
        this.firstPage = builder.firstPage;
        this.lastPage = builder.lastPage;
    }

    /** Returns a new {@code Builder} for {@code RagChunkPageSpan}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the first page of the span, or {@code null} if unset. */
    public Integer getFirstPage() {
        return firstPage;
    }

    /** Returns the last page of the span, or {@code null} if unset. */
    public Integer getLastPage() {
        return lastPage;
    }

    /** Builder class for {@code RagChunkPageSpan}. */
    public static final class Builder {

        Integer firstPage;
        Integer lastPage;

        public Builder firstPage(Integer firstPage) {
            this.firstPage = firstPage;
            return this;
        }

        public Builder lastPage(Integer lastPage) {
            this.lastPage = lastPage;
            return this;
        }

        public RagChunkPageSpan build() {
            return new RagChunkPageSpan(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagRetrievalConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagRetrievalConfig} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagRetrievalConfig {

    private RagRetrievalConfigFilter filter;
    private RagRetrievalConfigHybridSearch hybridSearch;
    private RagRetrievalConfigRanking ranking;
    private Integer topK;

    RagRetrievalConfig(Builder builder) {
        this.filter = builder.filter;
        this.hybridSearch = builder.hybridSearch;
        this.ranking = builder.ranking;
        this.topK = builder.topK;
    }

    /** Returns a new {@code Builder} for {@code RagRetrievalConfig}. */
    public static Builder builder() {
        return new Builder();
    }

    public RagRetrievalConfigFilter getFilter() {
        return filter;
    }

    public RagRetrievalConfigHybridSearch getHybridSearch() {
        return hybridSearch;
    }

    public RagRetrievalConfigRanking getRanking() {
        return ranking;
    }

    public Integer getTopK() {
        return topK;
    }

    /** Builder class for {@code RagRetrievalConfig}. */
    public static final class Builder {

        RagRetrievalConfigFilter filter;
        RagRetrievalConfigHybridSearch hybridSearch;
        RagRetrievalConfigRanking ranking;
        Integer topK;

        public Builder filter(RagRetrievalConfigFilter filter) {
            this.filter = filter;
            return this;
        }

        public Builder filter(RagRetrievalConfigFilter.Builder filter) {
            this.filter = filter.build();
            return this;
        }

        public Builder hybridSearch(RagRetrievalConfigHybridSearch hybridSearch) {
            this.hybridSearch = hybridSearch;
            return this;
        }

        public Builder hybridSearch(RagRetrievalConfigHybridSearch.Builder hybridSearch) {
            this.hybridSearch = hybridSearch.build();
            return this;
        }

        public Builder ranking(RagRetrievalConfigRanking ranking) {
            this.ranking = ranking;
            return this;
        }

        public Builder ranking(RagRetrievalConfigRanking.Builder ranking) {
            this.ranking = ranking.build();
            return this;
        }

        public Builder topK(Integer topK) {
            this.topK = topK;
            return this;
        }

        public RagRetrievalConfig build() {
            return new RagRetrievalConfig(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagRetrievalConfigFilter.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagRetrievalConfigFilter} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagRetrievalConfigFilter {

    private String metadataFilter;
    private Double vectorDistanceThreshold;
    private Double vectorSimilarityThreshold;

    RagRetrievalConfigFilter(Builder builder) {
        this.metadataFilter = builder.metadataFilter;
        this.vectorDistanceThreshold = builder.vectorDistanceThreshold;
        this.vectorSimilarityThreshold = builder.vectorSimilarityThreshold;
    }

    /** Returns a new {@code Builder} for {@code RagRetrievalConfigFilter}. */
    public static Builder builder() {
        return new Builder();
    }

    public String getMetadataFilter() {
        return metadataFilter;
    }

    public Double getVectorDistanceThreshold() {
        return vectorDistanceThreshold;
    }

    public Double getVectorSimilarityThreshold() {
        return vectorSimilarityThreshold;
    }

    /** Builder class for {@code RagRetrievalConfigFilter}. */
    public static final class Builder {

        String metadataFilter;
        Double vectorDistanceThreshold;
        Double vectorSimilarityThreshold;

        public Builder metadataFilter(String metadataFilter) {
            this.metadataFilter = metadataFilter;
            return this;
        }

        public Builder vectorDistanceThreshold(Double vectorDistanceThreshold) {
            this.vectorDistanceThreshold = vectorDistanceThreshold;
            return this;
        }

        public Builder vectorSimilarityThreshold(Double vectorSimilarityThreshold) {
            this.vectorSimilarityThreshold = vectorSimilarityThreshold;
            return this;
        }

        public RagRetrievalConfigFilter build() {
            return new RagRetrievalConfigFilter(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagRetrievalConfigHybridSearch.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagRetrievalConfigHybridSearch} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagRetrievalConfigHybridSearch {

    private Float alpha;

    RagRetrievalConfigHybridSearch(Builder builder) {
        this.alpha = builder.alpha;
    }

    /** Returns a new {@code Builder} for {@code RagRetrievalConfigHybridSearch}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the hybrid-search alpha weight, or {@code null} if unset. */
    public Float getAlpha() {
        return alpha;
    }

    /** Builder class for {@code RagRetrievalConfigHybridSearch}. */
    public static final class Builder {

        Float alpha;

        public Builder alpha(Float alpha) {
            this.alpha = alpha;
            return this;
        }

        public RagRetrievalConfigHybridSearch build() {
            return new RagRetrievalConfigHybridSearch(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagRetrievalConfigRanking.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagRetrievalConfigRanking} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagRetrievalConfigRanking {

    private RagRetrievalConfigRankingLlmRanker llmRanker;
    private RagRetrievalConfigRankingRankService rankService;

    RagRetrievalConfigRanking(Builder builder) {
        this.llmRanker = builder.llmRanker;
        this.rankService = builder.rankService;
    }

    /** Returns a new {@code Builder} for {@code RagRetrievalConfigRanking}. */
    public static Builder builder() {
        return new Builder();
    }

    public RagRetrievalConfigRankingLlmRanker getLlmRanker() {
        return llmRanker;
    }

    public RagRetrievalConfigRankingRankService getRankService() {
        return rankService;
    }

    /** Builder class for {@code RagRetrievalConfigRanking}. */
    public static final class Builder {

        RagRetrievalConfigRankingLlmRanker llmRanker;
        RagRetrievalConfigRankingRankService rankService;

        public Builder llmRanker(RagRetrievalConfigRankingLlmRanker llmRanker) {
            this.llmRanker = llmRanker;
            return this;
        }

        public Builder llmRanker(RagRetrievalConfigRankingLlmRanker.Builder llmRanker) {
            this.llmRanker = llmRanker.build();
            return this;
        }

        public Builder rankService(RagRetrievalConfigRankingRankService rankService) {
            this.rankService = rankService;
            return this;
        }

        public Builder rankService(RagRetrievalConfigRankingRankService.Builder rankService) {
            this.rankService = rankService.build();
            return this;
        }

        public RagRetrievalConfigRanking build() {
            return new RagRetrievalConfigRanking(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagRetrievalConfigRankingLlmRanker.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagRetrievalConfigRankingLlmRanker} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagRetrievalConfigRankingLlmRanker {

    private String modelName;

    RagRetrievalConfigRankingLlmRanker(Builder builder) {
        this.modelName = builder.modelName;
    }

    /** Returns a new {@code Builder} for {@code RagRetrievalConfigRankingLlmRanker}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the ranker model name, or {@code null} if unset. */
    public String getModelName() {
        return modelName;
    }

    /** Builder class for {@code RagRetrievalConfigRankingLlmRanker}. */
    public static final class Builder {

        String modelName;

        public Builder modelName(String modelName) {
            this.modelName = modelName;
            return this;
        }

        public RagRetrievalConfigRankingLlmRanker build() {
            return new RagRetrievalConfigRankingLlmRanker(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RagRetrievalConfigRankingRankService.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RagRetrievalConfigRankingRankService} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RagRetrievalConfigRankingRankService {

    private String modelName;

    RagRetrievalConfigRankingRankService(Builder builder) {
        this.modelName = builder.modelName;
    }

    /** Returns a new {@code Builder} for {@code RagRetrievalConfigRankingRankService}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the rank-service model name, or {@code null} if unset. */
    public String getModelName() {
        return modelName;
    }

    /** Builder class for {@code RagRetrievalConfigRankingRankService}. */
    public static final class Builder {

        String modelName;

        public Builder modelName(String modelName) {
            this.modelName = modelName;
            return this;
        }

        public RagRetrievalConfigRankingRankService build() {
            return new RagRetrievalConfigRankingRankService(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/Retrieval.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code Retrieval} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Retrieval {

    private Boolean disableAttribution;
    private VertexRagStore vertexRagStore;

    Retrieval(Builder builder) {
        this.disableAttribution = builder.disableAttribution;
        this.vertexRagStore = builder.vertexRagStore;
    }

    /** Returns a new {@code Builder} for {@code Retrieval}. */
    public static Builder builder() {
        return new Builder();
    }

    public Boolean getDisableAttribution() {
        return disableAttribution;
    }

    public VertexRagStore getVertexRagStore() {
        return vertexRagStore;
    }

    /** Builder class for {@code Retrieval}. */
    public static final class Builder {

        Boolean disableAttribution;
        VertexRagStore vertexRagStore;

        public Builder disableAttribution(Boolean disableAttribution) {
            this.disableAttribution = disableAttribution;
            return this;
        }

        public Builder vertexRagStore(VertexRagStore vertexRagStore) {
            this.vertexRagStore = vertexRagStore;
            return this;
        }

        public Builder vertexRagStore(VertexRagStore.Builder vertexRagStore) {
            this.vertexRagStore = vertexRagStore.build();
            return this;
        }

        public Retrieval build() {
            return new Retrieval(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RetrievalConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RetrievalConfig} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RetrievalConfig {

    private String languageCode;
    private LatLng latLng;

    RetrievalConfig(Builder builder) {
        this.languageCode = builder.languageCode;
        this.latLng = builder.latLng;
    }

    /** Returns a new {@code Builder} for {@code RetrievalConfig}. */
    public static Builder builder() {
        return new Builder();
    }

    public String getLanguageCode() {
        return languageCode;
    }

    public LatLng getLatLng() {
        return latLng;
    }

    /** Builder class for {@code RetrievalConfig}. */
    public static final class Builder {

        String languageCode;
        LatLng latLng;

        public Builder languageCode(String languageCode) {
            this.languageCode = languageCode;
            return this;
        }

        // NOTE(review): unlike sibling types, there is no latLng(LatLng.Builder)
        // overload here -- confirm whether LatLng exposes a Builder before adding one.
        public Builder latLng(LatLng latLng) {
            this.latLng = latLng;
            return this;
        }

        public RetrievalConfig build() {
            return new RetrievalConfig(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/RetrievalMetadata.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code RetrievalMetadata} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class RetrievalMetadata {

    private Float googleSearchDynamicRetrievalScore;

    RetrievalMetadata(Builder builder) {
        this.googleSearchDynamicRetrievalScore = builder.googleSearchDynamicRetrievalScore;
    }

    /** Returns a new {@code Builder} for {@code RetrievalMetadata}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the dynamic retrieval score, or {@code null} if unset. */
    public Float getGoogleSearchDynamicRetrievalScore() {
        return googleSearchDynamicRetrievalScore;
    }

    /** Builder class for {@code RetrievalMetadata}. */
    public static final class Builder {

        Float googleSearchDynamicRetrievalScore;

        public Builder googleSearchDynamicRetrievalScore(Float googleSearchDynamicRetrievalScore) {
            this.googleSearchDynamicRetrievalScore = googleSearchDynamicRetrievalScore;
            return this;
        }

        public RetrievalMetadata build() {
            return new RetrievalMetadata(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/SafetyRating.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code SafetyRating} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class SafetyRating {

    private Boolean blocked;
    private HarmCategory category;
    private HarmBlockThreshold overwrittenThreshold;
    private HarmProbability probability;
    private Float probabilityScore;
    private HarmSeverity severity;
    private Float severityScore;

    SafetyRating(Builder builder) {
        this.blocked = builder.blocked;
        this.category = builder.category;
        this.overwrittenThreshold = builder.overwrittenThreshold;
        this.probability = builder.probability;
        this.probabilityScore = builder.probabilityScore;
        this.severity = builder.severity;
        this.severityScore = builder.severityScore;
    }

    /** Returns a new {@code Builder} for {@code SafetyRating}. */
    public static Builder builder() {
        return new Builder();
    }

    public Boolean getBlocked() {
        return blocked;
    }

    public HarmCategory getCategory() {
        return category;
    }

    public HarmBlockThreshold getOverwrittenThreshold() {
        return overwrittenThreshold;
    }

    public HarmProbability getProbability() {
        return probability;
    }

    public Float getProbabilityScore() {
        return probabilityScore;
    }

    public HarmSeverity getSeverity() {
        return severity;
    }

    public Float getSeverityScore() {
        return severityScore;
    }

    /** Builder class for {@code SafetyRating}. */
    public static final class Builder {

        Boolean blocked;
        HarmCategory category;
        HarmBlockThreshold overwrittenThreshold;
        HarmProbability probability;
        Float probabilityScore;
        HarmSeverity severity;
        Float severityScore;

        public Builder blocked(Boolean blocked) {
            this.blocked = blocked;
            return this;
        }

        public Builder category(HarmCategory category) {
            this.category = category;
            return this;
        }

        public Builder overwrittenThreshold(HarmBlockThreshold overwrittenThreshold) {
            this.overwrittenThreshold = overwrittenThreshold;
            return this;
        }

        public Builder probability(HarmProbability probability) {
            this.probability = probability;
            return this;
        }

        public Builder probabilityScore(Float probabilityScore) {
            this.probabilityScore = probabilityScore;
            return this;
        }

        public Builder severity(HarmSeverity severity) {
            this.severity = severity;
            return this;
        }

        public Builder severityScore(Float severityScore) {
            this.severityScore = severityScore;
            return this;
        }

        public SafetyRating build() {
            return new SafetyRating(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/SafetySetting.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class representing the Gemini {@code SafetySetting} schema. */
@SuppressWarnings("MissingJavadocMethod")
public class SafetySetting {

    private HarmCategory category;
    private HarmBlockMethod method;
    private HarmBlockThreshold threshold;

    SafetySetting(Builder builder) {
        this.category = builder.category;
        this.method = builder.method;
        this.threshold = builder.threshold;
    }

    /** Returns a new {@code Builder} for {@code SafetySetting}. */
    public static Builder builder() {
        return new Builder();
    }

    public HarmCategory getCategory() {
        return category;
    }

    public HarmBlockMethod getMethod() {
        return method;
    }

    public HarmBlockThreshold getThreshold() {
        return threshold;
    }

    /** Builder class for {@code SafetySetting}. */
    public static final class Builder {

        HarmCategory category;
        HarmBlockMethod method;
        HarmBlockThreshold threshold;

        public Builder category(HarmCategory category) {
            this.category = category;
            return this;
        }

        public Builder method(HarmBlockMethod method) {
            this.method = method;
            return this;
        }

        public Builder threshold(HarmBlockThreshold threshold) {
            this.threshold = threshold;
            return this;
        }

        public SafetySetting build() {
            return new SafetySetting(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/Schema.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/** A data class representing the Gemini {@code Schema} type (an OpenAPI-style schema). */
@SuppressWarnings("MissingJavadocMethod")
public class Schema {

    private List<Schema> anyOf;

    @SerializedName("default")
    private Object defaultName;

    private String description;

    @SerializedName("enum")
    private List<String> enumName;

    private Object example;
    private String format;
    private Schema items;
    private Long maxItems;
    private Long maxLength;
    private Long maxProperties;
    private Double maximum;
    private Long minItems;
    private Long minLength;
    private Long minProperties;
    private Double minimum;
    private Boolean nullable;
    private String pattern;
    private Map<String, Schema> properties;
    private List<String> propertyOrdering;
    private List<String> required;
    private String title;
    private Type type;

    Schema(Builder builder) {
        // Defensive copy: the builder accumulates into its own list (addAnyOf), so
        // aliasing it would let the builder -- or another instance built from it --
        // mutate this instance's anyOf after construction. The remaining collection
        // fields are caller-supplied references with no builder-side accumulation,
        // so they keep the original pass-through behavior.
        anyOf = new ArrayList<>(builder.anyOf);
        defaultName = builder.defaultName;
        description = builder.description;
        enumName = builder.enumName;
        example = builder.example;
        format = builder.format;
        items = builder.items;
        maxItems = builder.maxItems;
        maxLength = builder.maxLength;
        maxProperties = builder.maxProperties;
        maximum = builder.maximum;
        minItems = builder.minItems;
        minLength = builder.minLength;
        minProperties = builder.minProperties;
        minimum = builder.minimum;
        nullable = builder.nullable;
        pattern = builder.pattern;
        properties = builder.properties;
        propertyOrdering = builder.propertyOrdering;
        required = builder.required;
        title = builder.title;
        type = builder.type;
    }

    public List<Schema> getAnyOf() {
        return anyOf;
    }

    public Object getDefaultName() {
        return defaultName;
    }

    public String getDescription() {
        return description;
    }

    public List<String> getEnumName() {
        return enumName;
    }

    public Object getExample() {
        return example;
    }

    public String getFormat() {
        return format;
    }

    public Schema getItems() {
        return items;
    }

    public Long getMaxItems() {
        return maxItems;
    }

    public Long getMaxLength() {
        return maxLength;
    }

    public Long getMaxProperties() {
        return maxProperties;
    }

    public Double getMaximum() {
        return maximum;
    }

    public Long getMinItems() {
        return minItems;
    }

    public Long getMinLength() {
        return minLength;
    }

    public Long getMinProperties() {
        return minProperties;
    }

    public Double getMinimum() {
        return minimum;
    }

    public Boolean getNullable() {
        return nullable;
    }

    public String getPattern() {
        return pattern;
    }

    public Map<String, Schema> getProperties() {
        return properties;
    }

    public List<String> getPropertyOrdering() {
        return propertyOrdering;
    }

    public List<String> getRequired() {
        return required;
    }

    public String getTitle() {
        return title;
    }

    public Type getType() {
        return type;
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code Schema}. */
    public static final class Builder {

        List<Schema> anyOf = new ArrayList<>();
        Object defaultName;
        String description;
        List<String> enumName;
        Object example;
        String format;
        Schema items;
        Long maxItems;
        Long maxLength;
        Long maxProperties;
        Double maximum;
        Long minItems;
        Long minLength;
        Long minProperties;
        Double minimum;
        Boolean nullable;
        String pattern;
        Map<String, Schema> properties;
        List<String> propertyOrdering;
        List<String> required;
        String title;
        Type type;

        /** Replaces any accumulated {@code anyOf} entries with the given list's contents. */
        public Builder anyOf(List<Schema> anyOf) {
            this.anyOf.clear();
            this.anyOf.addAll(anyOf);
            return this;
        }

        public Builder addAnyOf(Schema anyOf) {
            this.anyOf.add(anyOf);
            return this;
        }

        public Builder addAnyOf(Schema.Builder anyOf) {
            this.anyOf.add(anyOf.build());
            return this;
        }

        public Builder defaultName(Object defaultName) {
            this.defaultName = defaultName;
            return this;
        }

        public Builder description(String description) {
            this.description = description;
            return this;
        }

        public Builder enumName(List<String> enumName) {
            this.enumName = enumName;
            return this;
        }

        public Builder example(Object example) {
            this.example = example;
            return this;
        }

        public Builder format(String format) {
            this.format = format;
            return this;
        }

        public Builder items(Schema items) {
            this.items = items;
            return this;
        }

        public Builder items(Schema.Builder items) {
            this.items = items.build();
            return this;
        }

        public Builder maxItems(Long maxItems) {
            this.maxItems = maxItems;
            return this;
        }

        public Builder maxLength(Long maxLength) {
            this.maxLength = maxLength;
            return this;
        }

        public Builder maxProperties(Long maxProperties) {
            this.maxProperties = maxProperties;
            return this;
        }

        public Builder maximum(Double maximum) {
            this.maximum = maximum;
            return this;
        }

        public Builder minItems(Long minItems) {
            this.minItems = minItems;
            return this;
        }

        public Builder minLength(Long minLength) {
            this.minLength = minLength;
            return this;
        }

        public Builder minProperties(Long minProperties) {
            this.minProperties = minProperties;
            return this;
        }

        public Builder minimum(Double minimum) {
            this.minimum = minimum;
            return this;
        }

        public Builder nullable(Boolean nullable) {
            this.nullable = nullable;
            return this;
        }

        public Builder pattern(String pattern) {
            this.pattern = pattern;
            return this;
        }

        public Builder properties(Map<String, Schema> properties) {
            this.properties = properties;
            return this;
        }

        public Builder propertyOrdering(List<String> propertyOrdering) {
            this.propertyOrdering = propertyOrdering;
            return this;
        }

        public Builder required(List<String> required) {
            this.required = required;
            return this;
        }

        public Builder title(String title) {
            this.title = title;
            return this;
        }

        public Builder type(Type type) {
            this.type = type;
            return this;
        }

        public Schema build() {
            return new Schema(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/SearchEntryPoint.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import java.util.Base64;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class SearchEntryPoint {

    private String renderedContent;
    private String sdkBlob;

    SearchEntryPoint(Builder builder) {
        this.renderedContent = builder.renderedContent;
        this.sdkBlob = builder.sdkBlob;
    }

    /** Returns the rendered content. */
    public String getRenderedContent() {
        return this.renderedContent;
    }

    /** Returns the base64-encoded SDK blob. */
    public String getSdkBlob() {
        return this.sdkBlob;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code SearchEntryPoint}. */
    public static final class Builder {

        String renderedContent;
        String sdkBlob;

        /** Sets the rendered content. */
        public Builder renderedContent(String renderedContent) {
            this.renderedContent = renderedContent;
            return this;
        }

        /** Sets the SDK blob, base64-encoding the raw bytes. */
        public Builder sdkBlob(byte[] sdkBlob) {
            this.sdkBlob = Base64.getEncoder().encodeToString(sdkBlob);
            return this;
        }

        /** Builds the {@code SearchEntryPoint} instance. */
        public SearchEntryPoint build() {
            return new SearchEntryPoint(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/Segment.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Segment {

    private Integer endIndex;
    private Integer partIndex;
    private Integer startIndex;
    private String text;

    Segment(Builder builder) {
        this.endIndex = builder.endIndex;
        this.partIndex = builder.partIndex;
        this.startIndex = builder.startIndex;
        this.text = builder.text;
    }

    /** Returns the end index. */
    public Integer getEndIndex() {
        return this.endIndex;
    }

    /** Returns the part index. */
    public Integer getPartIndex() {
        return this.partIndex;
    }

    /** Returns the start index. */
    public Integer getStartIndex() {
        return this.startIndex;
    }

    /** Returns the text. */
    public String getText() {
        return this.text;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code Segment}. */
    public static final class Builder {

        Integer endIndex;
        Integer partIndex;
        Integer startIndex;
        String text;

        /** Sets the end index. */
        public Builder endIndex(Integer endIndex) {
            this.endIndex = endIndex;
            return this;
        }

        /** Sets the part index. */
        public Builder partIndex(Integer partIndex) {
            this.partIndex = partIndex;
            return this;
        }

        /** Sets the start index. */
        public Builder startIndex(Integer startIndex) {
            this.startIndex = startIndex;
            return this;
        }

        /** Sets the text. */
        public Builder text(String text) {
            this.text = text;
            return this;
        }

        /** Builds the {@code Segment} instance. */
        public Segment build() {
            return new Segment(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/SpeakerVoiceConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class SpeakerVoiceConfig {

    private String speaker;
    private VoiceConfig voiceConfig;

    SpeakerVoiceConfig(Builder builder) {
        this.speaker = builder.speaker;
        this.voiceConfig = builder.voiceConfig;
    }

    /** Returns the speaker name. */
    public String getSpeaker() {
        return this.speaker;
    }

    /** Returns the voice configuration. */
    public VoiceConfig getVoiceConfig() {
        return this.voiceConfig;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code SpeakerVoiceConfig}. */
    public static final class Builder {

        String speaker;
        VoiceConfig voiceConfig;

        /** Sets the speaker name. */
        public Builder speaker(String speaker) {
            this.speaker = speaker;
            return this;
        }

        /** Sets the voice configuration. */
        public Builder voiceConfig(VoiceConfig voiceConfig) {
            this.voiceConfig = voiceConfig;
            return this;
        }

        /** Builds and sets the voice configuration. */
        public Builder voiceConfig(VoiceConfig.Builder voiceConfig) {
            this.voiceConfig = voiceConfig.build();
            return this;
        }

        /** Builds the {@code SpeakerVoiceConfig} instance. */
        public SpeakerVoiceConfig build() {
            return new SpeakerVoiceConfig(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/SpeechConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class SpeechConfig {

    private String languageCode;
    private MultiSpeakerVoiceConfig multiSpeakerVoiceConfig;
    private VoiceConfig voiceConfig;

    SpeechConfig(Builder builder) {
        this.languageCode = builder.languageCode;
        this.multiSpeakerVoiceConfig = builder.multiSpeakerVoiceConfig;
        this.voiceConfig = builder.voiceConfig;
    }

    /** Returns the language code. */
    public String getLanguageCode() {
        return this.languageCode;
    }

    /** Returns the multi-speaker voice configuration. */
    public MultiSpeakerVoiceConfig getMultiSpeakerVoiceConfig() {
        return this.multiSpeakerVoiceConfig;
    }

    /** Returns the voice configuration. */
    public VoiceConfig getVoiceConfig() {
        return this.voiceConfig;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code SpeechConfig}. */
    public static final class Builder {

        String languageCode;
        MultiSpeakerVoiceConfig multiSpeakerVoiceConfig;
        VoiceConfig voiceConfig;

        /** Sets the language code. */
        public Builder languageCode(String languageCode) {
            this.languageCode = languageCode;
            return this;
        }

        /** Sets the multi-speaker voice configuration. */
        public Builder multiSpeakerVoiceConfig(MultiSpeakerVoiceConfig multiSpeakerVoiceConfig) {
            this.multiSpeakerVoiceConfig = multiSpeakerVoiceConfig;
            return this;
        }

        /** Builds and sets the multi-speaker voice configuration. */
        public Builder multiSpeakerVoiceConfig(
                MultiSpeakerVoiceConfig.Builder multiSpeakerVoiceConfig) {
            this.multiSpeakerVoiceConfig = multiSpeakerVoiceConfig.build();
            return this;
        }

        /** Sets the voice configuration. */
        public Builder voiceConfig(VoiceConfig voiceConfig) {
            this.voiceConfig = voiceConfig;
            return this;
        }

        /** Builds and sets the voice configuration. */
        public Builder voiceConfig(VoiceConfig.Builder voiceConfig) {
            this.voiceConfig = voiceConfig.build();
            return this;
        }

        /** Builds the {@code SpeechConfig} instance. */
        public SpeechConfig build() {
            return new SpeechConfig(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/ThinkingConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class ThinkingConfig {

    private Boolean includeThoughts;
    private Integer thinkingBudget;

    ThinkingConfig(Builder builder) {
        this.includeThoughts = builder.includeThoughts;
        this.thinkingBudget = builder.thinkingBudget;
    }

    /** Returns whether thoughts are included. */
    public Boolean getIncludeThoughts() {
        return this.includeThoughts;
    }

    /** Returns the thinking budget. */
    public Integer getThinkingBudget() {
        return this.thinkingBudget;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code ThinkingConfig}. */
    public static final class Builder {

        Boolean includeThoughts;
        Integer thinkingBudget;

        /** Sets whether thoughts are included. */
        public Builder includeThoughts(Boolean includeThoughts) {
            this.includeThoughts = includeThoughts;
            return this;
        }

        /** Sets the thinking budget. */
        public Builder thinkingBudget(Integer thinkingBudget) {
            this.thinkingBudget = thinkingBudget;
            return this;
        }

        /** Builds the {@code ThinkingConfig} instance. */
        public ThinkingConfig build() {
            return new ThinkingConfig(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/Tool.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import java.util.ArrayList;
import java.util.List;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Tool {

    private List<FunctionDeclaration> functionDeclarations;
    private GoogleSearch googleSearch;
    private GoogleSearchRetrieval googleSearchRetrieval;
    private Retrieval retrieval;

    Tool(Builder builder) {
        this.functionDeclarations = builder.functionDeclarations;
        this.googleSearch = builder.googleSearch;
        this.googleSearchRetrieval = builder.googleSearchRetrieval;
        this.retrieval = builder.retrieval;
    }

    /** Returns the function declarations. */
    public List<FunctionDeclaration> getFunctionDeclarations() {
        return this.functionDeclarations;
    }

    /** Returns the Google search tool. */
    public GoogleSearch getGoogleSearch() {
        return this.googleSearch;
    }

    /** Returns the Google search retrieval tool. */
    public GoogleSearchRetrieval getGoogleSearchRetrieval() {
        return this.googleSearchRetrieval;
    }

    /** Returns the retrieval tool. */
    public Retrieval getRetrieval() {
        return this.retrieval;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code Tool}. */
    public static final class Builder {

        List<FunctionDeclaration> functionDeclarations = new ArrayList<>();
        GoogleSearch googleSearch;
        GoogleSearchRetrieval googleSearchRetrieval;
        Retrieval retrieval;

        /** Replaces the function declarations with a copy of the given list. */
        public Builder functionDeclarations(List<FunctionDeclaration> functionDeclarations) {
            this.functionDeclarations.clear();
            this.functionDeclarations.addAll(functionDeclarations);
            return this;
        }

        /** Appends one function declaration. */
        public Builder addFunctionDeclaration(FunctionDeclaration functionDeclaration) {
            this.functionDeclarations.add(functionDeclaration);
            return this;
        }

        /** Builds and appends one function declaration. */
        public Builder addFunctionDeclaration(FunctionDeclaration.Builder functionDeclaration) {
            this.functionDeclarations.add(functionDeclaration.build());
            return this;
        }

        /** Sets the Google search tool. */
        public Builder googleSearch(GoogleSearch googleSearch) {
            this.googleSearch = googleSearch;
            return this;
        }

        /** Builds and sets the Google search tool. */
        public Builder googleSearch(GoogleSearch.Builder googleSearch) {
            this.googleSearch = googleSearch.build();
            return this;
        }

        /** Sets the Google search retrieval tool. */
        public Builder googleSearchRetrieval(GoogleSearchRetrieval googleSearchRetrieval) {
            this.googleSearchRetrieval = googleSearchRetrieval;
            return this;
        }

        /** Builds and sets the Google search retrieval tool. */
        public Builder googleSearchRetrieval(GoogleSearchRetrieval.Builder googleSearchRetrieval) {
            this.googleSearchRetrieval = googleSearchRetrieval.build();
            return this;
        }

        /** Sets the retrieval tool. */
        public Builder retrieval(Retrieval retrieval) {
            this.retrieval = retrieval;
            return this;
        }

        /** Builds and sets the retrieval tool. */
        public Builder retrieval(Retrieval.Builder retrieval) {
            this.retrieval = retrieval.build();
            return this;
        }

        /** Builds the {@code Tool} instance. */
        public Tool build() {
            return new Tool(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/ToolConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class ToolConfig {

    private FunctionCallingConfig functionCallingConfig;
    private RetrievalConfig retrievalConfig;

    ToolConfig(Builder builder) {
        this.functionCallingConfig = builder.functionCallingConfig;
        this.retrievalConfig = builder.retrievalConfig;
    }

    /** Returns the function calling configuration. */
    public FunctionCallingConfig getFunctionCallingConfig() {
        return this.functionCallingConfig;
    }

    /** Returns the retrieval configuration. */
    public RetrievalConfig getRetrievalConfig() {
        return this.retrievalConfig;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code ToolConfig}. */
    public static final class Builder {

        FunctionCallingConfig functionCallingConfig;
        RetrievalConfig retrievalConfig;

        /** Sets the function calling configuration. */
        public Builder functionCallingConfig(FunctionCallingConfig functionCallingConfig) {
            this.functionCallingConfig = functionCallingConfig;
            return this;
        }

        /** Builds and sets the function calling configuration. */
        public Builder functionCallingConfig(FunctionCallingConfig.Builder functionCallingConfig) {
            this.functionCallingConfig = functionCallingConfig.build();
            return this;
        }

        /** Sets the retrieval configuration. */
        public Builder retrievalConfig(RetrievalConfig retrievalConfig) {
            this.retrievalConfig = retrievalConfig;
            return this;
        }

        /** Builds and sets the retrieval configuration. */
        public Builder retrievalConfig(RetrievalConfig.Builder retrievalConfig) {
            this.retrievalConfig = retrievalConfig.build();
            return this;
        }

        /** Builds the {@code ToolConfig} instance. */
        public ToolConfig build() {
            return new ToolConfig(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/TrafficType.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** An enum that represents a Gemini schema. */
public enum TrafficType {
    // Traffic type was not specified.
    TRAFFIC_TYPE_UNSPECIFIED,
    // On-demand traffic.
    ON_DEMAND,
    // Provisioned-throughput traffic.
    PROVISIONED_THROUGHPUT
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/Type.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** An enum that represents a Gemini schema. */
public enum Type {
    // Type was not specified.
    TYPE_UNSPECIFIED,
    STRING,
    NUMBER,
    INTEGER,
    BOOLEAN,
    ARRAY,
    OBJECT,
    NULL
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/UrlContextMetadata.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import java.util.ArrayList;
import java.util.List;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class UrlContextMetadata {

    private List<UrlMetadata> urlMetadata;

    UrlContextMetadata(Builder builder) {
        this.urlMetadata = builder.urlMetadata;
    }

    /** Returns the URL metadata entries. */
    public List<UrlMetadata> getUrlMetadata() {
        return this.urlMetadata;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code UrlContextMetadata}. */
    public static final class Builder {

        List<UrlMetadata> urlMetadata = new ArrayList<>();

        /** Replaces the URL metadata entries with a copy of the given list. */
        public Builder urlMetadata(List<UrlMetadata> urlMetadata) {
            this.urlMetadata.clear();
            this.urlMetadata.addAll(urlMetadata);
            return this;
        }

        /** Appends one URL metadata entry. */
        public Builder addUrlMetadata(UrlMetadata urlMetadata) {
            this.urlMetadata.add(urlMetadata);
            return this;
        }

        /** Builds and appends one URL metadata entry. */
        public Builder addUrlMetadata(UrlMetadata.Builder urlMetadata) {
            this.urlMetadata.add(urlMetadata.build());
            return this;
        }

        /** Builds the {@code UrlContextMetadata} instance. */
        public UrlContextMetadata build() {
            return new UrlContextMetadata(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/UrlMetadata.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class UrlMetadata {

    private String retrievedUrl;
    private UrlRetrievalStatus urlRetrievalStatus;

    UrlMetadata(Builder builder) {
        this.retrievedUrl = builder.retrievedUrl;
        this.urlRetrievalStatus = builder.urlRetrievalStatus;
    }

    /** Returns the retrieved URL. */
    public String getRetrievedUrl() {
        return this.retrievedUrl;
    }

    /** Returns the URL retrieval status. */
    public UrlRetrievalStatus getUrlRetrievalStatus() {
        return this.urlRetrievalStatus;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code UrlMetadata}. */
    public static final class Builder {

        String retrievedUrl;
        UrlRetrievalStatus urlRetrievalStatus;

        /** Sets the retrieved URL. */
        public Builder retrievedUrl(String retrievedUrl) {
            this.retrievedUrl = retrievedUrl;
            return this;
        }

        /** Sets the URL retrieval status. */
        public Builder urlRetrievalStatus(UrlRetrievalStatus urlRetrievalStatus) {
            this.urlRetrievalStatus = urlRetrievalStatus;
            return this;
        }

        /** Builds the {@code UrlMetadata} instance. */
        public UrlMetadata build() {
            return new UrlMetadata(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/UrlRetrievalStatus.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** An enum that represents a Gemini schema. */
public enum UrlRetrievalStatus {
    // Retrieval status was not specified.
    URL_RETRIEVAL_STATUS_UNSPECIFIED,
    // The URL was retrieved successfully.
    URL_RETRIEVAL_STATUS_SUCCESS,
    // Retrieving the URL failed.
    URL_RETRIEVAL_STATUS_ERROR
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/UsageMetadata.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import java.util.ArrayList;
import java.util.List;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class UsageMetadata {

    private List<ModalityTokenCount> cacheTokensDetails;
    private Integer cachedContentTokenCount;
    private Integer candidatesTokenCount;
    private List<ModalityTokenCount> candidatesTokensDetails;
    private Integer promptTokenCount;
    private List<ModalityTokenCount> promptTokensDetails;
    private Integer thoughtsTokenCount;
    private Integer toolUsePromptTokenCount;
    private List<ModalityTokenCount> toolUsePromptTokensDetails;
    private Integer totalTokenCount;
    private TrafficType trafficType;

    UsageMetadata(Builder builder) {
        this.cacheTokensDetails = builder.cacheTokensDetails;
        this.cachedContentTokenCount = builder.cachedContentTokenCount;
        this.candidatesTokenCount = builder.candidatesTokenCount;
        this.candidatesTokensDetails = builder.candidatesTokensDetails;
        this.promptTokenCount = builder.promptTokenCount;
        this.promptTokensDetails = builder.promptTokensDetails;
        this.thoughtsTokenCount = builder.thoughtsTokenCount;
        this.toolUsePromptTokenCount = builder.toolUsePromptTokenCount;
        this.toolUsePromptTokensDetails = builder.toolUsePromptTokensDetails;
        this.totalTokenCount = builder.totalTokenCount;
        this.trafficType = builder.trafficType;
    }

    /** Returns the per-modality cache token details. */
    public List<ModalityTokenCount> getCacheTokensDetails() {
        return this.cacheTokensDetails;
    }

    /** Returns the cached content token count. */
    public Integer getCachedContentTokenCount() {
        return this.cachedContentTokenCount;
    }

    /** Returns the candidates token count. */
    public Integer getCandidatesTokenCount() {
        return this.candidatesTokenCount;
    }

    /** Returns the per-modality candidates token details. */
    public List<ModalityTokenCount> getCandidatesTokensDetails() {
        return this.candidatesTokensDetails;
    }

    /** Returns the prompt token count. */
    public Integer getPromptTokenCount() {
        return this.promptTokenCount;
    }

    /** Returns the per-modality prompt token details. */
    public List<ModalityTokenCount> getPromptTokensDetails() {
        return this.promptTokensDetails;
    }

    /** Returns the thoughts token count. */
    public Integer getThoughtsTokenCount() {
        return this.thoughtsTokenCount;
    }

    /** Returns the tool-use prompt token count. */
    public Integer getToolUsePromptTokenCount() {
        return this.toolUsePromptTokenCount;
    }

    /** Returns the per-modality tool-use prompt token details. */
    public List<ModalityTokenCount> getToolUsePromptTokensDetails() {
        return this.toolUsePromptTokensDetails;
    }

    /** Returns the total token count. */
    public Integer getTotalTokenCount() {
        return this.totalTokenCount;
    }

    /** Returns the traffic type. */
    public TrafficType getTrafficType() {
        return this.trafficType;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code UsageMetadata}. */
    public static final class Builder {

        List<ModalityTokenCount> cacheTokensDetails = new ArrayList<>();
        Integer cachedContentTokenCount;
        Integer candidatesTokenCount;
        List<ModalityTokenCount> candidatesTokensDetails = new ArrayList<>();
        Integer promptTokenCount;
        List<ModalityTokenCount> promptTokensDetails = new ArrayList<>();
        Integer thoughtsTokenCount;
        Integer toolUsePromptTokenCount;
        List<ModalityTokenCount> toolUsePromptTokensDetails = new ArrayList<>();
        Integer totalTokenCount;
        TrafficType trafficType;

        /** Replaces the cache token details with a copy of the given list. */
        public Builder cacheTokensDetails(List<ModalityTokenCount> cacheTokensDetails) {
            this.cacheTokensDetails.clear();
            this.cacheTokensDetails.addAll(cacheTokensDetails);
            return this;
        }

        /** Appends one cache token detail. */
        public Builder addCacheTokensDetail(ModalityTokenCount cacheTokensDetail) {
            this.cacheTokensDetails.add(cacheTokensDetail);
            return this;
        }

        /** Builds and appends one cache token detail. */
        public Builder addCacheTokensDetail(ModalityTokenCount.Builder cacheTokensDetail) {
            this.cacheTokensDetails.add(cacheTokensDetail.build());
            return this;
        }

        /** Sets the cached content token count. */
        public Builder cachedContentTokenCount(Integer cachedContentTokenCount) {
            this.cachedContentTokenCount = cachedContentTokenCount;
            return this;
        }

        /** Sets the candidates token count. */
        public Builder candidatesTokenCount(Integer candidatesTokenCount) {
            this.candidatesTokenCount = candidatesTokenCount;
            return this;
        }

        /** Replaces the candidates token details with a copy of the given list. */
        public Builder candidatesTokensDetails(List<ModalityTokenCount> candidatesTokensDetails) {
            this.candidatesTokensDetails.clear();
            this.candidatesTokensDetails.addAll(candidatesTokensDetails);
            return this;
        }

        /** Appends one candidates token detail. */
        public Builder addCandidatesTokensDetail(ModalityTokenCount candidatesTokensDetail) {
            this.candidatesTokensDetails.add(candidatesTokensDetail);
            return this;
        }

        /** Builds and appends one candidates token detail. */
        public Builder addCandidatesTokensDetail(
                ModalityTokenCount.Builder candidatesTokensDetail) {
            this.candidatesTokensDetails.add(candidatesTokensDetail.build());
            return this;
        }

        /** Sets the prompt token count. */
        public Builder promptTokenCount(Integer promptTokenCount) {
            this.promptTokenCount = promptTokenCount;
            return this;
        }

        /** Replaces the prompt token details with a copy of the given list. */
        public Builder promptTokensDetails(List<ModalityTokenCount> promptTokensDetails) {
            this.promptTokensDetails.clear();
            this.promptTokensDetails.addAll(promptTokensDetails);
            return this;
        }

        /** Appends one prompt token detail. */
        public Builder addPromptTokensDetail(ModalityTokenCount promptTokensDetail) {
            this.promptTokensDetails.add(promptTokensDetail);
            return this;
        }

        /** Builds and appends one prompt token detail. */
        public Builder addPromptTokensDetail(ModalityTokenCount.Builder promptTokensDetail) {
            this.promptTokensDetails.add(promptTokensDetail.build());
            return this;
        }

        /** Sets the thoughts token count. */
        public Builder thoughtsTokenCount(Integer thoughtsTokenCount) {
            this.thoughtsTokenCount = thoughtsTokenCount;
            return this;
        }

        /** Sets the tool-use prompt token count. */
        public Builder toolUsePromptTokenCount(Integer toolUsePromptTokenCount) {
            this.toolUsePromptTokenCount = toolUsePromptTokenCount;
            return this;
        }

        /** Replaces the tool-use prompt token details with a copy of the given list. */
        public Builder toolUsePromptTokensDetails(
                List<ModalityTokenCount> toolUsePromptTokensDetails) {
            this.toolUsePromptTokensDetails.clear();
            this.toolUsePromptTokensDetails.addAll(toolUsePromptTokensDetails);
            return this;
        }

        /** Appends one tool-use prompt token detail. */
        public Builder addToolUsePromptTokensDetail(ModalityTokenCount toolUsePromptTokensDetail) {
            this.toolUsePromptTokensDetails.add(toolUsePromptTokensDetail);
            return this;
        }

        /** Builds and appends one tool-use prompt token detail. */
        public Builder addToolUsePromptTokensDetail(
                ModalityTokenCount.Builder toolUsePromptTokensDetail) {
            this.toolUsePromptTokensDetails.add(toolUsePromptTokensDetail.build());
            return this;
        }

        /** Sets the total token count. */
        public Builder totalTokenCount(Integer totalTokenCount) {
            this.totalTokenCount = totalTokenCount;
            return this;
        }

        /** Sets the traffic type. */
        public Builder trafficType(TrafficType trafficType) {
            this.trafficType = trafficType;
            return this;
        }

        /** Builds the {@code UsageMetadata} instance. */
        public UsageMetadata build() {
            return new UsageMetadata(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/VertexRagStore.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import java.util.ArrayList;
import java.util.List;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class VertexRagStore {

    private List<String> ragCorpora;
    private List<VertexRagStoreRagResource> ragResources;
    private RagRetrievalConfig ragRetrievalConfig;
    private Integer similarityTopK;
    private Boolean storeContext;
    private Double vectorDistanceThreshold;

    VertexRagStore(Builder builder) {
        this.ragCorpora = builder.ragCorpora;
        this.ragResources = builder.ragResources;
        this.ragRetrievalConfig = builder.ragRetrievalConfig;
        this.similarityTopK = builder.similarityTopK;
        this.storeContext = builder.storeContext;
        this.vectorDistanceThreshold = builder.vectorDistanceThreshold;
    }

    /** Returns the RAG corpora names. */
    public List<String> getRagCorpora() {
        return this.ragCorpora;
    }

    /** Returns the RAG resources. */
    public List<VertexRagStoreRagResource> getRagResources() {
        return this.ragResources;
    }

    /** Returns the RAG retrieval configuration. */
    public RagRetrievalConfig getRagRetrievalConfig() {
        return this.ragRetrievalConfig;
    }

    /** Returns the similarity top-k. */
    public Integer getSimilarityTopK() {
        return this.similarityTopK;
    }

    /** Returns whether context is stored. */
    public Boolean getStoreContext() {
        return this.storeContext;
    }

    /** Returns the vector distance threshold. */
    public Double getVectorDistanceThreshold() {
        return this.vectorDistanceThreshold;
    }

    /** Creates a new {@code Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Builder class for {@code VertexRagStore}. */
    public static final class Builder {

        List<String> ragCorpora;
        List<VertexRagStoreRagResource> ragResources = new ArrayList<>();
        RagRetrievalConfig ragRetrievalConfig;
        Integer similarityTopK;
        Boolean storeContext;
        Double vectorDistanceThreshold;

        /** Sets the RAG corpora names (stored by reference). */
        public Builder ragCorpora(List<String> ragCorpora) {
            this.ragCorpora = ragCorpora;
            return this;
        }

        /** Replaces the RAG resources with a copy of the given list. */
        public Builder ragResources(List<VertexRagStoreRagResource> ragResources) {
            this.ragResources.clear();
            this.ragResources.addAll(ragResources);
            return this;
        }

        /** Appends one RAG resource. */
        public Builder addRagResource(VertexRagStoreRagResource ragResource) {
            this.ragResources.add(ragResource);
            return this;
        }

        /** Builds and appends one RAG resource. */
        public Builder addRagResource(VertexRagStoreRagResource.Builder ragResource) {
            this.ragResources.add(ragResource.build());
            return this;
        }

        /** Sets the RAG retrieval configuration. */
        public Builder ragRetrievalConfig(RagRetrievalConfig ragRetrievalConfig) {
            this.ragRetrievalConfig = ragRetrievalConfig;
            return this;
        }

        /** Builds and sets the RAG retrieval configuration. */
        public Builder ragRetrievalConfig(RagRetrievalConfig.Builder ragRetrievalConfig) {
            this.ragRetrievalConfig = ragRetrievalConfig.build();
            return this;
        }

        /** Sets the similarity top-k. */
        public Builder similarityTopK(Integer similarityTopK) {
            this.similarityTopK = similarityTopK;
            return this;
        }

        /** Sets whether context is stored. */
        public Builder storeContext(Boolean storeContext) {
            this.storeContext = storeContext;
            return this;
        }

        /** Sets the vector distance threshold. */
        public Builder vectorDistanceThreshold(Double vectorDistanceThreshold) {
            this.vectorDistanceThreshold = vectorDistanceThreshold;
            return this;
        }

        /** Builds the {@code VertexRagStore} instance. */
        public VertexRagStore build() {
            return new VertexRagStore(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/VertexRagStoreRagResource.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
import java.util.List;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class VertexRagStoreRagResource {

    private String ragCorpus;
    private List<String> ragFileIds;

    VertexRagStoreRagResource(Builder builder) {
        this.ragCorpus = builder.ragCorpus;
        this.ragFileIds = builder.ragFileIds;
    }

    /** Returns a new builder for {@code VertexRagStoreRagResource}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the RAG corpus resource name, or null if unset. */
    public String getRagCorpus() {
        return this.ragCorpus;
    }

    /** Returns the RAG file ids, or null if unset. */
    public List<String> getRagFileIds() {
        return this.ragFileIds;
    }

    /** Builder class for {@code VertexRagStoreRagResource}. */
    public static final class Builder {

        String ragCorpus;
        List<String> ragFileIds;

        public Builder ragCorpus(String ragCorpus) {
            this.ragCorpus = ragCorpus;
            return this;
        }

        public Builder ragFileIds(List<String> ragFileIds) {
            this.ragFileIds = ragFileIds;
            return this;
        }

        public VertexRagStoreRagResource build() {
            return new VertexRagStoreRagResource(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/VideoMetadata.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class VideoMetadata {

    private String endOffset;
    private Double fps;
    private String startOffset;

    VideoMetadata(Builder builder) {
        this.endOffset = builder.endOffset;
        this.fps = builder.fps;
        this.startOffset = builder.startOffset;
    }

    /** Returns a new builder for {@code VideoMetadata}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the end offset of the video segment, or null if unset. */
    public String getEndOffset() {
        return this.endOffset;
    }

    /** Returns the frames-per-second sampling rate, or null if unset. */
    public Double getFps() {
        return this.fps;
    }

    /** Returns the start offset of the video segment, or null if unset. */
    public String getStartOffset() {
        return this.startOffset;
    }

    /** Builder class for {@code VideoMetadata}. */
    public static final class Builder {

        String endOffset;
        Double fps;
        String startOffset;

        public Builder endOffset(String endOffset) {
            this.endOffset = endOffset;
            return this;
        }

        public Builder fps(Double fps) {
            this.fps = fps;
            return this;
        }

        public Builder startOffset(String startOffset) {
            this.startOffset = startOffset;
            return this;
        }

        public VideoMetadata build() {
            return new VideoMetadata(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/VoiceConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.gemini.types;
/** A data class represents Gemini schema. */
@SuppressWarnings("MissingJavadocMethod")
public class VoiceConfig {

    private PrebuiltVoiceConfig prebuiltVoiceConfig;

    VoiceConfig(Builder builder) {
        this.prebuiltVoiceConfig = builder.prebuiltVoiceConfig;
    }

    /** Returns a new builder for {@code VoiceConfig}. */
    public static Builder builder() {
        return new Builder();
    }

    /** Returns the prebuilt voice configuration, or null if unset. */
    public PrebuiltVoiceConfig getPrebuiltVoiceConfig() {
        return this.prebuiltVoiceConfig;
    }

    /** Builder class for {@code VoiceConfig}. */
    public static final class Builder {

        PrebuiltVoiceConfig prebuiltVoiceConfig;

        public Builder prebuiltVoiceConfig(PrebuiltVoiceConfig prebuiltVoiceConfig) {
            this.prebuiltVoiceConfig = prebuiltVoiceConfig;
            return this;
        }

        public Builder prebuiltVoiceConfig(PrebuiltVoiceConfig.Builder prebuiltVoiceConfig) {
            return prebuiltVoiceConfig(prebuiltVoiceConfig.build());
        }

        public VoiceConfig build() {
            return new VoiceConfig(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/gemini/types/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes for Gemini types. */
package ai.djl.genai.gemini.types;
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/huggingface/Details.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.huggingface;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/** A data class represents text generation schema. */
public class Details {

    // Serialized as "finish_reason" on the wire.
    @SerializedName("finish_reason")
    private String finishReason;

    // Serialized as "generated_tokens" on the wire.
    @SerializedName("generated_tokens")
    private int generatedTokens;

    private String inputs;
    private List<Token> tokens;
    // NOTE(review): presumably populated only when prefill/decoder-input details are
    // requested — confirm against the serving endpoint's schema.
    private List<Token> prefill;

    /**
     * Constructs a {@code Details} instance.
     *
     * @param finishReason the finish reason
     * @param generatedTokens the number of generated tokens
     * @param inputs the input text
     * @param tokens the tokens
     * @param prefill the prefill tokens
     */
    public Details(
            String finishReason,
            int generatedTokens,
            String inputs,
            List<Token> tokens,
            List<Token> prefill) {
        this.finishReason = finishReason;
        this.generatedTokens = generatedTokens;
        this.inputs = inputs;
        this.tokens = tokens;
        this.prefill = prefill;
    }

    /**
     * Returns the finish reason.
     *
     * @return the finish reason
     */
    public String getFinishReason() {
        return finishReason;
    }

    /**
     * Returns the number of generated tokens.
     *
     * @return the number of generated tokens
     */
    public int getGeneratedTokens() {
        return generatedTokens;
    }

    /**
     * Returns the input text.
     *
     * @return the input text
     */
    public String getInputs() {
        return inputs;
    }

    /**
     * Returns the tokens details.
     *
     * @return the tokens details
     */
    public List<Token> getTokens() {
        return tokens;
    }

    /**
     * Returns the prefill tokens information.
     *
     * @return the prefill tokens information
     */
    public List<Token> getPrefill() {
        return prefill;
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/huggingface/GenerationConfig.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.huggingface;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** A data class represents text generation schema. */
public class GenerationConfig {

    private Map<String, Object> parameters;

    GenerationConfig(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    /**
     * Returns the text generation parameters.
     *
     * @return the text generation parameters
     */
    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Creates a builder to build a {@code GenerationConfig}.
     *
     * @return a new builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /** The builder for {@code GenerationConfig}. */
    public static final class Builder {

        private Map<String, Object> parameters = new ConcurrentHashMap<>();

        /**
         * Sets if do_sample.
         *
         * @param doSample if do_sample
         * @return the builder
         */
        public Builder doSample(Boolean doSample) {
            return put("do_sample", doSample);
        }

        /**
         * Sets the seed.
         *
         * @param seed the seed
         * @return the builder
         */
        public Builder seed(Integer seed) {
            return put("seed", seed);
        }

        /**
         * Sets the temperature.
         *
         * @param temperature the temperature
         * @return the builder
         */
        public Builder temperature(Float temperature) {
            return put("temperature", temperature);
        }

        /**
         * Sets the repetition penalty.
         *
         * @param repetitionPenalty the repetition penalty
         * @return the builder
         */
        public Builder repetitionPenalty(Float repetitionPenalty) {
            return put("repetition_penalty", repetitionPenalty);
        }

        /**
         * Sets the top_k.
         *
         * @param topK the top_k
         * @return the builder
         */
        public Builder topK(Integer topK) {
            return put("top_k", topK);
        }

        /**
         * Sets the top_p.
         *
         * @param topP the top_p
         * @return the builder
         */
        public Builder topP(Float topP) {
            return put("top_p", topP);
        }

        /**
         * Sets the max new tokens.
         *
         * @param maxNewTokens the max new tokens
         * @return the builder
         */
        public Builder maxNewTokens(Integer maxNewTokens) {
            return put("max_new_tokens", maxNewTokens);
        }

        /**
         * Sets if return the details.
         *
         * @param details if return the details
         * @return the builder
         */
        public Builder details(Boolean details) {
            return put("details", details);
        }

        /**
         * Sets if return full text.
         *
         * @param returnFullText if return full text
         * @return the builder
         */
        public Builder returnFullText(Boolean returnFullText) {
            return put("return_full_text", returnFullText);
        }

        /**
         * Sets the stop sequences.
         *
         * @param stopSequences the stop sequences
         * @return the builder
         */
        public Builder stopSequences(List<String> stopSequences) {
            return put("stop_sequences", stopSequences);
        }

        /**
         * Sets if return the decoder input details.
         *
         * @param decoderInputDetails if return the decoder input details
         * @return the builder
         */
        public Builder decoderInputDetails(Boolean decoderInputDetails) {
            return put("decoder_input_details", decoderInputDetails);
        }

        /**
         * Sets the custom text generation parameter. A null value is ignored (treated as
         * "leave unset").
         *
         * @param key the parameter key
         * @param value the parameter value
         * @return the builder
         */
        public Builder addParameter(String key, Object value) {
            return put(key, value);
        }

        /**
         * Returns a new {@code GenerationConfig} instance.
         *
         * @return a new {@code GenerationConfig} instance
         */
        public GenerationConfig build() {
            return new GenerationConfig(parameters);
        }

        // ConcurrentHashMap rejects null values with a NullPointerException; treating a
        // null argument as "unset" lets callers pass through optional boxed values safely.
        private Builder put(String key, Object value) {
            if (value != null) {
                parameters.put(key, value);
            }
            return this;
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/huggingface/GenerationInput.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.huggingface;
import java.util.Map;
/** A data class represents text generation schema. */
public class GenerationInput {

    // Typically a single String prompt or a String[] of prompts (see the builder).
    private Object inputs;
    private Map<String, Object> parameters;
    private Boolean stream;

    GenerationInput(Builder builder) {
        this.inputs = builder.inputs;
        this.parameters = builder.parameters;
        this.stream = builder.stream;
    }

    /**
     * Returns the inputs.
     *
     * @return the inputs
     */
    public Object getInputs() {
        return inputs;
    }

    /**
     * Returns the text generation parameters.
     *
     * @return the text generation parameters
     */
    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Returns if streaming response.
     *
     * @return if streaming response
     */
    public Boolean getStream() {
        return stream;
    }

    /**
     * Creates a builder to build a {@code GenerationInput}.
     *
     * @return a new builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Creates a builder with the specified input text.
     *
     * @param text the input text
     * @return a new builder
     */
    public static Builder text(String text) {
        return builder().input(text);
    }

    /**
     * Creates a builder with the specified text content.
     *
     * @param inputs the inputs
     * @return a new builder
     */
    public static Builder text(String[] inputs) {
        return builder().inputs(inputs);
    }

    /** The builder for {@code GenerationInput}. */
    public static final class Builder {

        Object inputs;
        Map<String, Object> parameters;
        Boolean stream;

        /**
         * Sets the input text.
         *
         * @param text the input text
         * @return the builder
         */
        public Builder input(String text) {
            this.inputs = text;
            return this;
        }

        /**
         * Sets the input messages.
         *
         * @param inputs the input messages
         * @return the builder
         */
        public Builder inputs(String... inputs) {
            this.inputs = inputs;
            return this;
        }

        /**
         * Sets the input messages.
         *
         * @param inputs the input messages
         * @return the builder
         */
        public Builder inputs(Object inputs) {
            this.inputs = inputs;
            return this;
        }

        /**
         * Sets the generation parameters.
         *
         * @param config generation parameters
         * @return the builder
         */
        public Builder config(GenerationConfig config) {
            this.parameters = config.getParameters();
            return this;
        }

        /**
         * Sets the generation parameters.
         *
         * @param config generation parameters
         * @return the builder
         */
        public Builder config(GenerationConfig.Builder config) {
            return config(config.build());
        }

        /**
         * Sets if return response in stream.
         *
         * @param stream if return response in stream
         * @return the builder
         */
        public Builder stream(Boolean stream) {
            this.stream = stream;
            return this;
        }

        /**
         * Builds the {@code GenerationInput} instance.
         *
         * @return the {@code GenerationInput} instance
         */
        public GenerationInput build() {
            return new GenerationInput(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/huggingface/GenerationOutput.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.huggingface;
import com.google.gson.annotations.SerializedName;
/** A data class represents text generation schema. */
public class GenerationOutput {

    // Serialized as "generated_text" on the wire.
    @SerializedName("generated_text")
    private String generatedText;

    // NOTE(review): presumably present only when details were requested — confirm
    // against the serving endpoint's schema.
    private Details details;
    // NOTE(review): presumably populated per-chunk in streaming responses — confirm.
    private Token token;

    /**
     * Constructs a {@code GenerationOutput} instance.
     *
     * @param generatedText the generated text
     * @param details the details
     * @param token the token
     */
    public GenerationOutput(String generatedText, Details details, Token token) {
        this.generatedText = generatedText;
        this.details = details;
        this.token = token;
    }

    /**
     * Returns the generated text.
     *
     * @return the generated text
     */
    public String getGeneratedText() {
        return generatedText;
    }

    /**
     * Returns the details.
     *
     * @return the details
     */
    public Details getDetails() {
        return details;
    }

    /**
     * Returns the token.
     *
     * @return the token
     */
    public Token getToken() {
        return token;
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/huggingface/StreamGenerationOutput.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.huggingface;
import ai.djl.util.JsonUtils;
import java.util.Iterator;
/** A stream version of {@link GenerationOutput}. */
public class StreamGenerationOutput implements Iterable<GenerationOutput> {

    private transient Iterator<String> output;

    StreamGenerationOutput(Iterator<String> output) {
        this.output = output;
    }

    /**
     * Customizes schema deserialization.
     *
     * @param output the output iterator
     * @return the deserialized {@code StreamGenerationOutput} instance
     */
    public static StreamGenerationOutput fromJson(Iterator<String> output) {
        return new StreamGenerationOutput(output);
    }

    /** {@inheritDoc} */
    @Override
    public Iterator<GenerationOutput> iterator() {
        return new Iterator<GenerationOutput>() {

            /** {@inheritDoc} */
            @Override
            public boolean hasNext() {
                return output.hasNext();
            }

            /** {@inheritDoc} */
            @Override
            public GenerationOutput next() {
                String json = output.next();
                // An empty chunk maps to a placeholder with all fields null.
                return json.isEmpty()
                        ? new GenerationOutput(null, null, null)
                        : JsonUtils.GSON.fromJson(json, GenerationOutput.class);
            }
        };
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/huggingface/Token.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.huggingface;
import com.google.gson.annotations.SerializedName;
/** A data class represents text generation schema. */
public class Token {

    private int id;
    private String text;

    // Serialized as "log_prob" on the wire.
    // NOTE(review): Hugging Face TGI emits "logprob" (no underscore) for token log
    // probabilities — confirm which server schema this field is meant to match.
    @SerializedName("log_prob")
    private double logprob;

    /**
     * Constructs a {@code Token} instance.
     *
     * @param id the token id
     * @param text the text
     * @param logprob the log probability
     */
    public Token(int id, String text, double logprob) {
        this.id = id;
        this.text = text;
        this.logprob = logprob;
    }

    /**
     * Returns the token id.
     *
     * @return the token id
     */
    public int getId() {
        return id;
    }

    /**
     * Returns the token text.
     *
     * @return the token text
     */
    public String getText() {
        return text;
    }

    /**
     * Returns the log probability.
     *
     * @return the log probability
     */
    public double getLogprob() {
        return logprob;
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/huggingface/package-info.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes for text generation. */
package ai.djl.genai.huggingface;
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/ChatInput.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import ai.djl.genai.anthropic.AnthropicInput;
import ai.djl.genai.anthropic.Source;
import ai.djl.genai.anthropic.ToolChoice;
import ai.djl.genai.gemini.GeminiInput;
import ai.djl.genai.gemini.types.Blob;
import ai.djl.genai.gemini.types.FunctionDeclaration;
import ai.djl.genai.gemini.types.GenerationConfig;
import ai.djl.genai.gemini.types.Part;
import ai.djl.genai.gemini.types.Schema;
import ai.djl.genai.gemini.types.ThinkingConfig;
import ai.djl.util.JsonSerializable;
import ai.djl.util.JsonUtils;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.annotations.SerializedName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** The chat completion style input. */
@SuppressWarnings("serial")
public class ChatInput implements JsonSerializable {
private static final long serialVersionUID = 1L;
private static final Logger logger = LoggerFactory.getLogger(ChatInput.class);
private static final Pattern URL_PATTERN = Pattern.compile("data:([\\w/]+);base64,(.+)");
static final Gson GSON = new Gson();
private transient Type inputType;
private String model;
private List<Message> messages;
@SerializedName("frequency_penalty")
private Float frequencyPenalty;
@SerializedName("logit_bias")
Map<String, Double> logitBias;
private Boolean logprobs;
// OpenAI's chat completions API uses the snake_case wire name "top_logprobs"; the
// previous value "topLogprobs" was inconsistent with every other field in this class
// and did not match the public API schema. "alternate" keeps deserialization of
// payloads that used the old camelCase key working.
@SerializedName(value = "top_logprobs", alternate = "topLogprobs")
private Integer topLogprobs;
@SerializedName("max_completion_tokens")
private Integer maxCompletionTokens;
private Integer n;
@SerializedName("presence_penalty")
private Float presencePenalty;
@SerializedName("reasoning_effort")
private String reasoningEffort;
private Integer seed;
private List<String> stop;
private Boolean stream;
private Float temperature;
@SerializedName("top_p")
private Float topP;
private String user;
@SerializedName("ignore_eos")
private Boolean ignoreEos;
private List<Tool> tools;
@SerializedName("tool_choice")
private Object toolChoice;
@SerializedName("extra_body")
private Object extraBody;
/**
 * Constructs a {@code ChatInput} by copying every configured value from the builder.
 *
 * @param builder the populated builder
 */
ChatInput(Builder builder) {
    inputType = builder.inputType;
    model = builder.model;
    messages = builder.messages;
    frequencyPenalty = builder.frequencyPenalty;
    logitBias = builder.logitBias;
    logprobs = builder.logprobs;
    topLogprobs = builder.topLogprobs;
    maxCompletionTokens = builder.maxCompletionTokens;
    n = builder.n;
    presencePenalty = builder.presencePenalty;
    reasoningEffort = builder.reasoningEffort;
    seed = builder.seed;
    stop = builder.stop;
    stream = builder.stream;
    temperature = builder.temperature;
    topP = builder.topP;
    user = builder.user;
    ignoreEos = builder.ignoreEos;
    tools = builder.tools;
    toolChoice = builder.toolChoice;
    extraBody = builder.extraBody;
}
/** {@inheritDoc} */
@Override
public JsonElement serialize() {
    // Convert to the target provider's native request schema before serialization;
    // a plain chat-completion request is serialized as-is.
    if (inputType == Type.GEMINI) {
        return JsonUtils.GSON.toJsonTree(toGemini());
    } else if (inputType == Type.ANTHROPIC || inputType == Type.ANTHROPIC_VERTEX) {
        return JsonUtils.GSON.toJsonTree(toAnthropic());
    }
    return GSON.toJsonTree(this);
}
/** Converts this OpenAI-style request into Gemini's native request schema. */
@SuppressWarnings("unchecked")
private GeminiInput toGemini() {
    GeminiInput.Builder builder = GeminiInput.builder();
    GenerationConfig.Builder config = GenerationConfig.builder();
    for (Message message : messages) {
        String role = message.getRole();
        // Content is either a plain String or a List<Content> of typed parts.
        Object obj = message.getContent();
        ai.djl.genai.gemini.types.Content.Builder cb =
                ai.djl.genai.gemini.types.Content.builder();
        if (obj instanceof String) {
            cb.addPart(Part.text((String) obj));
        } else {
            for (Content content : (List<Content>) obj) {
                String type = content.getType();
                if ("image_url".equals(type)) {
                    Content.ImageContent ic = content.getImageUrl();
                    String url = ic.getUrl();
                    // "data:<mime>;base64,<data>" URIs are split into mime type + data;
                    // anything else is passed through as the blob data verbatim.
                    Matcher m = URL_PATTERN.matcher(url);
                    if (m.matches()) {
                        Blob blob =
                                Blob.builder().data(m.group(2)).mimeType(m.group(1)).build();
                        cb.addPart(Part.builder().inlineData(blob));
                    } else {
                        Blob blob = Blob.builder().data(url).build();
                        cb.addPart(Part.builder().inlineData(blob));
                    }
                } else if ("file".equals(type)) {
                    Content.FileContent fc = content.getFile();
                    // NOTE(review): mime type is passed as null here — confirm
                    // Part.fileData tolerates/derives it.
                    cb.addPart(Part.fileData(fc.getFileData(), null));
                } else if ("text".equals(type)) {
                    cb.addPart(Part.text(content.getText()));
                } else {
                    throw new IllegalArgumentException("Unsupported type: " + type);
                }
            }
        }
        if ("system".equals(role)) {
            // System messages become the Gemini system instruction.
            // NOTE(review): assumes the system message has exactly one text part —
            // additional parts would be silently dropped.
            config.systemInstruction(cb.build().getParts().get(0).getText());
        } else {
            builder.addContent(cb.role(role));
        }
    }
    if (tools != null && !tools.isEmpty()) {
        // Only "function" tools are converted; other tool types are skipped with a warning.
        for (Tool tool : tools) {
            if (!"function".equals(tool.getType())) {
                logger.warn("Unsupported tool type: {}", tool.getType());
                continue;
            }
            Function function = tool.getFunction();
            Object param = function.getParameters();
            // The OpenAI JSON-schema parameters map is re-expressed as a Gemini Schema.
            Map<String, Object> parameters = (Map<String, Object>) param;
            Map<String, Map<String, String>> properties =
                    (Map<String, Map<String, String>>) parameters.get("properties");
            List<String> required = (List<String>) parameters.get("required");
            String returnType = ((String) parameters.get("type")).toUpperCase(Locale.ROOT);
            Map<String, Schema> prop = new LinkedHashMap<>(); // NOPMD
            for (Map.Entry<String, Map<String, String>> entry : properties.entrySet()) {
                // Gemini Type enum constants are upper-case versions of JSON-schema types.
                String t = entry.getValue().get("type").toUpperCase(Locale.ROOT);
                Schema schema =
                        Schema.builder()
                                .type(ai.djl.genai.gemini.types.Type.valueOf(t))
                                .build();
                prop.put(entry.getKey(), schema);
            }
            Schema sc =
                    Schema.builder()
                            .type(ai.djl.genai.gemini.types.Type.valueOf(returnType))
                            .required(required)
                            .properties(prop)
                            .build();
            FunctionDeclaration fd =
                    FunctionDeclaration.builder()
                            .name(function.getName())
                            .description(function.getDescription())
                            .parameters(sc)
                            .build();
            ai.djl.genai.gemini.types.Tool t =
                    ai.djl.genai.gemini.types.Tool.builder().addFunctionDeclaration(fd).build();
            config.addTool(t);
        }
    }
    // Map OpenAI sampling fields onto their Gemini equivalents (null values simply
    // leave the Gemini config fields unset).
    config.responseLogprobs(logprobs);
    config.logprobs(topLogprobs);
    config.frequencyPenalty(frequencyPenalty);
    config.presencePenalty(presencePenalty);
    config.maxOutputTokens(maxCompletionTokens);
    config.seed(seed);
    config.stopSequences(stop);
    config.candidateCount(n);
    config.topP(topP);
    config.temperature(temperature);
    // "high" enables thoughts with no budget; "medium" caps the thinking budget at 512
    // tokens; any other value (including "low" or null) adds no thinking config.
    if ("high".equalsIgnoreCase(reasoningEffort)) {
        config.thinkingConfig(ThinkingConfig.builder().includeThoughts(true));
    } else if ("medium".equalsIgnoreCase(reasoningEffort)) {
        config.thinkingConfig(
                ThinkingConfig.builder().includeThoughts(true).thinkingBudget(512));
    }
    builder.generationConfig(config.build());
    return builder.build();
}
/** Converts this OpenAI-style request into Anthropic's native request schema. */
@SuppressWarnings("unchecked")
private AnthropicInput toAnthropic() {
    AnthropicInput.Builder builder = AnthropicInput.builder();
    builder.model(model).stream(stream).stopSequences(stop).temperature(temperature).topP(topP);
    if (maxCompletionTokens != null) {
        builder.maxTokens(maxCompletionTokens);
    }
    for (Message message : messages) {
        String role = message.getRole();
        if ("system".equals(role)) {
            // System messages map to Anthropic's top-level system instructions.
            // NOTE(review): assumes the system content is a plain String — a typed
            // content list here would throw ClassCastException.
            builder.systemInstructions((String) message.getContent());
            continue;
        }
        ai.djl.genai.anthropic.Message.Builder mb = ai.djl.genai.anthropic.Message.builder();
        mb.role(role);
        // Content is either a plain String or a List<Content> of typed parts.
        Object obj = message.getContent();
        if (obj instanceof String) {
            mb.text((String) obj);
        } else {
            for (Content content : (List<Content>) obj) {
                String type = content.getType();
                if ("image_url".equals(type)) {
                    Content.ImageContent ic = content.getImageUrl();
                    String url = ic.getUrl();
                    // "data:<mime>;base64,<data>" URIs become base64 source blocks;
                    // other URLs are passed through as image references.
                    Matcher m = URL_PATTERN.matcher(url);
                    if (m.matches()) {
                        String mimeType = m.group(1);
                        String data = m.group(2);
                        ai.djl.genai.anthropic.Content.Builder cb =
                                ai.djl.genai.anthropic.Content.builder();
                        cb.type("image")
                                .source(
                                        Source.builder()
                                                .type("base64")
                                                .mediaType(mimeType)
                                                .data(data));
                        mb.addContent(cb);
                    } else {
                        mb.addContent(ai.djl.genai.anthropic.Content.image(url));
                    }
                } else if ("text".equals(type)) {
                    mb.addContent(ai.djl.genai.anthropic.Content.text(content.getText()));
                } else {
                    // Unlike toGemini, "file" content is not supported for Anthropic.
                    throw new IllegalArgumentException("Unsupported type: " + type);
                }
            }
        }
        builder.addMessage(mb);
    }
    if (tools != null && !tools.isEmpty()) {
        // Only "function" tools are converted; the OpenAI parameters map is forwarded
        // directly as the Anthropic input schema.
        for (Tool tool : tools) {
            if (!"function".equals(tool.getType())) {
                logger.warn("Unsupported tool type: {}", tool.getType());
                continue;
            }
            Function function = tool.getFunction();
            Object param = function.getParameters();
            ai.djl.genai.anthropic.Tool t =
                    ai.djl.genai.anthropic.Tool.builder()
                            .name(function.getName())
                            .description(function.getDescription())
                            .inputSchema(param)
                            .build();
            builder.addTool(t);
        }
    }
    // Only the "auto" string form of tool_choice is translated; other forms are dropped.
    if ("auto".equals(toolChoice)) {
        builder.toolChoice(ToolChoice.builder().type("auto").build());
    }
    // Vertex-hosted Anthropic requires a different version marker than the direct API.
    if (inputType == Type.ANTHROPIC_VERTEX) {
        builder.anthropicVersion("vertex-2023-10-16");
    } else if (inputType == Type.ANTHROPIC) {
        builder.anthropicVersion("2023-10-16");
    }
    return builder.build();
}
/**
 * Returns the model.
 *
 * @return the model identifier set on this request, or null if unset
 */
public String getModel() {
    return model;
}
/**
 * Returns the messages.
 *
 * @return the conversation messages, in order
 */
public List<Message> getMessages() {
    return messages;
}
/**
 * Returns the frequency penalty.
 *
 * @return the frequency penalty, or null if unset
 */
public Float getFrequencyPenalty() {
    return frequencyPenalty;
}
/**
 * Returns the logit bias.
 *
 * @return the per-token logit bias map, or null if unset
 */
public Map<String, Double> getLogitBias() {
    return logitBias;
}
/**
 * Returns the logprobs.
 *
 * @return whether log probabilities were requested, or null if unset
 */
public Boolean getLogprobs() {
    return logprobs;
}
/**
 * Returns the top logprobs.
 *
 * @return the number of top log probabilities requested per token, or null if unset
 */
public Integer getTopLogprobs() {
    return topLogprobs;
}
/**
 * Returns the max tokens.
 *
 * @return the maximum number of completion tokens, or null if unset
 */
public Integer getMaxCompletionTokens() {
    return maxCompletionTokens;
}
/**
 * Returns the N.
 *
 * @return the number of completion candidates to generate, or null if unset
 */
public Integer getN() {
    return n;
}
/**
 * Returns the presence penalty.
 *
 * @return the presence penalty, or null if unset
 */
public Float getPresencePenalty() {
    return presencePenalty;
}
/**
 * Returns the reasoning effort ("high" and "medium" enable Gemini thinking when the
 * request is converted via {@code toGemini()}).
 *
 * @return the reasoning effort, or null if unset
 */
public String getReasoningEffort() {
    return reasoningEffort;
}
/**
 * Returns the seed.
 *
 * @return the sampling seed, or null if unset
 */
public Integer getSeed() {
    return seed;
}
/**
 * Returns the stop char sequences.
 *
 * @return the stop sequences, or null if unset
 */
public List<String> getStop() {
    return stop;
}
/**
 * Returns true if {@code stream} is enabled.
 *
 * @return true if {@code stream} is enabled, or null if unset
 */
public Boolean getStream() {
    return stream;
}
/**
 * Returns the temperature.
 *
 * @return the sampling temperature, or null if unset
 */
public Float getTemperature() {
    return temperature;
}
/**
 * Returns the top p value.
 *
 * @return the nucleus-sampling top_p value, or null if unset
 */
public Float getTopP() {
    return topP;
}
/**
 * Returns the user.
 *
 * @return the end-user identifier, or null if unset
 */
public String getUser() {
    return user;
}
/**
 * Returns if ignore eos.
 *
 * @return if ignore eos, or null if unset
 */
public Boolean getIgnoreEos() {
    return ignoreEos;
}
/**
 * Returns the {@link Tool}s.
 *
 * @return the {@code Tool}s, or null if unset
 */
public List<Tool> getTools() {
    return tools;
}
/**
 * Returns the tool choice (either a String such as "auto" or a structured object).
 *
 * @return the tool choice, or null if unset
 */
public Object getToolChoice() {
    return toolChoice;
}
/**
* Returns the extra body.
*
* @return the extra body
*/
public Object getExtraBody() {
return extraBody;
}
/**
* Creates a builder to build a {@code ChatInput}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/**
* Creates a builder with the specified model.
*
* @param model the model
* @return a new builder
*/
public static Builder model(String model) {
return builder().model(model);
}
/**
* Creates a builder with the specified text content.
*
* @param text the text
* @return a new builder
*/
public static Builder text(String text) {
return builder().addMessage(Message.text(text));
}
/**
* Creates a builder with the specified image url.
*
* @param imageUrl the image url
* @return a new builder
*/
public static Builder image(String imageUrl) {
return builder().addImage(imageUrl);
}
/**
* Creates a builder with the specified image data.
*
* @param image the image binary data
* @param mimeType the mime type of the image
* @return a new builder
*/
public static Builder image(byte[] image, String mimeType) {
return builder().addImage(image, mimeType);
}
/**
* Creates a builder with the specified file data.
*
* @param id the file id
* @param data the file data
* @param fileName the file name
* @return a new builder
*/
public static Builder file(String id, byte[] data, String fileName) {
return builder().addFile(id, data, fileName);
}
    /** The builder for {@code ChatInput}. */
    public static final class Builder {

        Type inputType = Type.CHAT_COMPLETION;
        String model;
        List<Message> messages = new ArrayList<>();
        Float frequencyPenalty;
        Map<String, Double> logitBias;
        Boolean logprobs;
        Integer topLogprobs;
        Integer maxCompletionTokens;
        Integer n;
        Float presencePenalty;
        String reasoningEffort;
        Integer seed;
        List<String> stop;
        Boolean stream;
        Float temperature;
        Float topP;
        String user;
        Boolean ignoreEos;
        List<Tool> tools;
        Object toolChoice;
        Object extraBody;

        /**
         * Sets the input type.
         *
         * @param inputType the input type
         * @return the builder
         */
        public Builder inputType(Type inputType) {
            this.inputType = inputType;
            return this;
        }

        /**
         * Sets the model.
         *
         * @param model the model
         * @return the builder
         */
        public Builder model(String model) {
            this.model = model;
            return this;
        }

        /**
         * Sets the messages, replacing any previously added messages.
         *
         * @param messages the messages
         * @return the builder
         */
        public Builder messages(List<Message> messages) {
            this.messages.clear();
            this.messages.addAll(messages);
            return this;
        }

        /**
         * Adds the message.
         *
         * @param message the message
         * @return the builder
         */
        public Builder addMessage(Message message) {
            this.messages.add(message);
            return this;
        }

        /**
         * Adds the message.
         *
         * @param message the message builder (built immediately)
         * @return the builder
         */
        public Builder addMessage(Message.Builder message) {
            return addMessage(message.build());
        }

        /**
         * Adds the text content as a new message.
         *
         * @param text the text
         * @return the builder
         */
        public Builder addText(String text) {
            this.messages.add(Message.text(text).build());
            return this;
        }

        /**
         * Adds the image content as a new message.
         *
         * @param imageUrl the image url
         * @return the builder
         */
        public Builder addImage(String imageUrl) {
            this.messages.add(Message.image(imageUrl).build());
            return this;
        }

        /**
         * Adds the image content as a new message.
         *
         * @param image the image data
         * @param mimeType the mime type of the image
         * @return the builder
         */
        public Builder addImage(byte[] image, String mimeType) {
            this.messages.add(Message.image(image, mimeType).build());
            return this;
        }

        /**
         * Adds the file content as a new message.
         *
         * @param id the file id
         * @param data the file data
         * @param fileName the file name
         * @return the builder
         */
        public Builder addFile(String id, byte[] data, String fileName) {
            this.messages.add(Message.file(id, data, fileName).build());
            return this;
        }

        /**
         * Sets the frequency penalty.
         *
         * @param frequencyPenalty the frequency penalty
         * @return the builder
         */
        public Builder frequencyPenalty(Float frequencyPenalty) {
            this.frequencyPenalty = frequencyPenalty;
            return this;
        }

        /**
         * Sets the logit bias.
         *
         * @param logitBias the logit bias
         * @return the builder
         */
        public Builder logitBias(Map<String, Double> logitBias) {
            this.logitBias = logitBias;
            return this;
        }

        /**
         * Sets the logprobs.
         *
         * @param logprobs the logprobs
         * @return the builder
         */
        public Builder logprobs(Boolean logprobs) {
            this.logprobs = logprobs;
            return this;
        }

        /**
         * Sets the top logprobs.
         *
         * @param topLogprobs the top logprobs
         * @return the builder
         */
        public Builder topLogprobs(Integer topLogprobs) {
            this.topLogprobs = topLogprobs;
            return this;
        }

        /**
         * Sets the max tokens.
         *
         * @param maxCompletionTokens the max tokens
         * @return the builder
         */
        public Builder maxCompletionTokens(Integer maxCompletionTokens) {
            this.maxCompletionTokens = maxCompletionTokens;
            return this;
        }

        /**
         * Sets the N.
         *
         * @param n the N
         * @return the builder
         */
        public Builder n(Integer n) {
            this.n = n;
            return this;
        }

        /**
         * Sets the presence penalty.
         *
         * @param presencePenalty the presence penalty
         * @return the builder
         */
        public Builder presencePenalty(Float presencePenalty) {
            this.presencePenalty = presencePenalty;
            return this;
        }

        /**
         * Sets the reasoning effort.
         *
         * @param reasoningEffort the reasoning effort
         * @return the builder
         */
        public Builder reasoningEffort(String reasoningEffort) {
            this.reasoningEffort = reasoningEffort;
            return this;
        }

        /**
         * Sets the seed.
         *
         * @param seed the seed
         * @return the builder
         */
        public Builder seed(Integer seed) {
            this.seed = seed;
            return this;
        }

        /**
         * Sets the stop sequences.
         *
         * @param stop the stop sequences
         * @return the builder
         */
        public Builder stop(List<String> stop) {
            this.stop = stop;
            return this;
        }

        /**
         * Sets stream mode.
         *
         * @param stream the stream mode
         * @return the builder
         */
        public Builder stream(Boolean stream) {
            this.stream = stream;
            return this;
        }

        /**
         * Sets the temperature.
         *
         * @param temperature the temperature
         * @return the builder
         */
        public Builder temperature(Float temperature) {
            this.temperature = temperature;
            return this;
        }

        /**
         * Sets the topP.
         *
         * @param topP the top p
         * @return the builder
         */
        public Builder topP(Float topP) {
            this.topP = topP;
            return this;
        }

        /**
         * Sets the user.
         *
         * @param user the user
         * @return the builder
         */
        public Builder user(String user) {
            this.user = user;
            return this;
        }

        /**
         * Sets if ignore eos.
         *
         * @param ignoreEos if ignore eos
         * @return the builder
         */
        public Builder ignoreEos(Boolean ignoreEos) {
            this.ignoreEos = ignoreEos;
            return this;
        }

        /**
         * Sets the tools.
         *
         * @param tools the tools
         * @return the builder
         */
        public Builder tools(List<Tool> tools) {
            this.tools = tools;
            return this;
        }

        /**
         * Sets the tools.
         *
         * @param tools the tools
         * @return the builder
         */
        public Builder tools(Tool... tools) {
            return tools(Arrays.asList(tools));
        }

        /**
         * Sets the tool choice mode.
         *
         * @param toolChoice the tool choice mode
         * @return the builder
         */
        public Builder toolChoice(String toolChoice) {
            this.toolChoice = toolChoice;
            return this;
        }

        /**
         * Sets the tool choice.
         *
         * @param toolChoice the tool choice
         * @return the builder
         */
        public Builder toolChoice(Tool toolChoice) {
            this.toolChoice = toolChoice;
            return this;
        }

        /**
         * Sets the extra body.
         *
         * @param extraBody the extra body
         * @return the builder
         */
        public Builder extraBody(Object extraBody) {
            this.extraBody = extraBody;
            return this;
        }

        /**
         * Builds the {@code ChatInput} instance.
         *
         * @return the {@code ChatInput} instance
         */
        public ChatInput build() {
            return new ChatInput(this);
        }
    }
    /** The target model server input schema type. */
    public enum Type {
        /** The OpenAI chat completion request schema. */
        CHAT_COMPLETION,
        /** The Google Gemini request schema. */
        GEMINI,
        /** The Anthropic request schema. */
        ANTHROPIC,
        /** The Anthropic on Vertex AI request schema. */
        ANTHROPIC_VERTEX,
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/ChatOutput.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import ai.djl.genai.anthropic.AnthropicOutput;
import ai.djl.genai.gemini.GeminiOutput;
import ai.djl.genai.gemini.types.Blob;
import ai.djl.genai.gemini.types.Candidate;
import ai.djl.genai.gemini.types.FileData;
import ai.djl.genai.gemini.types.FinishReason;
import ai.djl.genai.gemini.types.FunctionCall;
import ai.djl.genai.gemini.types.LogprobsResult;
import ai.djl.genai.gemini.types.LogprobsResultCandidate;
import ai.djl.genai.gemini.types.LogprobsResultTopCandidates;
import ai.djl.genai.gemini.types.Part;
import ai.djl.genai.gemini.types.UsageMetadata;
import ai.djl.util.JsonUtils;
import com.google.gson.JsonObject;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/** The chat completion style output. */
/** The chat completion style output. */
public class ChatOutput {

    private String id;
    private String object;
    private Long created;
    private List<Choice> choices;
    private String model;
    private Usage usage;

    ChatOutput() {}

    ChatOutput(
            String id,
            String object,
            Long created,
            List<Choice> choices,
            String model,
            Usage usage) {
        this.id = id;
        this.object = object;
        this.created = created;
        this.choices = choices;
        this.model = model;
        this.usage = usage;
    }

    /**
     * Returns the id.
     *
     * @return the id
     */
    public String getId() {
        return id;
    }

    /**
     * Returns the object.
     *
     * @return the object
     */
    public String getObject() {
        return object;
    }

    /**
     * Returns the created time.
     *
     * @return the created time
     */
    public Long getCreated() {
        return created;
    }

    /**
     * Returns the choices.
     *
     * @return the choices
     */
    public List<Choice> getChoices() {
        return choices;
    }

    /**
     * Returns the model name.
     *
     * @return the model name
     */
    public String getModel() {
        return model;
    }

    /**
     * Returns the usage.
     *
     * @return the usage
     */
    public Usage getUsage() {
        return usage;
    }

    /**
     * Returns the aggregated text output of the first choice.
     *
     * <p>Handles both regular responses ({@code message}) and streaming deltas ({@code delta}),
     * and both string content and a list of typed content parts.
     *
     * @return the aggregated text output, never {@code null}
     */
    @SuppressWarnings("unchecked")
    public String getTextOutput() {
        if (choices == null || choices.isEmpty()) {
            return "";
        }
        Message message = choices.get(0).getMessage();
        if (message == null) {
            // streaming responses carry the payload in "delta" instead of "message"
            message = choices.get(0).getDelta();
        }
        if (message == null) {
            return "";
        }
        Object content = message.getContent();
        if (content instanceof String) {
            return (String) content;
        } else if (content instanceof List) {
            List<Content> contents = (List<Content>) content;
            StringBuilder sb = new StringBuilder();
            for (Content part : contents) {
                if ("text".equals(part.getType())) {
                    sb.append(part.getText());
                }
            }
            return sb.toString();
        }
        return "";
    }

    /**
     * Returns the first {@link ToolCall} response.
     *
     * @return the first {@link ToolCall} response, or {@code null} if there is none
     */
    public ToolCall getToolCall() {
        List<ToolCall> list = getToolCalls();
        if (list.isEmpty()) {
            return null;
        }
        return list.get(0);
    }

    /**
     * Returns a list of {@link ToolCall} response.
     *
     * <p>Only the first tool call of each choice is collected.
     *
     * @return a list of {@link ToolCall} response
     */
    public List<ToolCall> getToolCalls() {
        List<ToolCall> ret = new ArrayList<>();
        if (choices != null && !choices.isEmpty()) {
            for (Choice choice : choices) {
                Message message = choice.getMessage();
                if (message != null) {
                    List<ToolCall> toolsCalls = message.getToolCalls();
                    if (toolsCalls != null && !toolsCalls.isEmpty()) {
                        ret.add(toolsCalls.get(0));
                    }
                }
            }
        }
        return ret;
    }

    /**
     * Returns the per token log probability of the first choice.
     *
     * @return the per token log probability, or an empty list if unavailable
     */
    public List<Logprob> getLogprobs() {
        if (choices != null && !choices.isEmpty()) {
            List<Logprob> logprobs = choices.get(0).getLogprobs();
            if (logprobs != null) {
                return logprobs;
            }
        }
        return Collections.emptyList();
    }

    /**
     * Customizes schema deserialization.
     *
     * <p>Detects the source schema: Gemini responses have "candidates", Anthropic responses have
     * "type", anything else is parsed as a native chat completion.
     *
     * @param json the output json string
     * @return the deserialized {@code ChatOutput} instance
     */
    public static ChatOutput fromJson(String json) {
        JsonObject element = JsonUtils.GSON.fromJson(json, JsonObject.class);
        if (element.has("candidates")) {
            GeminiOutput gemini = JsonUtils.GSON.fromJson(element, GeminiOutput.class);
            return fromGemini(gemini);
        } else if (element.has("type")) {
            AnthropicOutput ant = JsonUtils.GSON.fromJson(element, AnthropicOutput.class);
            return fromAnthropic(ant);
        }
        return ChatInput.GSON.fromJson(element, ChatOutput.class);
    }

    /**
     * Converts a Gemini response into the chat completion schema.
     *
     * @param gemini the Gemini response
     * @return the converted {@code ChatOutput}
     */
    static ChatOutput fromGemini(GeminiOutput gemini) {
        String id = gemini.getResponseId();
        String create = gemini.getCreateTime();
        Long time = null;
        if (create != null) {
            // NOTE(review): epoch milliseconds, while the OpenAI "created" field is
            // conventionally epoch seconds; kept as-is for backward compatibility.
            time = Instant.parse(create).toEpochMilli();
        }
        Usage usage = null;
        UsageMetadata um = gemini.getUsageMetadata();
        if (um != null) {
            usage =
                    new Usage(
                            um.getCandidatesTokenCount(),
                            um.getPromptTokenCount(),
                            um.getTotalTokenCount());
        }
        String model = gemini.getModelVersion();
        List<Candidate> candidates = gemini.getCandidates();
        if (candidates == null) {
            // defensive: a response may carry no candidates at all
            candidates = Collections.emptyList();
        }
        List<Choice> choices = new ArrayList<>(candidates.size());
        for (Candidate candidate : candidates) {
            ai.djl.genai.gemini.types.Content content = candidate.getContent();
            String role = content.getRole();
            List<Part> parts = content.getParts();
            Message.Builder message = Message.builder().role(role);
            // accumulate all function calls; setting them one at a time on the builder
            // would keep only the last call
            List<ToolCall> toolCalls = new ArrayList<>();
            if (parts != null) {
                for (Part part : parts) {
                    String text = part.getText();
                    Blob inline = part.getInlineData();
                    FileData fileData = part.getFileData();
                    FunctionCall func = part.getFunctionCall();
                    if (text != null) {
                        message.addText(text);
                    } else if (inline != null) {
                        // expose inline binary data as an OpenAI style data URL
                        String url = "data:" + inline.getMimeType() + ";base64," + inline.getData();
                        message.addContent(new Content(new Content.ImageContent(url)));
                    } else if (fileData != null) {
                        String fileUri = fileData.getFileUri();
                        String fileName = fileData.getDisplayName();
                        message.addContent(
                                new Content(new Content.FileContent(fileUri, null, fileName)));
                    } else if (func != null) {
                        String callId = func.getId();
                        String args = JsonUtils.GSON_COMPACT.toJson(func.getArgs());
                        ToolCall.Function function = new ToolCall.Function(args, func.getName());
                        toolCalls.add(new ToolCall(callId, "function", function));
                        message.toolCallId(callId);
                    }
                }
            }
            if (!toolCalls.isEmpty()) {
                message.toolCalls(toolCalls);
            }
            List<Logprob> logprobs = null;
            LogprobsResult lr = candidate.getLogprobsResult();
            if (lr != null) {
                List<LogprobsResultCandidate> lrcs = lr.getChosenCandidates();
                List<LogprobsResultTopCandidates> tlcs = lr.getTopCandidates();
                logprobs = new ArrayList<>();
                if (lrcs != null) {
                    int index = 0;
                    for (LogprobsResultCandidate lrc : lrcs) {
                        List<TopLogprob> topLogprobs = null;
                        if (tlcs != null && index < tlcs.size()) {
                            topLogprobs = new ArrayList<>();
                            for (LogprobsResultCandidate tlc : tlcs.get(index).getCandidates()) {
                                topLogprobs.add(
                                        new TopLogprob(
                                                tlc.getToken(), tlc.getLogProbability(), null));
                            }
                        }
                        logprobs.add(
                                new Logprob(
                                        lrc.getToken(), lrc.getLogProbability(), null,
                                        topLogprobs));
                        ++index;
                    }
                }
            }
            FinishReason reason = candidate.getFinishReason();
            // finishReason may be absent (e.g. on intermediate streaming chunks); the
            // unconditional reason.name() previously threw NullPointerException
            String finishReason = reason == null ? null : reason.name();
            Choice choice =
                    new Choice(candidate.getIndex(), message.build(), logprobs, null, finishReason);
            choices.add(choice);
        }
        return new ChatOutput(id, "chat.completion", time, choices, model, usage);
    }

    /**
     * Converts an Anthropic response into the chat completion schema.
     *
     * @param ant the Anthropic response
     * @return the converted {@code ChatOutput}
     * @throws IllegalArgumentException if a content block type is not supported
     */
    static ChatOutput fromAnthropic(AnthropicOutput ant) {
        String id = ant.getId();
        Usage usage = null;
        ai.djl.genai.anthropic.Usage um = ant.getUsage();
        if (um != null) {
            usage =
                    new Usage(
                            um.getOutputTokens(),
                            um.getInputTokens(),
                            um.getOutputTokens() + um.getInputTokens());
        }
        String model = ant.getModel();
        List<Choice> choices = new ArrayList<>();
        int nextIndex = 0;
        for (ai.djl.genai.anthropic.Content content : ant.getContent()) {
            Message.Builder message = Message.builder().role("model");
            String type = content.getType();
            if ("text".equals(type)) {
                message.text(content.getText());
            } else if ("tool_use".equals(type)) {
                String callId = content.getId();
                String args = JsonUtils.GSON_COMPACT.toJson(content.getInput());
                ToolCall.Function function = new ToolCall.Function(args, content.getName());
                ToolCall toolCall = new ToolCall(callId, "function", function);
                message.toolCalls(toolCall).toolCallId(callId);
            } else {
                // tool_result, server_tool_use, web_search_tool_result, thinking, redacted_thinking
                throw new IllegalArgumentException("Unsupported response type: " + type);
            }
            Integer idx = content.getIndex();
            // prefer the index reported by the service; otherwise assign sequential
            // zero-based indices (the previous pre-use increment started at 1)
            int index = idx != null ? idx : nextIndex;
            nextIndex = index + 1;
            Choice choice = new Choice(index, message.build(), null, null, ant.getStopReason());
            choices.add(choice);
        }
        return new ChatOutput(
                id, "chat.completion", System.currentTimeMillis(), choices, model, usage);
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/Choice.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/** A data class represents chat completion schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Choice {

    // zero-based position of this choice in the response
    private Integer index;
    // complete message of a non-streaming response
    private Message message;
    private List<Logprob> logprobs;
    // incremental message of a streaming response chunk; callers fall back from
    // message to delta (see ChatOutput.getTextOutput)
    private Message delta;

    @SerializedName("finish_reason")
    private String finishReason;

    /**
     * Constructs a {@code Choice} instance.
     *
     * @param index the choice index
     * @param message the complete message (non-streaming), may be {@code null}
     * @param logprobs the per token log probabilities, may be {@code null}
     * @param delta the incremental message (streaming), may be {@code null}
     * @param finishReason the finish reason, may be {@code null}
     */
    public Choice(
            Integer index,
            Message message,
            List<Logprob> logprobs,
            Message delta,
            String finishReason) {
        this.index = index;
        this.message = message;
        this.logprobs = logprobs;
        this.delta = delta;
        this.finishReason = finishReason;
    }

    /** Returns the choice index. */
    public Integer getIndex() {
        return index;
    }

    /** Returns the complete message, or {@code null} for streaming chunks. */
    public Message getMessage() {
        return message;
    }

    /** Returns the incremental streaming message, or {@code null} for regular responses. */
    public Message getDelta() {
        return delta;
    }

    /** Returns the per token log probabilities, or {@code null} if unavailable. */
    public List<Logprob> getLogprobs() {
        return logprobs;
    }

    /** Returns the finish reason, or {@code null} if not finished. */
    public String getFinishReason() {
        return finishReason;
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/Content.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import com.google.gson.annotations.SerializedName;
import java.util.Base64;
/** A data class represents chat completion schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Content {

    // discriminator: "text", "image_url" or "file"; exactly one payload field is set
    private String type;
    private String text;

    @SerializedName("image_url")
    private ImageContent imageUrl;

    private FileContent file;

    public Content(String text) {
        this.type = "text";
        this.text = text;
    }

    public Content(ImageContent imageUrl) {
        this.type = "image_url";
        this.imageUrl = imageUrl;
    }

    public Content(FileContent file) {
        this.type = "file";
        this.file = file;
    }

    public String getType() {
        return type;
    }

    public String getText() {
        return text;
    }

    public ImageContent getImageUrl() {
        return imageUrl;
    }

    public FileContent getFile() {
        return file;
    }

    public static Content text(String text) {
        return new Content(text);
    }

    public static Content image(String imageUrl) {
        return new Content(new ImageContent(imageUrl));
    }

    public static Content image(byte[] image, String mimeType) {
        String data = "data:" + mimeType + ";base64," + Base64.getEncoder().encodeToString(image);
        return new Content(new ImageContent(data));
    }

    /**
     * Creates a file {@code Content} from raw bytes.
     *
     * @param id the file id (serialized as {@code file_id})
     * @param data the file binary data, base64 encoded into {@code file_data}
     * @param fileName the file name
     * @return a new file {@code Content}
     */
    public static Content file(String id, byte[] data, String fileName) {
        String encoded = Base64.getEncoder().encodeToString(data);
        // FileContent(fileData, fileId, filename): the base64 payload belongs in file_data
        // and the id in file_id; the original call had the first two arguments swapped,
        // serializing the id as file_data and the payload as file_id.
        return new Content(new FileContent(encoded, id, fileName));
    }

    /** A data class represents chat completion schema. */
    public static final class ImageContent {

        private String url;

        public ImageContent(String url) {
            this.url = url;
        }

        public String getUrl() {
            return url;
        }
    }

    /** A data class represents chat completion schema. */
    public static final class FileContent {

        @SerializedName("file_data")
        private String fileData;

        @SerializedName("file_id")
        private String fileId;

        private String filename;

        FileContent(String fileData, String fileId, String filename) {
            this.fileData = fileData;
            this.fileId = fileId;
            this.filename = filename;
        }

        public String getFileData() {
            return fileData;
        }

        public String getFileId() {
            return fileId;
        }

        public String getFilename() {
            return filename;
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/Function.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import ai.djl.genai.FunctionUtils;
import ai.djl.util.JsonUtils;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import com.google.gson.JsonObject;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** A data class represents chat completion schema. */
@SuppressWarnings({"MissingJavadocMethod", "serial"})
public class Function implements Serializable {

    private static final long serialVersionUID = 1L;

    private String name;
    private String description;
    // JSON-schema style parameters object, serialized as-is
    private Object parameters;

    public Function(Builder builder) {
        name = builder.name;
        description = builder.description;
        parameters = builder.parameters;
    }

    public String getName() {
        return name;
    }

    public String getDescription() {
        return description;
    }

    public Object getParameters() {
        return parameters;
    }

    /**
     * Creates a builder to build a {@code Function}.
     *
     * @return a new builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Creates a builder pre-populated from a Java method: the function name is the method name
     * and the parameters schema is derived from the method's parameter names and types.
     *
     * <p>NOTE(review): {@code ConcurrentHashMap} has an unspecified iteration order, so the
     * serialized order of the "type"/"properties"/"required" keys is not stable — confirm
     * whether key order matters to any consumer.
     *
     * @param method the method to describe
     * @return a new builder
     */
    public static Builder function(Method method) {
        Map<String, Object> parameters = new ConcurrentHashMap<>();
        PairList<String, String> pairs = FunctionUtils.getParameters(method);
        Map<String, Map<String, String>> properties = new LinkedHashMap<>(); // NOPMD
        for (Pair<String, String> pair : pairs) {
            Map<String, String> prop = new ConcurrentHashMap<>();
            prop.put("type", pair.getValue());
            properties.put(pair.getKey(), prop);
        }
        parameters.put("type", "object");
        parameters.put("properties", properties);
        parameters.put("required", pairs.keys());
        return builder().name(method.getName()).parameters(parameters);
    }

    /** The builder for {@code Function}. */
    public static final class Builder {

        String name;
        String description;
        Object parameters;

        public Builder name(String name) {
            this.name = name;
            return this;
        }

        public Builder description(String description) {
            this.description = description;
            return this;
        }

        public Builder parameters(Object parameters) {
            this.parameters = parameters;
            return this;
        }

        // convenience overload: parses a JSON string into the parameters schema
        public Builder parameters(String parameters) {
            this.parameters = JsonUtils.GSON.fromJson(parameters, JsonObject.class);
            return this;
        }

        public Function build() {
            return new Function(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/Logprob.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/** A data class represents chat completion schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Logprob {

    private String token;
    private float logprob;
    // UTF-8 byte representation of the token, may be null
    private List<Byte> bytes;

    @SerializedName("top_logprobs")
    private List<TopLogprob> topLogprobs;

    /**
     * Constructs a {@code Logprob} instance.
     *
     * @param token the token text
     * @param logprob the log probability of the token
     * @param bytes the byte representation of the token, may be {@code null}
     * @param topLogprobs the most likely alternative tokens, may be {@code null}
     */
    public Logprob(String token, float logprob, List<Byte> bytes, List<TopLogprob> topLogprobs) {
        this.token = token;
        this.logprob = logprob;
        this.bytes = bytes;
        this.topLogprobs = topLogprobs;
    }

    /** Returns the token text. */
    public String getToken() {
        return token;
    }

    /** Returns the log probability of the token. */
    public float getLogprob() {
        return logprob;
    }

    /** Returns the byte representation of the token, or {@code null}. */
    public List<Byte> getBytes() {
        return bytes;
    }

    /** Returns the most likely alternative tokens, or {@code null}. */
    public List<TopLogprob> getTopLogprobs() {
        return topLogprobs;
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/Message.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import com.google.gson.annotations.SerializedName;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/** A data class represents chat completion schema. */
@SuppressWarnings({"MissingJavadocMethod", "serial"})
public class Message implements Serializable {

    private static final long serialVersionUID = 1L;

    private String role;
    // either a plain String or a List<Content>, depending on how it was built
    private Object content;
    private String name;

    @SerializedName("tool_calls")
    private List<ToolCall> toolCalls;

    @SerializedName("tool_call_id")
    private String toolCallId;

    Message(Builder builder) {
        role = builder.role;
        // structured content parts take precedence over the plain text field
        if (!builder.contents.isEmpty()) {
            content = builder.contents;
        } else {
            content = builder.text;
        }
        name = builder.name;
        toolCalls = builder.toolCalls;
        toolCallId = builder.toolCallId;
    }

    public String getRole() {
        return role;
    }

    // returns either a String or a List<Content>
    public Object getContent() {
        return content;
    }

    public String getName() {
        return name;
    }

    public List<ToolCall> getToolCalls() {
        return toolCalls;
    }

    public String getToolCallId() {
        return toolCallId;
    }

    // Static factories below create a builder pre-populated with content; the
    // single-argument variants default the role to "user".
    public static Builder text(String text) {
        return text(text, "user");
    }

    public static Builder text(String text, String role) {
        return builder().text(text).role(role);
    }

    public static Builder image(String imageUrl) {
        return image(imageUrl, "user");
    }

    public static Builder image(byte[] image, String mimeType) {
        return image(image, mimeType, "user");
    }

    public static Builder image(byte[] image, String mimeType, String role) {
        return builder().addImage(image, mimeType).role(role);
    }

    public static Builder image(String imageUrl, String role) {
        return builder().addImage(imageUrl).role(role);
    }

    public static Builder file(String id, byte[] data, String fileName) {
        return file(id, data, fileName, "user");
    }

    public static Builder file(String id, byte[] data, String fileName, String role) {
        return builder().addFile(id, data, fileName).role(role);
    }

    /**
     * Creates a builder to build a {@code Message}.
     *
     * @return a new builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /** The builder for {@code Message}. */
    public static final class Builder {

        String role;
        // plain text content; ignored if any structured contents were added
        String text;
        List<Content> contents = new ArrayList<>();
        String name;
        List<ToolCall> toolCalls;
        String toolCallId;

        public Builder role(String role) {
            this.role = role;
            return this;
        }

        public Builder text(String text) {
            this.text = text;
            return this;
        }

        public Builder name(String name) {
            this.name = name;
            return this;
        }

        // replaces any previously set tool calls
        public Builder toolCalls(List<ToolCall> toolCalls) {
            this.toolCalls = toolCalls;
            return this;
        }

        public Builder toolCalls(ToolCall... toolCalls) {
            return toolCalls(Arrays.asList(toolCalls));
        }

        public Builder toolCallId(String toolCallId) {
            this.toolCallId = toolCallId;
            return this;
        }

        public Builder addText(String text) {
            contents.add(Content.text(text));
            return this;
        }

        public Builder addImage(String imageUrl) {
            contents.add(Content.image(imageUrl));
            return this;
        }

        public Builder addImage(byte[] image, String mimeType) {
            contents.add(Content.image(image, mimeType));
            return this;
        }

        public Builder addFile(String id, byte[] data, String fileName) {
            contents.add(Content.file(id, data, fileName));
            return this;
        }

        public Builder addContent(Content content) {
            contents.add(content);
            return this;
        }

        public Message build() {
            return new Message(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/StreamChatOutput.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import ai.djl.genai.anthropic.AnthropicOutput;
import ai.djl.genai.anthropic.StreamAnthropicOutput;
import ai.djl.util.JsonUtils;
import com.google.gson.JsonObject;
import java.util.Iterator;
/** A stream version of {@link ChatOutput}. */
public class StreamChatOutput implements Iterable<ChatOutput> {

    // the underlying server-sent-event payload lines; consumed lazily
    private transient Iterator<String> output;

    StreamChatOutput(Iterator<String> output) {
        this.output = output;
    }

    /** {@inheritDoc} */
    @Override
    public Iterator<ChatOutput> iterator() {
        return new Iterator<ChatOutput>() {

            /** {@inheritDoc} */
            @Override
            public boolean hasNext() {
                return output.hasNext();
            }

            /** {@inheritDoc} */
            @Override
            public ChatOutput next() {
                String json = output.next();
                // the OpenAI stream terminator carries no payload; an empty ChatOutput
                // is returned so callers can keep iterating without special-casing
                if (json.isEmpty() || "[DONE]".equals(json)) {
                    return new ChatOutput();
                }
                JsonObject element = JsonUtils.GSON.fromJson(json, JsonObject.class);
                if (element.has("type")) {
                    // Anthropic streaming: one logical message spans several events, so
                    // StreamAnthropicOutput.next() may consume further items from output
                    AnthropicOutput.Builder builder = AnthropicOutput.builder();
                    StreamAnthropicOutput.next(builder, element, output);
                    AnthropicOutput ant = builder.build();
                    return ChatOutput.fromAnthropic(ant);
                }
                return ChatOutput.fromJson(json);
            }
        };
    }

    /**
     * Customizes schema deserialization.
     *
     * @param output the output iterator
     * @return the deserialized {@code StreamChatOutput} instance
     */
    public static StreamChatOutput fromJson(Iterator<String> output) {
        return new StreamChatOutput(output);
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/Tool.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import java.io.Serializable;
/** A data class represents chat completion schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Tool implements Serializable {

    private static final long serialVersionUID = 1L;

    private String type;
    private Function function;

    Tool(Builder builder) {
        this.type = builder.type;
        this.function = builder.function;
    }

    public String getType() {
        return type;
    }

    public Function getFunction() {
        return function;
    }

    /**
     * Creates a new function {@code Tool}.
     *
     * @param function the function for the tool
     * @return a new {@code Tool} instance
     */
    public static Tool of(Function function) {
        return builder().function(function).type("function").build();
    }

    /**
     * Creates a new function {@code Tool}.
     *
     * @param function the function for the tool
     * @return a new {@code Tool} instance
     */
    public static Tool of(Function.Builder function) {
        // build the function first, then delegate to the Function overload
        return of(function.build());
    }

    /**
     * Creates a builder to build a {@code Tool}.
     *
     * @return a new builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /** The builder for {@code Tool}. */
    public static final class Builder {

        String type;
        Function function;

        public Builder type(String type) {
            this.type = type;
            return this;
        }

        public Builder function(Function function) {
            this.function = function;
            return this;
        }

        public Builder function(Function.Builder function) {
            return function(function.build());
        }

        public Tool build() {
            return new Tool(this);
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/ToolCall.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import java.io.Serializable;
/** A data class represents chat completion schema. */
@SuppressWarnings("MissingJavadocMethod")
public class ToolCall implements Serializable {

    private static final long serialVersionUID = 1L;

    private String id;
    private String type;
    private Function function;

    /**
     * Constructs a {@code ToolCall} instance.
     *
     * @param id the identifier of the tool call
     * @param type the tool call type
     * @param function the invoked function payload
     */
    public ToolCall(String id, String type, Function function) {
        this.id = id;
        this.type = type;
        this.function = function;
    }

    /**
     * Returns the identifier of the tool call.
     *
     * @return the identifier of the tool call
     */
    public String getId() {
        return id;
    }

    /**
     * Returns the tool call type.
     *
     * @return the tool call type
     */
    public String getType() {
        return type;
    }

    /**
     * Returns the invoked function payload.
     *
     * @return the invoked function payload
     */
    public Function getFunction() {
        return function;
    }

    /** A data class represents chat completion schema. */
    public static final class Function implements Serializable {

        private static final long serialVersionUID = 1L;

        private String arguments;
        private String name;

        /**
         * Constructs a {@code Function} instance.
         *
         * @param arguments the function arguments, as a JSON string
         * @param name the function name
         */
        public Function(String arguments, String name) {
            this.arguments = arguments;
            this.name = name;
        }

        /**
         * Returns the function arguments.
         *
         * @return the function arguments
         */
        public String getArguments() {
            return arguments;
        }

        /**
         * Returns the function name.
         *
         * @return the function name
         */
        public String getName() {
            return name;
        }
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/TopLogprob.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import java.util.List;
/** A data class represents chat completion schema. */
@SuppressWarnings("MissingJavadocMethod")
public class TopLogprob {

    private String token;
    private float logprob;
    private List<Byte> bytes;

    /**
     * Constructs a {@code TopLogprob} instance.
     *
     * @param token the token text
     * @param logprob the log probability of the token
     * @param bytes the byte values of the token
     */
    public TopLogprob(String token, float logprob, List<Byte> bytes) {
        this.token = token;
        this.logprob = logprob;
        this.bytes = bytes;
    }

    /**
     * Returns the token text.
     *
     * @return the token text
     */
    public String getToken() {
        return token;
    }

    /**
     * Returns the log probability of the token.
     *
     * @return the log probability of the token
     */
    public float getLogprob() {
        return logprob;
    }

    /**
     * Returns the byte values of the token.
     *
     * @return the byte values of the token
     */
    public List<Byte> getBytes() {
        return bytes;
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/Usage.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.genai.openai;
import com.google.gson.annotations.SerializedName;
/** A data class represents chat completion schema. */
@SuppressWarnings("MissingJavadocMethod")
public class Usage {

    // Field names map to the OpenAI wire format via Gson's @SerializedName.
    @SerializedName("completion_tokens")
    private int completionTokens;

    @SerializedName("prompt_tokens")
    private int promptTokens;

    // NOTE(review): presumably totalTokens == promptTokens + completionTokens,
    // but the invariant is not enforced here — values come straight from the server.
    @SerializedName("total_tokens")
    private int totalTokens;

    /**
     * Constructs a {@code Usage} instance.
     *
     * @param completionTokens the number of generated (completion) tokens
     * @param promptTokens the number of prompt tokens
     * @param totalTokens the total number of tokens
     */
    public Usage(int completionTokens, int promptTokens, int totalTokens) {
        this.completionTokens = completionTokens;
        this.promptTokens = promptTokens;
        this.totalTokens = totalTokens;
    }

    /**
     * Returns the number of completion tokens.
     *
     * @return the number of completion tokens
     */
    public int getCompletionTokens() {
        return completionTokens;
    }

    /**
     * Returns the number of prompt tokens.
     *
     * @return the number of prompt tokens
     */
    public int getPromptTokens() {
        return promptTokens;
    }

    /**
     * Returns the total number of tokens.
     *
     * @return the total number of tokens
     */
    public int getTotalTokens() {
        return totalTokens;
    }
}
|
0
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai
|
java-sources/ai/djl/genai/genai/0.34.0/ai/djl/genai/openai/package-info.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes for chat completion. */
package ai.djl.genai.openai;
|
0
|
java-sources/ai/djl/hadoop/hadoop/0.34.0/ai/djl/hadoop
|
java-sources/ai/djl/hadoop/hadoop/0.34.0/ai/djl/hadoop/hdfs/HdfsRepository.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.hadoop.hdfs;
import ai.djl.Application;
import ai.djl.repository.AbstractRepository;
import ai.djl.repository.Artifact;
import ai.djl.repository.FilenameUtils;
import ai.djl.repository.MRL;
import ai.djl.repository.Metadata;
import ai.djl.repository.Repository;
import ai.djl.repository.zoo.DefaultModelZoo;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A {@code HdfsRepository} is a {@link Repository} located on a Hadoop HDFS.
*
* @see Repository
*/
public class HdfsRepository extends AbstractRepository {

    private static final Logger logger = LoggerFactory.getLogger(HdfsRepository.class);

    private Configuration config;
    // Path portion of the original URI; normalized to "/" when empty.
    private String prefix;
    private String artifactId;
    private String modelName;
    // Lazily resolved metadata; null before resolution or when nothing was found.
    private Metadata metadata;
    // Guards getMetadata() so the HDFS listing is attempted at most once.
    private boolean resolved;
    // True when prefix points at a directory of files rather than a single archive.
    private boolean isDirectory;

    HdfsRepository(String name, URI uri, Configuration config) {
        super(name, uri);
        this.config = config;
        prefix = uri.getPath();
        String fileName = Paths.get(prefix).toFile().getName();
        // A non-archive last path segment is treated as a directory of model files.
        isDirectory = !FilenameUtils.isArchiveFile(fileName);
        if (!isDirectory) {
            // Strip the archive extension to derive a default artifact id.
            fileName = FilenameUtils.getNamePart(fileName);
        }
        // NOTE(review): `arguments` is inherited from AbstractRepository — presumably
        // parsed from the URI query string; confirm against the superclass.
        modelName = arguments.get("model_name");
        artifactId = arguments.get("artifact_id");
        if (artifactId == null) {
            artifactId = fileName;
        }
        if (modelName == null) {
            modelName = artifactId;
        }
        if (prefix.isEmpty()) {
            prefix = "/";
        }
        try {
            // Rebuild the repository URI with scheme/authority only; the path is
            // carried separately in `prefix`.
            this.uri =
                    new URI(
                            uri.getScheme(),
                            uri.getUserInfo(),
                            uri.getHost(),
                            uri.getPort(),
                            null,
                            null,
                            null);
        } catch (URISyntaxException e) {
            // Cannot happen: all components come from an already-valid URI.
            throw new AssertionError(e);
        }
    }

    /** {@inheritDoc} */
    @Override
    public boolean isRemote() {
        return true;
    }

    /** {@inheritDoc} */
    @Override
    public Metadata locate(MRL mrl) throws IOException {
        // Single-model repository: every MRL resolves to the same metadata.
        return getMetadata();
    }

    /** {@inheritDoc} */
    @Override
    public Artifact resolve(MRL mrl, Map<String, String> filter) throws IOException {
        Metadata m = locate(mrl);
        if (m == null) {
            return null;
        }
        List<Artifact> artifacts = m.getArtifacts();
        if (artifacts.isEmpty()) {
            return null;
        }
        // The filter is ignored: at most one artifact exists in this repository.
        return artifacts.get(0);
    }

    /** {@inheritDoc} */
    @Override
    protected void download(Path tmp, URI baseUri, Artifact.Item item, Progress progress)
            throws IOException {
        FileSystem fs = FileSystem.get(uri, config);
        org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(item.getUri());
        logger.debug("Downloading artifact: {} ...", path);
        // try-with-resources closes the HDFS input stream even if save() throws.
        try (InputStream is = fs.open(path)) {
            save(is, tmp, item, progress);
        }
    }

    /** {@inheritDoc} */
    @Override
    public List<MRL> getResources() {
        try {
            Metadata m = getMetadata();
            if (m != null && !m.getArtifacts().isEmpty()) {
                MRL mrl = model(Application.UNDEFINED, m.getGroupId(), m.getArtifactId());
                return Collections.singletonList(mrl);
            }
        } catch (IOException e) {
            // Best effort: an unreachable HDFS yields an empty resource list.
            logger.warn("Failed to scan: {}", uri, e);
        }
        return Collections.emptyList();
    }

    // Resolves (once) the repository metadata by listing files on HDFS.
    private synchronized Metadata getMetadata() throws IOException {
        if (resolved) {
            return metadata;
        }
        // Set before the listing so a "nothing found" result is also cached.
        resolved = true;
        Artifact artifact = listFiles();
        if (artifact == null) {
            logger.debug("No object found in hdfs: {}", uri);
            return null;
        }
        metadata = new Metadata.MatchAllMetadata();
        // Group id is a hash of the full location so distinct URIs never collide.
        String hash = Utils.hash(uri.resolve(prefix).toString());
        MRL mrl = model(Application.UNDEFINED, DefaultModelZoo.GROUP_ID, hash);
        metadata.setRepositoryUri(mrl.toURI());
        metadata.setArtifactId(artifactId);
        metadata.setArtifacts(Collections.singletonList(artifact));
        return metadata;
    }

    // Builds a single Artifact describing either the directory contents or the archive file.
    private Artifact listFiles() throws IOException {
        FileSystem fs = FileSystem.get(uri, config);
        FileStatus[] status = fs.listStatus(new org.apache.hadoop.fs.Path(prefix));
        if (status == null || status.length == 0) {
            return null;
        }
        Artifact artifact = new Artifact();
        artifact.setName(modelName);
        artifact.getArguments().putAll(arguments);
        Map<String, Artifact.Item> files = new ConcurrentHashMap<>();
        artifact.setFiles(files);
        if (isDirectory) {
            Path fullPath = Paths.get(prefix);
            for (FileStatus st : status) {
                Artifact.Item item = new Artifact.Item();
                String key = st.getPath().getName();
                if (!key.endsWith("/")) {
                    item.setUri(fullPath.resolve(key).toString());
                    item.setSize(st.getLen());
                    item.setArtifact(artifact);
                    // NOTE(review): getType() is presumably derived from the item URI;
                    // confirm how "dir" entries are detected.
                    if ("dir".equals(item.getType())) {
                        item.setName(""); // avoid creating extra folder
                    }
                    files.put(key, item);
                }
            }
        } else {
            // Single archive: expose it as one item extracted without a wrapper folder.
            Artifact.Item item = new Artifact.Item();
            item.setUri(prefix);
            item.setName(""); // avoid creating extra folder
            item.setArtifact(artifact);
            item.setSize(status[0].getLen());
            files.put(artifactId, item);
        }
        return artifact;
    }
}
|
0
|
java-sources/ai/djl/hadoop/hadoop/0.34.0/ai/djl/hadoop
|
java-sources/ai/djl/hadoop/hadoop/0.34.0/ai/djl/hadoop/hdfs/HdfsRepositoryFactory.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.hadoop.hdfs;
import ai.djl.repository.Repository;
import ai.djl.repository.RepositoryFactory;
import org.apache.hadoop.conf.Configuration;
import java.net.URI;
import java.util.Collections;
import java.util.Set;
/** A class responsible to create {@link HdfsRepository} instances. */
public class HdfsRepositoryFactory implements RepositoryFactory {

    private Configuration config;

    /** Creates an {@code HdfsRepositoryFactory} instance with default {@code Configuration}. */
    public HdfsRepositoryFactory() {
        this(new Configuration());
    }

    /**
     * Creates an {@code HdfsRepositoryFactory} instance with the specified {@code Configuration}.
     *
     * @param config the {@code Configuration}
     */
    public HdfsRepositoryFactory(Configuration config) {
        this.config = config;
    }

    /** {@inheritDoc} */
    @Override
    public Repository newInstance(String name, URI uri) {
        // Only hdfs:// URIs are accepted by this factory.
        if (!"hdfs".equalsIgnoreCase(uri.getScheme())) {
            throw new IllegalArgumentException("Invalid hdfs url: " + uri);
        }
        return new HdfsRepository(name, uri, config);
    }

    /** {@inheritDoc} */
    @Override
    public Set<String> getSupportedScheme() {
        return Collections.singleton("hdfs");
    }
}
|
0
|
java-sources/ai/djl/hadoop/hadoop/0.34.0/ai/djl/hadoop
|
java-sources/ai/djl/hadoop/hadoop/0.34.0/ai/djl/hadoop/hdfs/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains a built-in implementation of Repository class that can be used for loading models from
* Hadoop HDFS file system.
*/
package ai.djl.hadoop.hdfs;
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsEngine.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.engine.StandardCapabilities;
import ai.djl.huggingface.tokenizers.jni.LibUtils;
import ai.djl.ndarray.NDManager;
/** The {@code RsEngine} is an implementation of the {@link Engine} rust engine. */
public final class RsEngine extends Engine {

    public static final String ENGINE_NAME = "Rust";
    static final int RANK = 4;

    private RsEngine() {}

    /**
     * Creates a new {@code RsEngine} instance after verifying the native library is usable.
     *
     * @return the new engine instance
     * @throws EngineException if the Rust native library cannot be loaded
     */
    @SuppressWarnings("PMD.AvoidRethrowingException")
    static Engine newInstance() {
        try {
            LibUtils.checkStatus();
            return new RsEngine();
        } catch (EngineException e) {
            // Already carries a meaningful message; rethrow unchanged.
            throw e;
        } catch (Throwable t) {
            throw new EngineException("Failed to load Rust native library", t);
        }
    }

    /** {@inheritDoc} */
    @Override
    public Engine getAlternativeEngine() {
        return null;
    }

    /** {@inheritDoc} */
    @Override
    public String getEngineName() {
        return ENGINE_NAME;
    }

    /** {@inheritDoc} */
    @Override
    public int getRank() {
        return RANK;
    }

    /** {@inheritDoc} */
    @Override
    public String getVersion() {
        return Engine.getDjlVersion();
    }

    /** {@inheritDoc} */
    @Override
    public boolean hasCapability(String capability) {
        if (StandardCapabilities.MKL.equals(capability)) {
            return true;
        } else if (StandardCapabilities.CUDA.equals(capability)) {
            return RustLibrary.isCudaAvailable();
        }
        return false;
    }

    /** {@inheritDoc} */
    @Override
    public Model newModel(String name, Device device) {
        return new RsModel(name, device);
    }

    /** {@inheritDoc} */
    @Override
    public NDManager newBaseManager() {
        return RsNDManager.getSystemManager().newSubManager();
    }

    /** {@inheritDoc} */
    @Override
    public NDManager newBaseManager(Device device) {
        return RsNDManager.getSystemManager().newSubManager(device);
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        // Fixed: the "name:version" prefix was previously appended twice
        // (copy-paste duplication), yielding "Rust:X, Rust:X, capabilities: ...".
        StringBuilder sb = new StringBuilder(200);
        sb.append(getEngineName())
                .append(':')
                .append(getVersion())
                .append(", capabilities: [\n\t")
                .append(StandardCapabilities.MKL);
        if (hasCapability(StandardCapabilities.CUDA)) {
            sb.append(",\n\t").append(StandardCapabilities.CUDA); // NOPMD
        }
        sb.append(']');
        return sb.toString();
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsEngineProvider.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code RsEngineProvider} is the Rust implementation of {@link EngineProvider}. */
public class RsEngineProvider implements EngineProvider {

    /** {@inheritDoc} */
    @Override
    public String getEngineName() {
        return RsEngine.ENGINE_NAME;
    }

    /** {@inheritDoc} */
    @Override
    public int getEngineRank() {
        return RsEngine.RANK;
    }

    /** {@inheritDoc} */
    @Override
    public Engine getEngine() {
        return InstanceHolder.INSTANCE;
    }

    // Initialization-on-demand holder: the engine (and its native library) is
    // created lazily and exactly once, on the first call to getEngine(); the
    // JVM class-initialization lock makes this thread-safe without synchronization.
    private static class InstanceHolder {
        static final Engine INSTANCE = RsEngine.newInstance();
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsModel.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.BaseModel;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.ndarray.types.DataType;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
/** {@code RsModel} is the Rust implementation of {@link Model}. */
public class RsModel extends BaseModel {

    // Pointer to the native model; cleared to null on close() so release happens at most once.
    private final AtomicReference<Long> handle;

    /**
     * Constructs a new Model on a given device.
     *
     * @param name the model name
     * @param device the device the model should be located on
     */
    RsModel(String name, Device device) {
        super(name);
        manager = RsNDManager.getSystemManager().newSubManager(device);
        manager.setName("RsModel");
        // Default inference data type for this engine.
        dataType = DataType.FLOAT16;
        handle = new AtomicReference<>();
    }

    /** {@inheritDoc} */
    @Override
    public void load(Path modelPath, String prefix, Map<String, ?> options)
            throws IOException, MalformedModelException {
        if (Files.notExists(modelPath)) {
            throw new FileNotFoundException(
                    "Model directory doesn't exist: " + modelPath.toAbsolutePath());
        }
        setModelDir(modelPath);
        if (block == null) {
            // No block set by the caller: load the model through the native
            // library and wrap the returned handle in a symbol block.
            Device device = manager.getDevice();
            handle.set(
                    RustLibrary.loadModel(
                            modelDir.toAbsolutePath().toString(),
                            dataType.ordinal(),
                            device.getDeviceType(),
                            device.getDeviceId()));
            block = new RsSymbolBlock((RsNDManager) manager, handle.get());
        } else {
            // A block already exists; load its parameters instead.
            loadBlock(prefix, options);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void close() {
        // getAndSet(null) guarantees the native model is deleted at most once
        // even if close() is called concurrently or repeatedly.
        Long pointer = handle.getAndSet(null);
        if (pointer != null) {
            RustLibrary.deleteModel(pointer);
        }
        super.close();
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsNDArray.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.Device;
import ai.djl.engine.EngineException;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.NDScope;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.util.NativeResource;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.stream.IntStream;
/** {@code RsNDArray} is the Rust implementation of {@link NDArray}. */
@SuppressWarnings("try")
public class RsNDArray extends NativeResource<Long> implements NDArray {
private String name;
private Device device;
private DataType dataType;
private Shape shape;
private RsNDManager manager;
private RsNDArrayEx ndArrayEx;
// keep a reference to direct buffer to avoid GC release the memory
@SuppressWarnings("PMD.UnusedPrivateField")
private ByteBuffer dataRef;
/**
* Constructs a Rust {@code NDArray} from a native handle (internal. Use {@link NDManager}
* instead).
*
* @param manager the manager to attach the new array to
* @param handle the pointer to the native Rust memory
*/
@SuppressWarnings("this-escape")
public RsNDArray(RsNDManager manager, long handle) {
this(manager, handle, null, null);
}
    /**
     * Constructs a Rust {@code NDArray} from a native handle with a known data type (internal.
     * Use {@link NDManager} instead).
     *
     * @param manager the manager to attach the new array to
     * @param handle the pointer to the native Rust memory
     * @param dataType the {@link DataType} to be set, or {@code null} to query it lazily
     */
    @SuppressWarnings("this-escape")
    RsNDArray(RsNDManager manager, long handle, DataType dataType) {
        this(manager, handle, dataType, null);
    }

    /**
     * Constructs a Rust {@code NDArray} from a native handle (internal. Use {@link NDManager}
     * instead) with the data that is hold on Java side.
     *
     * @param manager the manager to attach the new array to
     * @param handle the pointer to the native Rust memory
     * @param dataType the {@link DataType} to be set
     * @param data the direct buffer of the data
     */
    @SuppressWarnings("this-escape")
    public RsNDArray(RsNDManager manager, long handle, DataType dataType, ByteBuffer data) {
        super(handle);
        this.dataType = dataType;
        this.manager = manager;
        this.ndArrayEx = new RsNDArrayEx(this);
        // Keep a strong reference so the GC cannot free a direct buffer the
        // native side may still be reading.
        dataRef = data;
        // 'this' escapes before construction completes (hence the suppression above).
        manager.attachInternal(getUid(), this);
        NDScope.register(this);
    }
/** {@inheritDoc} */
@Override
public RsNDManager getManager() {
return manager;
}
/** {@inheritDoc} */
@Override
public String getName() {
return name;
}
/** {@inheritDoc} */
@Override
public void setName(String name) {
this.name = name;
}
    /** {@inheritDoc} */
    @Override
    public DataType getDataType() {
        // Lazily query and cache the type. Assumes the native type id matches the
        // ordinal order of DataType.values() — TODO confirm the mapping stays in sync.
        if (dataType == null) {
            int type = RustLibrary.getDataType(getHandle());
            dataType = DataType.values()[type];
        }
        return dataType;
    }
    /** {@inheritDoc} */
    @Override
    public Device getDevice() {
        // Lazily query and cache the device; native side returns {typeCode, deviceId}.
        if (device == null) {
            int[] dev = RustLibrary.getDevice(getHandle());
            String deviceType;
            switch (dev[0]) {
                case 0:
                    deviceType = Device.Type.CPU;
                    break;
                case 1:
                    deviceType = Device.Type.GPU;
                    break;
                case 2:
                    // Apple Metal (mps) backend.
                    deviceType = "mps";
                    break;
                default:
                    throw new EngineException("Unknown device type: " + dev[0]);
            }
            device = Device.of(deviceType, dev[1]);
        }
        return device;
    }
/** {@inheritDoc} */
@Override
public Shape getShape() {
if (shape == null) {
shape = new Shape(RustLibrary.getShape(getHandle()));
}
return shape;
}
/** {@inheritDoc} */
@Override
public SparseFormat getSparseFormat() {
return SparseFormat.DENSE;
}
/** {@inheritDoc} */
@Override
public RsNDArray toDevice(Device device, boolean copy) {
if (device.equals(getDevice()) && !copy) {
return this;
}
String deviceType = device.getDeviceType();
long newHandle = RustLibrary.toDevice(getHandle(), deviceType, device.getDeviceId());
return toArray(newHandle, null, false, true);
}
/** {@inheritDoc} */
@Override
public RsNDArray toType(DataType dataType, boolean copy) {
if (dataType.equals(getDataType()) && !copy) {
return this;
}
if (dataType == DataType.BOOLEAN) {
long newHandle = RustLibrary.toBoolean(getHandle());
return toArray(newHandle, dataType, false, true);
}
if (this.dataType == DataType.INT64
&& dataType == DataType.FLOAT16
&& getDevice().isGpu()) {
// TODO:
throw new UnsupportedOperationException("FP16 to I64 is not supported on GPU.");
}
int dType = manager.toRustDataType(dataType);
long newHandle = RustLibrary.toDataType(getHandle(), dType);
return toArray(newHandle, dataType, false, true);
}
/** {@inheritDoc} */
@Override
public void setRequiresGradient(boolean requiresGrad) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray getGradient() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public boolean hasGradient() {
return false;
}
/** {@inheritDoc} */
@Override
public NDArray stopGradient() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer(boolean tryDirect) {
byte[] buf = RustLibrary.toByteArray(getHandle());
ByteBuffer bb = ByteBuffer.wrap(buf);
bb.order(ByteOrder.nativeOrder());
return bb;
}
/** {@inheritDoc} */
@Override
public String[] toStringArray(Charset charset) {
throw new UnsupportedOperationException("Not implemented");
}
    /** {@inheritDoc} */
    @Override
    public void set(Buffer buffer) {
        int size = Math.toIntExact(size());
        DataType type = getDataType();
        BaseNDManager.validateBuffer(buffer, type, size);
        // TODO how do we handle the exception happened in the middle
        // Drop the previously pinned buffer; it no longer backs this array.
        dataRef = null;
        if (buffer.isDirect() && buffer instanceof ByteBuffer) {
            // If NDArray is on the GPU, it is native code responsibility to control the data life
            // cycle
            if (!getDevice().isGpu()) {
                dataRef = (ByteBuffer) buffer;
            }
            // intern() replaces this array's native handle with the newly created one.
            intern(manager.create(buffer, getShape(), type).toDevice(getDevice(), false));
            return;
        }
        // int8, uint8, boolean use ByteBuffer, so need to explicitly input DataType
        ByteBuffer buf = manager.allocateDirect(size * type.getNumOfBytes());
        BaseNDManager.copyBuffer(buffer, buf);
        // If NDArray is on the GPU, it is native code responsibility to control the data life cycle
        if (!getDevice().isGpu()) {
            dataRef = buf;
        }
        intern(manager.create(buf, getShape(), type).toDevice(getDevice(), false));
    }
/** {@inheritDoc} */
@Override
public NDArray gather(NDArray index, int axis) {
// try (NDScope ignore = new NDScope()) {
// long indexHandle = manager.from(index).getHandle();
// return toArray(RustLibrary.gather(getHandle(), indexHandle, axis), true);
// }
// TODO:
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray gatherNd(NDArray index) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray take(NDManager manager, NDArray index) {
try (NDScope ignore = new NDScope()) {
long indexHandle = this.manager.from(index).getHandle();
long newHandle = RustLibrary.take(getHandle(), indexHandle);
RsNDArray array = new RsNDArray((RsNDManager) manager, newHandle);
NDScope.unregister(array);
return array;
}
}
/** {@inheritDoc} */
@Override
public NDArray put(NDArray index, NDArray value) {
try (NDScope ignore = new NDScope()) {
long indexHandle = manager.from(index).getHandle();
long valueHandle = manager.from(value).getHandle();
return toArray(RustLibrary.put(getHandle(), indexHandle, valueHandle), true);
}
}
/** {@inheritDoc} */
@Override
public NDArray scatter(NDArray index, NDArray value, int axis) {
// try (NDScope ignore = new NDScope()) {
// long indexHandle = manager.from(index).getHandle();
// long valueHandle = manager.from(value).getHandle();
// return toArray(RustLibrary.scatter(getHandle(), indexHandle, valueHandle,
// axis), true);
// }
// TODO:
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void attach(NDManager manager) {
detach();
this.manager = (RsNDManager) manager;
manager.attachInternal(getUid(), this);
}
/** {@inheritDoc} */
@Override
public void returnResource(NDManager manager) {
detach();
this.manager = (RsNDManager) manager;
manager.attachUncappedInternal(getUid(), this);
}
/** {@inheritDoc} */
@Override
public void tempAttach(NDManager manager) {
NDManager original = this.manager;
detach();
this.manager = (RsNDManager) manager;
manager.tempAttachInternal(original, getUid(), this);
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detachInternal(getUid());
manager = RsNDManager.getSystemManager();
}
/** {@inheritDoc} */
@Override
public NDArray duplicate() {
return toArray(RustLibrary.duplicate(getHandle()), dataType, false, true);
}
/** {@inheritDoc} */
@Override
public RsNDArray booleanMask(NDArray index, int axis) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray sequenceMask(NDArray sequenceLength, float value) {
throw new UnsupportedOperationException("Not implemented yet");
}
/** {@inheritDoc} */
@Override
public NDArray sequenceMask(NDArray sequenceLength) {
throw new UnsupportedOperationException("Not implemented yet");
}
/** {@inheritDoc} */
@Override
public boolean contentEquals(Number number) {
return contentEquals(manager.create(number));
}
    /** {@inheritDoc} */
    @Override
    public boolean contentEquals(NDArray other) {
        // Cheap rejections first: shape, then data type, must match before comparing data.
        if (other == null || (!shapeEquals(other))) {
            return false;
        }
        if (getDataType() != other.getDataType()) {
            return false;
        }
        // Element-wise comparison is delegated to the native library.
        return RustLibrary.contentEqual(getHandle(), manager.from(other).getHandle());
    }
/** {@inheritDoc} */
@Override
public RsNDArray eq(Number n) {
try (NDArray number = manager.create(n)) {
return eq(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray eq(NDArray other) {
try (NDScope ignore = new NDScope()) {
long newHandle = RustLibrary.eq(getHandle(), manager.from(other).getHandle());
return toArray(newHandle, DataType.BOOLEAN, true, false);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray neq(Number n) {
try (NDArray number = manager.create(n)) {
return neq(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray neq(NDArray other) {
try (NDScope ignore = new NDScope()) {
long newHandle = RustLibrary.neq(getHandle(), manager.from(other).getHandle());
return toArray(newHandle, DataType.BOOLEAN, true, false);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray gt(Number n) {
try (NDArray number = manager.create(n)) {
return gt(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray gt(NDArray other) {
try (NDScope ignore = new NDScope()) {
long newHandle = RustLibrary.gt(getHandle(), manager.from(other).getHandle());
return toArray(newHandle, DataType.BOOLEAN, true, false);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray gte(Number n) {
try (NDArray number = manager.create(n)) {
return gte(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray gte(NDArray other) {
try (NDScope ignore = new NDScope()) {
long newHandle = RustLibrary.gte(getHandle(), manager.from(other).getHandle());
return toArray(newHandle, DataType.BOOLEAN, true, false);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray lt(Number n) {
try (NDArray number = manager.create(n)) {
return lt(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray lt(NDArray other) {
try (NDScope ignore = new NDScope()) {
long newHandle = RustLibrary.lt(getHandle(), manager.from(other).getHandle());
return toArray(newHandle, DataType.BOOLEAN, true, false);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray lte(Number n) {
try (NDArray number = manager.create(n)) {
return lte(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray lte(NDArray other) {
try (NDScope ignore = new NDScope()) {
long newHandle = RustLibrary.lte(getHandle(), manager.from(other).getHandle());
return toArray(newHandle, DataType.BOOLEAN, true, false);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray add(Number n) {
try (NDArray number = manager.create(n)) {
return add(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray add(NDArray other) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.add(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray sub(Number n) {
try (NDArray number = manager.create(n)) {
return sub(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray sub(NDArray other) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.sub(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray mul(Number n) {
try (NDArray number = manager.create(n)) {
return mul(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray mul(NDArray other) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.mul(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray div(Number n) {
try (NDArray number = manager.create(n)) {
return div(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray div(NDArray other) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.div(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray mod(Number n) {
try (NDArray number = manager.create(n)) {
return mod(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray mod(NDArray other) {
try (NDScope ignore = new NDScope()) {
long otherHandle = manager.from(other).getHandle();
return toArray(RustLibrary.remainder(getHandle(), otherHandle), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray pow(Number n) {
try (NDArray number = manager.create(n)) {
return pow(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray pow(NDArray other) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.pow(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public NDArray xlogy(NDArray other) {
if (isScalar() || other.isScalar()) {
throw new IllegalArgumentException("scalar is not allowed for xlogy()");
}
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.xlogy(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray addi(Number n) {
try (NDArray number = manager.create(n)) {
return addi(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray addi(NDArray other) {
intern(add(other));
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray subi(Number n) {
try (NDArray number = manager.create(n)) {
return subi(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray subi(NDArray other) {
intern(sub(other));
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray muli(Number n) {
try (NDArray number = manager.create(n)) {
return muli(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray muli(NDArray other) {
intern(mul(other));
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray divi(Number n) {
try (NDArray number = manager.create(n)) {
return divi(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray divi(NDArray other) {
intern(div(other));
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray modi(Number n) {
try (NDArray number = manager.create(n)) {
return modi(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray modi(NDArray other) {
intern(mod(other));
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray powi(Number n) {
try (NDArray number = manager.create(n)) {
return powi(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray powi(NDArray other) {
intern(pow(other));
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray signi() {
intern(sign());
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray negi() {
intern(neg());
return this;
}
/** {@inheritDoc} */
@Override
public RsNDArray sign() {
return toArray(RustLibrary.sign(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray maximum(Number n) {
try (NDArray number = manager.create(n)) {
return maximum(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray maximum(NDArray other) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.maximum(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray minimum(Number n) {
try (NDArray number = manager.create(n)) {
return minimum(number);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray minimum(NDArray other) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.minimum(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray all() {
NDArray noneZero = countNonzero();
RsNDArray ret = (RsNDArray) manager.create(noneZero.getLong() == size());
noneZero.close();
return ret;
}
/** {@inheritDoc} */
@Override
public RsNDArray any() {
NDArray noneZero = countNonzero();
RsNDArray ret = (RsNDArray) manager.create(noneZero.getLong() > 0);
noneZero.close();
return ret;
}
/** {@inheritDoc} */
@Override
public RsNDArray none() {
NDArray noneZero = countNonzero();
RsNDArray ret = (RsNDArray) manager.create(noneZero.getLong() == 0);
noneZero.close();
return ret;
}
/** {@inheritDoc} */
@Override
public NDArray countNonzero() {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.countNonzero(getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public NDArray countNonzero(int axis) {
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.countNonzeroWithAxis(getHandle(), axis), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray neg() {
return toArray(RustLibrary.neg(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray abs() {
return toArray(RustLibrary.abs(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray square() {
return toArray(RustLibrary.square(getHandle()));
}
/** {@inheritDoc} */
@Override
public NDArray sqrt() {
return toArray(RustLibrary.sqrt(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray cbrt() {
try (RsNDArray array = (RsNDArray) manager.create(1.0 / 3)) {
return toArray(RustLibrary.pow(getHandle(), array.getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray floor() {
return toArray(RustLibrary.floor(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray ceil() {
return toArray(RustLibrary.ceil(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray round() {
return toArray(RustLibrary.round(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray trunc() {
return toArray(RustLibrary.trunc(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray exp() {
return toArray(RustLibrary.exp(getHandle()));
}
/** {@inheritDoc} */
@Override
public NDArray gammaln() {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public RsNDArray log() {
return toArray(RustLibrary.log(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray log10() {
return toArray(RustLibrary.log10(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray log2() {
return toArray(RustLibrary.log2(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray sin() {
return toArray(RustLibrary.sin(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray cos() {
return toArray(RustLibrary.cos(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray tan() {
return toArray(RustLibrary.tan(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray asin() {
return toArray(RustLibrary.asin(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray acos() {
return toArray(RustLibrary.acos(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray atan() {
return toArray(RustLibrary.atan(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray atan2(NDArray other) {
try (NDScope ignore = new NDScope()) {
long otherHandle = manager.from(other).getHandle();
return toArray(RustLibrary.atan2(getHandle(), otherHandle), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray sinh() {
return toArray(RustLibrary.sinh(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray cosh() {
return toArray(RustLibrary.cosh(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray tanh() {
return toArray(RustLibrary.tanh(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray asinh() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray acosh() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray atanh() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray toDegrees() {
return mul(180.0).div(Math.PI);
}
/** {@inheritDoc} */
@Override
public RsNDArray toRadians() {
return mul(Math.PI).div(180.0);
}
/** {@inheritDoc} */
@Override
public RsNDArray max() {
if (isScalar()) {
return this;
}
return toArray(RustLibrary.max(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray max(int[] axes, boolean keepDims) {
if (axes.length > 1) {
// TODO fix this
throw new UnsupportedOperationException("Only 1 axis is support!");
}
return toArray(RustLibrary.maxWithAxis(getHandle(), axes[0], keepDims));
}
/** {@inheritDoc} */
@Override
public RsNDArray min() {
if (isScalar()) {
return this;
}
return toArray(RustLibrary.min(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray min(int[] axes, boolean keepDims) {
if (axes.length > 1) {
// TODO fix this
throw new UnsupportedOperationException("Only 1 axis is support!");
}
return toArray(RustLibrary.minWithAxis(getHandle(), axes[0], keepDims));
}
/** {@inheritDoc} */
@Override
public RsNDArray sum() {
if (isScalar()) {
return this;
}
return toArray(RustLibrary.sum(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray sum(int[] axes, boolean keepDims) {
return toArray(RustLibrary.sumWithAxis(getHandle(), axes, keepDims));
}
/** {@inheritDoc} */
@Override
public NDArray cumProd(int axis) {
return toArray(RustLibrary.cumProd(getHandle(), axis));
}
/** {@inheritDoc} */
@Override
public NDArray cumProd(int axis, DataType dataType) {
return toArray(RustLibrary.cumProdWithType(getHandle(), axis, dataType.ordinal()));
}
/** {@inheritDoc} */
@Override
public RsNDArray prod() {
return toArray(RustLibrary.prod(getHandle()));
}
    /** {@inheritDoc} */
    @Override
    public RsNDArray prod(int[] axes, boolean keepDims) {
        if (axes.length > 1) {
            throw new UnsupportedOperationException("Only 1 axis is support!");
        }
        // NOTE(review): this delegates to cumProdWithAxis, whose name suggests a cumulative
        // product rather than a product reduction (compare sumWithAxis/maxWithAxis above).
        // Confirm the native call actually reduces along the axis.
        return toArray(RustLibrary.cumProdWithAxis(getHandle(), axes[0], keepDims));
    }
/** {@inheritDoc} */
@Override
public RsNDArray mean() {
return toArray(RustLibrary.mean(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray mean(int[] axes, boolean keepDims) {
return toArray(RustLibrary.meanWithAxis(getHandle(), axes, keepDims));
}
/** {@inheritDoc} */
@Override
public RsNDArray normalize(double p, long dim, double eps) {
return toArray(RustLibrary.normalize(getHandle(), p, dim, eps));
}
/** {@inheritDoc} */
@Override
public RsNDArray rotate90(int times, int[] axes) {
if (axes.length != 2) {
throw new IllegalArgumentException("Axes must be 2");
}
return toArray(RustLibrary.rot90(getHandle(), times, axes));
}
/** {@inheritDoc} */
@Override
public RsNDArray trace(int offset, int axis1, int axis2) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList split(long[] indices, int axis) {
if (indices.length == 0) {
return new NDList(this);
}
long lastIndex = getShape().get(axis);
if (indices[indices.length - 1] != lastIndex) {
long[] tmp = new long[indices.length + 1];
System.arraycopy(indices, 0, tmp, 0, indices.length);
tmp[indices.length] = lastIndex;
indices = tmp;
}
return toList(RustLibrary.split(getHandle(), indices, axis));
}
/** {@inheritDoc} */
@Override
public RsNDArray flatten() {
return toArray(RustLibrary.flatten(getHandle()));
}
/** {@inheritDoc} */
@Override
public NDArray flatten(int startDim, int endDim) {
return toArray(RustLibrary.flattenWithDims(getHandle(), startDim, endDim));
}
/** {@inheritDoc} */
@Override
public NDArray fft(long length, long axis) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray rfft(long length, long axis) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray ifft(long length, long axis) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray irfft(long length, long axis) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray stft(
long nFft,
long hopLength,
boolean center,
NDArray window,
boolean normalize,
boolean returnComplex) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray fft2(long[] sizes, long[] axes) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray pad(Shape padding, double value) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray ifft2(long[] sizes, long[] axes) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray reshape(Shape shape) {
long prod = 1;
int neg = -1;
long[] dims = shape.getShape();
for (int i = 0; i < dims.length; ++i) {
if (dims[i] < 0) {
if (neg != -1) {
throw new IllegalArgumentException("only 1 negative axis is allowed");
}
neg = i;
} else {
prod *= dims[i];
}
}
if (neg != -1) {
long total = getShape().size();
if (total % prod != 0) {
throw new IllegalArgumentException("unsupported dimensions");
}
dims[neg] = total / prod;
}
return toArray(RustLibrary.reshape(getHandle(), shape.getShape()));
}
/** {@inheritDoc} */
@Override
public RsNDArray expandDims(int axis) {
return toArray(RustLibrary.expandDims(getHandle(), axis));
}
/** {@inheritDoc} */
@Override
public RsNDArray squeeze(int[] axes) {
return toArray(RustLibrary.squeeze(getHandle(), axes));
}
/** {@inheritDoc} */
@Override
public NDList unique(Integer dim, boolean sorted, boolean returnInverse, boolean returnCounts) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray logicalAnd(NDArray other) {
try (NDScope ignore = new NDScope()) {
long otherHandle = manager.from(other).getHandle();
return toArray(RustLibrary.logicalAnd(getHandle(), otherHandle), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray logicalOr(NDArray other) {
try (NDScope ignore = new NDScope()) {
long otherHandle = manager.from(other).getHandle();
return toArray(RustLibrary.logicalOr(getHandle(), otherHandle), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray logicalXor(NDArray other) {
try (NDScope ignore = new NDScope()) {
long otherHandle = manager.from(other).getHandle();
return toArray(RustLibrary.logicalXor(getHandle(), otherHandle), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray logicalNot() {
return toArray(RustLibrary.logicalNot(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray argSort(int axis, boolean ascending) {
return toArray(RustLibrary.argSort(getHandle(), axis, ascending));
}
/** {@inheritDoc} */
@Override
public RsNDArray sort() {
return sort(-1);
}
/** {@inheritDoc} */
@Override
public RsNDArray sort(int axis) {
return toArray(RustLibrary.sort(getHandle(), axis, false));
}
/** {@inheritDoc} */
@Override
public RsNDArray softmax(int axis) {
if (getShape().isScalar() || shape.size() == 0) {
return (RsNDArray) duplicate();
}
return toArray(RustLibrary.softmax(getHandle(), axis));
}
/** {@inheritDoc} */
@Override
public RsNDArray logSoftmax(int axis) {
return toArray(RustLibrary.logSoftmax(getHandle(), axis));
}
/** {@inheritDoc} */
@Override
public RsNDArray cumSum() {
// TODO: change default behavior on cumSum
if (isScalar()) {
return (RsNDArray) reshape(1);
}
if (isEmpty()) {
return (RsNDArray) reshape(0);
}
return cumSum(0);
}
/** {@inheritDoc} */
@Override
public RsNDArray cumSum(int axis) {
if (getShape().dimension() > 3) {
throw new UnsupportedOperationException("Only 3 dimensions or less is supported");
}
return toArray(RustLibrary.cumSum(getHandle(), axis));
}
/** {@inheritDoc} */
@Override
public NDArray diagonal() {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public NDArray diagonal(int offset) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public NDArray diagonal(int offset, int axis1, int axis2) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public void intern(NDArray replaced) {
RsNDArray arr = (RsNDArray) replaced;
Long oldHandle = handle.getAndSet(arr.handle.getAndSet(null));
RustLibrary.deleteTensor(oldHandle);
// dereference old ndarray
arr.close();
}
/** {@inheritDoc} */
@Override
public RsNDArray isInfinite() {
return toArray(RustLibrary.isInf(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray isNaN() {
return toArray(RustLibrary.isNaN(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray tile(long repeats) {
// zero-dim
if (isEmpty()) {
return (RsNDArray) duplicate();
}
// scalar
int dim = (isScalar()) ? 1 : getShape().dimension();
long[] repeatsArray = new long[dim];
Arrays.fill(repeatsArray, repeats);
return tile(repeatsArray);
}
/** {@inheritDoc} */
@Override
public RsNDArray tile(int axis, long repeat) {
return toArray(RustLibrary.tileWithAxis(getHandle(), axis, repeat));
}
/** {@inheritDoc} */
@Override
public RsNDArray tile(long[] repeats) {
return toArray(RustLibrary.tile(getHandle(), repeats));
}
/** {@inheritDoc} */
@Override
public RsNDArray tile(Shape desiredShape) {
return toArray(RustLibrary.tileWithShape(getHandle(), desiredShape.getShape()));
}
/** {@inheritDoc} */
@Override
public RsNDArray repeat(long repeats) {
// zero-dim
if (isEmpty()) {
return (RsNDArray) duplicate();
}
// scalar
int dim = (isScalar()) ? 1 : getShape().dimension();
long[] repeatsArray = new long[dim];
Arrays.fill(repeatsArray, repeats);
return repeat(repeatsArray);
}
/** {@inheritDoc} */
@Override
public RsNDArray repeat(int axis, long repeat) {
return toArray(RustLibrary.repeat(getHandle(), repeat, axis));
}
/** {@inheritDoc} */
@Override
public RsNDArray repeat(long[] repeats) {
RsNDArray result = this;
for (int dim = 0; dim < repeats.length; dim++) {
RsNDArray temp = result;
result = result.repeat(dim, repeats[dim]);
if (temp != this) {
temp.close();
}
}
return result;
}
/** {@inheritDoc} */
@Override
public RsNDArray repeat(Shape desiredShape) {
return repeat(repeatsToMatchShape(desiredShape));
}
private long[] repeatsToMatchShape(Shape desiredShape) {
Shape curShape = getShape();
int dimension = curShape.dimension();
if (desiredShape.dimension() > dimension) {
throw new IllegalArgumentException("The desired shape has too many dimensions");
}
if (desiredShape.dimension() < dimension) {
int additionalDimensions = dimension - desiredShape.dimension();
desiredShape = curShape.slice(0, additionalDimensions).addAll(desiredShape);
}
long[] repeats = new long[dimension];
for (int i = 0; i < dimension; i++) {
if (curShape.get(i) == 0 || desiredShape.get(i) % curShape.get(i) != 0) {
throw new IllegalArgumentException(
"The desired shape is not a multiple of the original shape");
}
repeats[i] = Math.round(Math.ceil((double) desiredShape.get(i) / curShape.get(i)));
}
return repeats;
}
/** {@inheritDoc} */
@Override
public RsNDArray dot(NDArray other) {
int selfDim = this.getShape().dimension();
int otherDim = other.getShape().dimension();
if (selfDim != otherDim || selfDim > 2) {
throw new UnsupportedOperationException(
"Dimension mismatch or dimension is greater than 2. Dot product is only"
+ " applied on two 1D vectors. For high dimensions, please use .matMul"
+ " instead.");
}
try (NDScope ignore = new NDScope()) {
return toArray(RustLibrary.dot(getHandle(), manager.from(other).getHandle()), true);
}
}
/** {@inheritDoc} */
@Override
public NDArray matMul(NDArray other) {
if (getShape().dimension() < 2 || getShape().dimension() < 2) {
throw new IllegalArgumentException("only 2d tensors are supported for matMul()");
}
try (NDScope ignore = new NDScope()) {
long otherHandle = manager.from(other).getHandle();
return toArray(RustLibrary.matmul(getHandle(), otherHandle), true);
}
}
/** {@inheritDoc} */
@Override
public NDArray batchMatMul(NDArray other) {
if (getShape().dimension() != 3 || getShape().dimension() != 3) {
throw new IllegalArgumentException("only 3d tensors are allowed for batchMatMul()");
}
try (NDScope ignore = new NDScope()) {
long otherHandle = manager.from(other).getHandle();
return toArray(RustLibrary.batchMatMul(getHandle(), otherHandle), true);
}
}
/** {@inheritDoc} */
@Override
public RsNDArray clip(Number min, Number max) {
return toArray(RustLibrary.clip(getHandle(), min.doubleValue(), max.doubleValue()));
}
/** {@inheritDoc} */
@Override
public RsNDArray swapAxes(int axis1, int axis2) {
return toArray(RustLibrary.transpose(getHandle(), axis1, axis2));
}
/** {@inheritDoc} */
@Override
public NDArray flip(int... axes) {
return toArray(RustLibrary.flip(getHandle(), axes));
}
/** {@inheritDoc} */
@Override
public RsNDArray transpose() {
int dim = getShape().dimension();
int[] reversedShape = IntStream.range(0, dim).map(i -> dim - i - 1).toArray();
return transpose(reversedShape);
}
/** {@inheritDoc} */
@Override
public RsNDArray transpose(int... axes) {
if (isScalar() && axes.length > 0) {
throw new IllegalArgumentException("axes don't match NDArray");
}
return toArray(RustLibrary.permute(getHandle(), axes));
}
/** {@inheritDoc} */
@Override
public RsNDArray broadcast(Shape shape) {
return toArray(RustLibrary.broadcast(getHandle(), shape.getShape()));
}
/** {@inheritDoc} */
@Override
public RsNDArray argMax() {
if (isEmpty()) {
throw new IllegalArgumentException("attempt to get argMax of an empty NDArray");
}
if (isScalar()) {
return (RsNDArray) manager.create(0L);
}
return toArray(RustLibrary.argMax(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray argMax(int axis) {
if (isScalar()) {
return (RsNDArray) manager.create(0L);
}
return toArray(RustLibrary.argMaxWithAxis(getHandle(), axis, false));
}
/** {@inheritDoc} */
@Override
public NDList topK(int k, int axis, boolean largest, boolean sorted) {
return toList(RustLibrary.topK(getHandle(), k, axis, largest, sorted));
}
/** {@inheritDoc} */
@Override
public RsNDArray argMin() {
if (isEmpty()) {
throw new IllegalArgumentException("attempt to get argMin of an empty NDArray");
}
if (isScalar()) {
return (RsNDArray) manager.create(0L);
}
return toArray(RustLibrary.argMin(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray argMin(int axis) {
if (isScalar()) {
return (RsNDArray) manager.create(0L);
}
return toArray(RustLibrary.argMinWithAxis(getHandle(), axis, false));
}
/** {@inheritDoc} */
@Override
public RsNDArray percentile(Number percentile) {
return toArray(RustLibrary.percentile(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray percentile(Number percentile, int[] axes) {
return toArray(RustLibrary.percentileWithAxes(getHandle(), percentile.doubleValue(), axes));
}
/** {@inheritDoc} */
@Override
public RsNDArray median() {
return median(new int[] {-1});
}
/** {@inheritDoc} */
@Override
public RsNDArray median(int[] axes) {
if (axes.length != 1) {
throw new UnsupportedOperationException(
"Not supporting zero or multi-dimension median");
}
NDList result = toList(RustLibrary.median(getHandle(), axes[0], false));
result.get(1).close();
return (RsNDArray) result.get(0);
}
/** {@inheritDoc} */
@Override
public RsNDArray toDense() {
return (RsNDArray) duplicate();
}
/** {@inheritDoc} */
@Override
public RsNDArray toSparse(SparseFormat fmt) {
throw new UnsupportedOperationException("Not supported");
}
/** {@inheritDoc} */
@Override
public RsNDArray nonzero() {
return toArray(RustLibrary.nonZero(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray erfinv() {
return toArray(RustLibrary.erfinv(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray erf() {
return toArray(RustLibrary.erf(getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray inverse() {
return toArray(RustLibrary.inverse(getHandle()));
}
/** {@inheritDoc} */
@Override
public NDArray norm(boolean keepDims) {
return toArray(RustLibrary.norm(getHandle(), 2, new int[] {}, keepDims));
}
/** {@inheritDoc} */
@Override
public NDArray norm(int order, int[] axes, boolean keepDims) {
return toArray(RustLibrary.norm(getHandle(), order, axes, keepDims));
}
/** {@inheritDoc} */
@Override
public NDArray oneHot(int depth, float onValue, float offValue, DataType dataType) {
return toArray(
RustLibrary.oneHot(getHandle(), depth, onValue, offValue, dataType.ordinal()));
}
/** {@inheritDoc} */
@Override
public NDArray batchDot(NDArray other) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray complex() {
return toArray(RustLibrary.complex(getHandle()));
}
/** {@inheritDoc} */
@Override
public NDArray real() {
return toArray(RustLibrary.real(getHandle()));
}
/** {@inheritDoc} */
@Override
public NDArray conj() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArrayEx getNDArrayInternal() {
if (ndArrayEx == null) {
throw new UnsupportedOperationException(
"NDArray operation is not supported for String tensor");
}
return ndArrayEx;
}
/** {@inheritDoc} */
@Override
public NDArray diff(int n, int dim) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public String toString() {
if (isReleased()) {
return "This array is already closed";
}
return toDebugString();
}
    /** {@inheritDoc} */
    @Override
    public boolean equals(Object obj) {
        // Equality is element-wise content equality; see contentEquals(NDArray).
        if (obj instanceof NDArray) {
            return contentEquals((NDArray) obj);
        }
        return false;
    }
    /** {@inheritDoc} */
    @Override
    public int hashCode() {
        // Constant hash keeps the equals/hashCode contract (equal arrays share a hash)
        // but makes hash-based collections degenerate; NDArrays are not meant as keys.
        return 0;
    }
/** {@inheritDoc} */
@Override
public void close() {
onClose();
Long pointer = handle.getAndSet(null);
if (pointer != null && pointer != -1) {
RustLibrary.deleteTensor(pointer);
}
manager.detachInternal(getUid());
dataRef = null;
}
private RsNDArray toArray(long newHandle) {
return toArray(newHandle, false);
}
private RsNDArray toArray(long newHandle, boolean unregister) {
return toArray(newHandle, null, unregister, false);
}
private RsNDArray toArray(
long newHandle, DataType dataType, boolean unregister, boolean withName) {
RsNDArray array = new RsNDArray(manager, newHandle, dataType);
if (withName) {
array.setName(getName());
}
if (unregister) {
NDScope.unregister(array);
}
return array;
}
private NDList toList(long[] handles) {
NDList list = new NDList(handles.length);
for (long h : handles) {
list.add(new RsNDArray(manager, h));
}
return list;
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsNDArrayEx.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.NDScope;
import ai.djl.ndarray.NDUtils;
import ai.djl.ndarray.index.NDArrayIndexer;
import ai.djl.ndarray.internal.NDArrayEx;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.nn.recurrent.RNN;
import java.util.List;
/** {@code RsNDArrayEx} is the Rust implementation of the {@link NDArrayEx}. */
@SuppressWarnings("try")
public class RsNDArrayEx implements NDArrayEx {
private RsNDArray array;
    /**
     * Constructs an {@code RsNDArrayEx} given a {@link NDArray}.
     *
     * @param parent the {@link NDArray} to extend
     */
    RsNDArrayEx(RsNDArray parent) {
        this.array = parent;
    }
/** {@inheritDoc} */
@Override
public RsNDArray rdivi(NDArray b) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray rmodi(NDArray b) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray rpowi(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public RsNDArray relu() {
return new RsNDArray(array.getManager(), RustLibrary.relu(array.getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray sigmoid() {
return new RsNDArray(array.getManager(), RustLibrary.sigmoid(array.getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray tanh() {
return array.tanh();
}
/** {@inheritDoc} */
@Override
public RsNDArray softPlus() {
return new RsNDArray(array.getManager(), RustLibrary.softPlus(array.getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray softSign() {
return new RsNDArray(array.getManager(), RustLibrary.softSign(array.getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray leakyRelu(float alpha) {
return new RsNDArray(array.getManager(), RustLibrary.leakyRelu(array.getHandle(), alpha));
}
/** {@inheritDoc} */
@Override
public RsNDArray elu(float alpha) {
return new RsNDArray(array.getManager(), RustLibrary.elu(array.getHandle(), alpha));
}
/** {@inheritDoc} */
@Override
public RsNDArray selu() {
return new RsNDArray(array.getManager(), RustLibrary.selu(array.getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray gelu() {
return new RsNDArray(array.getManager(), RustLibrary.gelu(array.getHandle()));
}
/** {@inheritDoc} */
@Override
public RsNDArray maxPool(Shape kernelShape, Shape stride, Shape padding, boolean ceilMode) {
return new RsNDArray(
array.getManager(),
RustLibrary.maxPool(
array.getHandle(),
kernelShape.getShape(),
stride.getShape(),
padding.getShape(),
ceilMode));
}
    /** {@inheritDoc} */
    @Override
    public RsNDArray globalMaxPool() {
        // Adaptive-max-pool every spatial axis down to 1, then drop the spatial axes
        // so the result shape is (batch, channels).
        Shape shape = getPoolShape(array);
        long newHandle = RustLibrary.adaptiveMaxPool(array.getHandle(), shape.getShape());
        // try-with-resources closes the intermediate; reshape yields an independent array.
        try (NDArray temp = new RsNDArray(array.getManager(), newHandle)) {
            return (RsNDArray) temp.reshape(array.getShape().slice(0, 2));
        }
    }
/** {@inheritDoc} */
@Override
public RsNDArray avgPool(
Shape kernelShape,
Shape stride,
Shape padding,
boolean ceilMode,
boolean countIncludePad) {
if (kernelShape.size() != 2) {
throw new UnsupportedOperationException("Only avgPool2d is supported");
}
return new RsNDArray(
array.getManager(),
RustLibrary.avgPool2d(
array.getHandle(), kernelShape.getShape(), stride.getShape()));
}
    /** {@inheritDoc} */
    @Override
    public RsNDArray globalAvgPool() {
        // Adaptive-avg-pool every spatial axis down to 1, then drop the spatial axes
        // so the result shape is (batch, channels).
        Shape shape = getPoolShape(array);
        long newHandle = RustLibrary.adaptiveAvgPool(array.getHandle(), shape.getShape());
        // try-with-resources closes the intermediate; reshape yields an independent array.
        try (NDArray temp = new RsNDArray(array.getManager(), newHandle)) {
            return (RsNDArray) temp.reshape(array.getShape().slice(0, 2));
        }
    }
    /** {@inheritDoc} */
    @Override
    public RsNDArray lpPool(
            float normType, Shape kernelShape, Shape stride, Shape padding, boolean ceilMode) {
        // NOTE(review): Shape.size() is the product of the dims, so a padding of all
        // zeros passes this check; a genuinely non-zero padding in every dim throws.
        if (padding.size() != 0) {
            throw new IllegalArgumentException("padding is not supported for Rust engine");
        }
        return new RsNDArray(
                array.getManager(),
                RustLibrary.lpPool(
                        array.getHandle(),
                        normType,
                        kernelShape.getShape(),
                        stride.getShape(),
                        ceilMode));
    }
    /** {@inheritDoc} */
    @Override
    public RsNDArray globalLpPool(float normType) {
        // Use the full spatial extent as the kernel and a stride of all 1s, so a single
        // window covers each feature map; then drop the spatial axes.
        long[] kernelShape = array.getShape().slice(2).getShape();
        long[] stride = getPoolShape(array).getShape();
        long newHandle =
                RustLibrary.lpPool(array.getHandle(), normType, kernelShape, stride, false);
        // try-with-resources closes the intermediate; reshape yields an independent array.
        try (NDArray temp = new RsNDArray(array.getManager(), newHandle)) {
            return (RsNDArray) temp.reshape(array.getShape().slice(0, 2));
        }
    }
/** {@inheritDoc} */
@Override
public void adadeltaUpdate(
NDList inputs,
NDList weights,
float weightDecay,
float rescaleGrad,
float clipGrad,
float rho,
float epsilon) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void adagradUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float epsilon) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void adamUpdate(
NDList inputs,
NDList weights,
float learningRate,
float learningRateBiasCorrection,
float weightDecay,
float rescaleGrad,
float clipGrad,
float beta1,
float beta2,
float epsilon,
boolean lazyUpdate,
boolean adamw) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void nagUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float momentum) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void rmspropUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float rho,
float momentum,
float epsilon,
boolean centered) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void sgdUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float momentum,
boolean lazyUpdate) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList convolution(
NDArray input,
NDArray weight,
NDArray bias,
Shape stride,
Shape padding,
Shape dilation,
int groups) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList deconvolution(
NDArray input,
NDArray weight,
NDArray bias,
Shape stride,
Shape padding,
Shape outPadding,
Shape dilation,
int groups) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList linear(NDArray input, NDArray weight, NDArray bias) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList embedding(NDArray input, NDArray weight, SparseFormat sparseFormat) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList prelu(NDArray input, NDArray alpha) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList dropout(NDArray input, float rate, boolean training) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList layerNorm(
NDArray input, Shape normalizedShape, NDArray gamma, NDArray beta, float eps) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList batchNorm(
NDArray input,
NDArray runningMean,
NDArray runningVar,
NDArray gamma,
NDArray beta,
int axis,
float momentum,
float eps,
boolean training) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList rnn(
NDArray input,
NDArray state,
NDList params,
boolean hasBiases,
int numLayers,
RNN.Activation activation,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList gru(
NDArray input,
NDArray state,
NDList params,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList lstm(
NDArray input,
NDList states,
NDList params,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray interpolation(long[] size, int mode, boolean alignCorners) {
throw new UnsupportedOperationException("Not implemented");
}
    /** {@inheritDoc} */
    @Override
    public RsNDArray resize(int width, int height, int interpolation) {
        // Identity case: the image already has the requested (height, width); only a
        // conversion to FLOAT32 is performed. Assumes HWC layout (shape[0]=H, shape[1]=W).
        long[] shape = array.getShape().getShape();
        if (shape[0] == height && shape[1] == width) {
            return array.toType(DataType.FLOAT32, false);
        }
        // TODO: actual resampling is not implemented for the Rust engine yet.
        throw new UnsupportedOperationException("Not implemented");
    }
/** {@inheritDoc} */
@Override
public NDArray randomFlipLeftRight() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray randomFlipTopBottom() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray randomBrightness(float brightness) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray randomHue(float hue) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray randomColorJitter(
float brightness, float contrast, float saturation, float hue) {
throw new UnsupportedOperationException("Not implemented");
}
    /** {@inheritDoc} */
    @Override
    public NDArrayIndexer getIndexer(NDManager manager) {
        // A fresh indexer is created per call; it only borrows the manager reference.
        return new RsNDArrayIndexer((RsNDManager) manager);
    }
    /** {@inheritDoc} */
    @Override
    public RsNDArray where(NDArray condition, NDArray other) {
        // Broadcasting is NOT attempted: condition must match this array's shape exactly.
        if (!condition.getShape().equals(array.getShape())) {
            throw new UnsupportedOperationException(
                    "condition and self shape mismatch, broadcast is not supported");
        }
        RsNDManager manager = array.getManager();
        // NDScope tracks temporaries created by manager.from(...); unregister keeps
        // the result alive after the scope frees them.
        try (NDScope ignore = new NDScope()) {
            long conditionHandle = manager.from(condition).getHandle();
            long otherHandle = manager.from(other).getHandle();
            RsNDArray ret =
                    new RsNDArray(
                            manager,
                            RustLibrary.where(conditionHandle, array.getHandle(), otherHandle));
            NDScope.unregister(ret);
            return ret;
        }
    }
/** {@inheritDoc} */
@Override
public RsNDArray stack(NDList arrays, int axis) {
long[] srcArray = new long[arrays.size() + 1];
srcArray[0] = array.getHandle();
RsNDManager manager = array.getManager();
try (NDScope ignore = new NDScope()) {
int i = 1;
for (NDArray arr : arrays) {
srcArray[i++] = manager.from(arr).getHandle();
}
RsNDArray ret = new RsNDArray(manager, RustLibrary.stack(srcArray, axis));
NDScope.unregister(ret);
return ret;
}
}
/** {@inheritDoc} */
@Override
public RsNDArray concat(NDList list, int axis) {
NDUtils.checkConcatInput(list);
long[] srcArray = new long[list.size() + 1];
srcArray[0] = array.getHandle();
RsNDManager manager = array.getManager();
try (NDScope ignore = new NDScope()) {
int i = 1;
for (NDArray arr : list) {
srcArray[i++] = manager.from(arr).getHandle();
}
RsNDArray ret = new RsNDArray(manager, RustLibrary.concat(srcArray, axis));
NDScope.unregister(ret);
return ret;
}
}
/** {@inheritDoc} */
@Override
public NDList multiBoxTarget(
NDList inputs,
float iouThreshold,
float ignoreLabel,
float negativeMiningRatio,
float negativeMiningThreshold,
int minNegativeSamples) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList multiBoxPrior(
List<Float> sizes,
List<Float> ratios,
List<Float> steps,
List<Float> offsets,
boolean clip) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList multiBoxDetection(
NDList inputs,
boolean clip,
float threshold,
int backgroundId,
float nmsThreshold,
boolean forceSuppress,
int nmsTopK) {
throw new UnsupportedOperationException("Not implemented");
}
    /** {@inheritDoc} */
    @Override
    public RsNDArray getArray() {
        // Exposes the backing array this extension wraps.
        return array;
    }
private Shape getPoolShape(NDArray array) {
switch (array.getShape().dimension() - 2) {
case 1:
return new Shape(1);
case 2:
return new Shape(1, 1);
case 3:
return new Shape(1, 1, 1);
default:
throw new IllegalArgumentException("the input dimension should be in [3, 5]");
}
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsNDArrayIndexer.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDScope;
import ai.djl.ndarray.index.NDArrayIndexer;
import ai.djl.ndarray.index.full.NDIndexFullPick;
import ai.djl.ndarray.index.full.NDIndexFullSlice;
import ai.djl.ndarray.index.full.NDIndexFullTake;
import ai.djl.ndarray.types.Shape;
import java.util.Arrays;
/** The {@link NDArrayIndexer} used by the {@link RsNDArray}. */
@SuppressWarnings("try")
public class RsNDArrayIndexer extends NDArrayIndexer {
    // Manager used to coerce foreign NDArrays into RsNDArrays with native handles.
    private RsNDManager manager;
    RsNDArrayIndexer(RsNDManager manager) {
        this.manager = manager;
    }
    /** {@inheritDoc} */
    @Override
    public NDArray get(NDArray array, NDIndexFullPick fullPick) {
        // NDScope tracks temporaries created by manager.from(...); unregister keeps
        // the result alive after the scope frees them.
        try (NDScope ignore = new NDScope()) {
            long handle = manager.from(array).getHandle();
            long pickHandle = manager.from(fullPick.getIndices()).getHandle();
            long newHandle = RustLibrary.pick(handle, pickHandle, fullPick.getAxis());
            RsNDArray ret = new RsNDArray(manager, newHandle);
            NDScope.unregister(ret);
            return ret;
        }
    }
    /** {@inheritDoc} */
    @Override
    public NDArray get(NDArray array, NDIndexFullTake fullTake) {
        try (NDScope ignore = new NDScope()) {
            long handle = manager.from(array).getHandle();
            long takeHandle = manager.from(fullTake.getIndices()).getHandle();
            RsNDArray ret = new RsNDArray(manager, RustLibrary.take(handle, takeHandle));
            NDScope.unregister(ret);
            return ret;
        }
    }
    /** {@inheritDoc} */
    @Override
    public NDArray get(NDArray array, NDIndexFullSlice fullSlice) {
        long[] min = fullSlice.getMin();
        long[] max = fullSlice.getMax();
        long[] step = fullSlice.getStep();
        long[] s = array.getShape().getShape().clone();
        // Only unit steps are supported by the native fullSlice implementation.
        if (Arrays.stream(step).anyMatch(i -> i != 1)) {
            throw new UnsupportedOperationException("only step 1 is supported");
        }
        // Degenerate slice (empty range or start beyond the axis): return an empty
        // array of the squeezed result shape without touching native code.
        for (int i = 0; i < min.length; i++) {
            if (min[i] >= max[i] || min[i] >= s[i]) {
                Shape shape = fullSlice.getSqueezedShape();
                return manager.create(shape, array.getDataType(), array.getDevice());
            }
        }
        try (NDScope ignore = new NDScope()) {
            long handle = manager.from(array).getHandle();
            // Slice first, then reshape to the squeezed shape; the intermediate slice
            // tensor is freed manually once the reshaped tensor exists.
            long tmp = RustLibrary.fullSlice(handle, min, max, step);
            long newHandle = RustLibrary.reshape(tmp, fullSlice.getSqueezedShape().getShape());
            RustLibrary.deleteTensor(tmp);
            RsNDArray ret = new RsNDArray(manager, newHandle, array.getDataType());
            NDScope.unregister(ret);
            return ret;
        }
    }
    /** {@inheritDoc} */
    @Override
    public void set(NDArray array, NDIndexFullSlice fullSlice, NDArray value) {
        throw new UnsupportedOperationException("Not implemented");
    }
    /** {@inheritDoc} */
    @Override
    public void set(NDArray array, NDIndexFullSlice fullSlice, Number value) {
        // NOTE(review): the scalar array created here is handed to the NDArray
        // overload (which currently throws) and is never explicitly closed — verify
        // it is reclaimed by its manager.
        set(array, fullSlice, array.getManager().create(value));
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsNDManager.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
/** {@code RsNDManager} is the Rust implementation of {@link NDManager}. */
public class RsNDManager extends BaseNDManager {
    private static final RsNDManager SYSTEM_MANAGER = new SystemManager();
    private RsNDManager(NDManager parent, Device device) {
        super(parent, device);
    }
    /**
     * Returns the singleton system manager, the root of all {@code RsNDManager}s.
     *
     * @return the system manager
     */
    static RsNDManager getSystemManager() {
        return SYSTEM_MANAGER;
    }
    /** {@inheritDoc} */
    @Override
    public ByteBuffer allocateDirect(int capacity) {
        // Native order is required so the Rust side reads the bytes correctly.
        return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
    }
    /** {@inheritDoc} */
    @Override
    public RsNDArray from(NDArray array) {
        // Pass-through when the array already belongs to this engine (or is null);
        // otherwise copy its data into a new native tensor.
        if (array == null || array instanceof RsNDArray) {
            return (RsNDArray) array;
        }
        RsNDArray result = create(array.toByteBuffer(), array.getShape(), array.getDataType());
        result.setName(array.getName());
        return result;
    }
    /** {@inheritDoc} */
    @Override
    public RsNDArray create(Shape shape, DataType dataType) {
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.zeros(shape.getShape(), dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public RsNDArray create(Buffer data, Shape shape, DataType dataType) {
        int size = Math.toIntExact(shape.size());
        BaseNDManager.validateBuffer(data, dataType, size);
        ByteBuffer buf;
        if (data.isDirect() && data instanceof ByteBuffer) {
            // Direct ByteBuffers can be handed to native code without copying.
            buf = (ByteBuffer) data;
        } else {
            // multiplyExact guards against int overflow for very large tensors.
            buf = allocateDirect(Math.multiplyExact(size, dataType.getNumOfBytes()));
            copyBuffer(data, buf);
        }
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.tensorOf(buf, shape.getShape(), dType, deviceType, deviceId);
        // The buffer is retained by the array to keep the native view's backing alive.
        return new RsNDArray(this, handle, dataType, buf);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray create(String[] data, Charset charset, Shape shape) {
        throw new UnsupportedOperationException("Not implemented");
    }
    /** {@inheritDoc} */
    @Override
    public NDArray createCoo(Buffer data, long[][] indices, Shape shape) {
        throw new UnsupportedOperationException("Not implemented");
    }
    /** {@inheritDoc} */
    @Override
    public NDArray zeros(Shape shape, DataType dataType) {
        // create(Shape, DataType) already zero-fills via RustLibrary.zeros.
        return create(shape, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray ones(Shape shape, DataType dataType) {
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.ones(shape.getShape(), dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray full(Shape shape, float value, DataType dataType) {
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.full(value, shape.getShape(), dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray arange(int start, int stop, int step, DataType dataType) {
        return arange((float) start, (float) stop, (float) step, dataType, device);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray arange(float start, float stop, float step, DataType dataType) {
        // A step that cannot reach stop from start yields an empty array, matching
        // the NDManager contract.
        if (Math.signum(stop - start) != Math.signum(step)) {
            return create(new Shape(0), dataType, device);
        }
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.arange(start, stop, step, dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray eye(int rows, int cols, int k, DataType dataType) {
        // The native backend only supports square identity matrices on the main
        // diagonal.
        if (k != 0) {
            throw new UnsupportedOperationException(
                    "index of the diagonal is not supported in Rust");
        }
        if (rows != cols) {
            throw new UnsupportedOperationException("rows must equals to columns in Rust");
        }
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.eye(rows, cols, dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray linspace(float start, float stop, int num, boolean endpoint) {
        if (!endpoint) {
            throw new UnsupportedOperationException("endpoint only support true");
        }
        // linspace is defined to return FLOAT32 by the NDManager contract.
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = DataType.FLOAT32.ordinal();
        long handle = RustLibrary.linspace(start, stop, num, dType, deviceType, deviceId);
        return new RsNDArray(this, handle, DataType.FLOAT32);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray randomInteger(long low, long high, Shape shape, DataType dataType) {
        // Honor the requested data type; the previous code hard-coded FLOAT32 and
        // silently ignored the dataType argument (unlike randomUniform/randomNormal).
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle =
                RustLibrary.randint(low, high, shape.getShape(), dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray randomPermutation(long n) {
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        long handle = RustLibrary.randomPermutation(n, deviceType, deviceId);
        return new RsNDArray(this, handle);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray randomUniform(float low, float high, Shape shape, DataType dataType) {
        long[] sh = shape.getShape();
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.uniform(low, high, sh, dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray randomNormal(float loc, float scale, Shape shape, DataType dataType) {
        long[] sh = shape.getShape();
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        int dType = toRustDataType(dataType);
        long handle = RustLibrary.randomNormal(loc, scale, sh, dType, deviceType, deviceId);
        return new RsNDArray(this, handle, dataType);
    }
    /** {@inheritDoc} */
    @Override
    public NDArray hanningWindow(long numPoints) {
        String deviceType = device.getDeviceType();
        int deviceId = device.getDeviceId();
        long handle = RustLibrary.hannWindow(numPoints, deviceType, deviceId);
        return new RsNDArray(this, handle);
    }
    /** {@inheritDoc} */
    @Override
    public RsNDManager newSubManager(Device device) {
        RsNDManager manager = new RsNDManager(this, device);
        // Register the child so the parent can cascade close() without capping it.
        attachUncappedInternal(manager.uid, manager);
        return manager;
    }
    /** {@inheritDoc} */
    @Override
    public final Engine getEngine() {
        return Engine.getEngine(RsEngine.ENGINE_NAME);
    }
    /**
     * Maps a DJL {@link DataType} to the ordinal the native backend understands.
     *
     * @param dataType the DJL data type
     * @return the native data type ordinal
     * @throws UnsupportedOperationException if the type has no native equivalent
     */
    int toRustDataType(DataType dataType) {
        switch (dataType) {
            case BOOLEAN:
            case INT8:
                // NOTE(review): BOOLEAN and signed INT8 map to UINT8 — presumably the
                // native backend has no signed 8-bit type; verify negative values.
                return DataType.UINT8.ordinal();
            case INT32:
                // NOTE(review): signed INT32 maps to UINT32 — same caveat as above.
                return DataType.UINT32.ordinal();
            case FLOAT16:
            case BFLOAT16:
            case FLOAT32:
            case FLOAT64:
            case UINT8:
            case UINT32:
            case INT64:
                return dataType.ordinal();
            default:
                throw new UnsupportedOperationException("Unsupported data type: " + dataType);
        }
    }
    /** The SystemManager is the root {@link RsNDManager} of which all others are children. */
    private static final class SystemManager extends RsNDManager implements SystemNDManager {
        SystemManager() {
            super(null, null);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RsSymbolBlock.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import ai.djl.ndarray.NDList;
import ai.djl.nn.AbstractSymbolBlock;
import ai.djl.nn.ParameterList;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicReference;
/** {@code RsSymbolBlock} is the Rust implementation of {@link SymbolBlock}. */
public class RsSymbolBlock extends AbstractSymbolBlock implements AutoCloseable {
    // Native model pointer; AtomicReference lets close() release it exactly once.
    private AtomicReference<Long> handle;
    // Key under which this block is registered with the manager's resource tracker.
    private String uid;
    private RsNDManager manager;
    /**
     * Constructs a {@code RsSymbolBlock}.
     *
     * <p>You can create a {@code RsSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
     * String)}.
     *
     * @param manager the manager to use for the block
     * @param handle the native model handle
     */
    @SuppressWarnings("this-escape")
    public RsSymbolBlock(RsNDManager manager, long handle) {
        this.handle = new AtomicReference<>(handle);
        this.manager = manager;
        inputNames = Arrays.asList(RustLibrary.getInputNames(handle));
        // The native pointer value doubles as the registration key.
        uid = String.valueOf(handle);
        manager.attachInternal(uid, this);
    }
    /** {@inheritDoc} */
    @Override
    protected NDList forwardInternal(
            ParameterStore parameterStore,
            NDList inputs,
            boolean training,
            PairList<String, Object> params) {
        if (inputNames.size() != inputs.size()) {
            throw new IllegalArgumentException("Input size mismatch, requires: " + inputNames);
        }
        // The sub-manager scopes tensors copied in from other engines; they are
        // reclaimed when the try block exits.
        try (RsNDManager sub = (RsNDManager) manager.newSubManager()) {
            long[] inputHandles = new long[inputs.size()];
            for (int i = 0; i < inputs.size(); i++) {
                inputHandles[i] = sub.from(inputs.get(i)).getHandle();
            }
            long outputHandle = RustLibrary.runInference(handle.get(), inputHandles);
            RsNDArray output = new RsNDArray(manager, outputHandle);
            // Re-parent the output to the caller's manager so it survives `sub` closing.
            output.attach(inputs.head().getManager());
            return new NDList(output);
        }
    }
    /** {@inheritDoc} */
    @Override
    public void close() {
        // getAndSet(null) guarantees the release path runs at most once.
        Long pointer = handle.getAndSet(null);
        if (pointer != null) {
            manager.detachInternal(uid);
            manager = null;
            // NOTE(review): the native model itself is not freed here
            // (RustLibrary.deleteModel is never invoked) — confirm this is not a leak.
        }
    }
    /**
     * Gets the native Rust pointer.
     *
     * @return the pointer
     * @throws IllegalStateException if the block has already been closed
     */
    public Long getHandle() {
        Long reference = handle.get();
        if (reference == null) {
            throw new IllegalStateException("Rust model handle has been released!");
        }
        return reference;
    }
    /** {@inheritDoc} */
    @Override
    public ParameterList getDirectParameters() {
        throw new UnsupportedOperationException("Not yet supported");
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/RustLibrary.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust;
import java.nio.ByteBuffer;
/** Rust native library. */
@SuppressWarnings({"unused", "MissingJavadocMethod"})
public final class RustLibrary {
private RustLibrary() {}
public static native boolean isCudaAvailable();
public static native long loadModel(
String modelPath, int dtype, String deviceType, int deviceId);
public static native long deleteModel(long handle);
public static native String[] getInputNames(long handle);
public static native long runInference(long handle, long[] inputHandles);
public static native long tensorOf(
ByteBuffer buf, long[] shape, int dataType, String deviceType, int deviceId);
public static native long zeros(long[] shape, int dataType, String deviceType, int deviceId);
public static native long ones(long[] shape, int dType, String deviceType, int deviceId);
public static native long full(
float value, long[] shape, int dataType, String deviceType, int deviceId);
public static native long arange(
float start, float stop, float step, int dataType, String deviceType, int deviceId);
public static native long eye(
int rows, int cols, int dataType, String deviceType, int deviceId);
public static long linspace(
float start, float stop, int num, int dataType, String deviceType, int deviceId) {
throw new UnsupportedOperationException("Not implemented");
}
public static long randint(
long low, long high, long[] shape, int dataType, String deviceType, int deviceId) {
throw new UnsupportedOperationException("Not implemented");
}
public static long randomPermutation(long n, String deviceType, int deviceId) {
throw new UnsupportedOperationException("Not implemented");
}
public static native long uniform(
float low, float high, long[] shape, int dataType, String deviceType, int deviceId);
public static native long randomNormal(
float loc, float scale, long[] shape, int dataType, String deviceType, int deviceId);
public static long hannWindow(long numPoints, String deviceType, int deviceId) {
throw new UnsupportedOperationException("Not implemented");
}
public static native void deleteTensor(long handle);
public static native int getDataType(long handle);
public static native int[] getDevice(long handle);
public static native long[] getShape(long handle);
public static native long duplicate(long handle);
public static native long toDevice(long handle, String deviceType, int deviceId);
public static native long toBoolean(long handle);
public static native long toDataType(long handle, int dataType);
public static native byte[] toByteArray(long handle);
public static native long fullSlice(long handle, long[] min, long[] max, long[] step);
public static native long gather(long handle, long indexHandle, int axis);
public static long take(long handle, long indexHandle) {
throw new UnsupportedOperationException("Not implemented");
}
public static long put(long handle, long indexHandle, long valueHandle) {
throw new UnsupportedOperationException("Not implemented");
}
public static native long scatter(long handle, long indexHandle, long valueHandle, int axis);
public static long booleanMask(long handle, long indexHandle, int axis) {
throw new UnsupportedOperationException("Not implemented");
}
// comparison ops
public static native boolean contentEqual(long handle, long other);
public static native long eq(long handle, long other);
public static native long neq(long handle, long other);
public static native long gt(long handle, long other);
public static native long gte(long handle, long other);
public static native long lt(long handle, long other);
public static native long lte(long handle, long other);
// binary ops
public static native long add(long handle, long other);
public static native long sub(long handle, long other);
public static native long mul(long handle, long other);
public static native long div(long handle, long other);
public static native long minimum(long handle, long other);
public static native long maximum(long handle, long other);
public static long remainder(long handle, long other) {
throw new UnsupportedOperationException("Not implemented");
}
public static native long pow(long handle, long other);
public static long xlogy(long handle, long other) {
throw new UnsupportedOperationException("Not implemented");
}
// unary ops
public static native long exp(long handle);
public static native long log(long handle);
public static long log10(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static long log2(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static native long sin(long handle);
public static native long cos(long handle);
public static long tan(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static long asin(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static long acos(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static long atan(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static long atan2(long handle, long other) {
throw new UnsupportedOperationException("Not implemented");
}
public static long sinh(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static long cosh(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static native long tanh(long handle);
public static native long abs(long handle);
public static native long neg(long handle);
public static long sign(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static native long square(long handle);
public static native long sqrt(long handle);
public static native long floor(long handle);
public static native long ceil(long handle);
public static native long round(long handle);
public static long trunc(long handle) {
throw new UnsupportedOperationException("Not implemented");
}
public static native long countNonzero(long handle);
public static native long countNonzeroWithAxis(long handle, int axis);
// ---- Reduce ops ----
// Reductions either collapse all elements (no-axis variants) or reduce along the
// given axis/axes; `keepDims` presumably controls whether reduced axes are retained
// with size 1 — confirm against the native binding layer.
public static native long sum(long handle);

public static native long sumWithAxis(long handle, int[] axes, boolean keepDims);

public static long[] topK(long handle, int k, int axis, boolean largest, boolean sorted) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long max(long handle);

public static native long maxWithAxis(long handle, int axis, boolean keepDims);

public static native long min(long handle);

public static native long minWithAxis(long handle, int axis, boolean keepDims);

public static native long argMax(long handle);

public static native long argMaxWithAxis(long handle, int axis, boolean keepDims);

public static native long argMin(long handle);

public static native long argMinWithAxis(long handle, int axis, boolean keepDims);

public static long percentile(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long percentileWithAxes(long handle, double percentile, int[] axes) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long mean(long handle);

public static native long meanWithAxis(long handle, int[] axis, boolean keepDims);

public static long[] median(long handle, int axis, boolean keepDims) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long cumProd(long handle, int axis) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long cumProdWithType(long handle, int axis, int dataType) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long prod(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long cumProdWithAxis(long handle, int axis, boolean keepDims) {
    throw new UnsupportedOperationException("Not implemented");
}
// ---- Other ops: shape manipulation, logical ops, linear algebra, activations,
// pooling and selection. Same convention as above: native methods are implemented,
// the rest throw until the engine supports them.
public static native long normalize(long handle, double p, long dim, double eps);

public static long rot90(long handle, int times, int[] axes) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long[] split(long handle, long[] indices, int axis);

public static native long flatten(long handle);

public static native long flattenWithDims(long handle, int startDim, int endDim);

public static native long reshape(long handle, long[] shape);

public static native long expandDims(long handle, int axis);

public static native long squeeze(long handle, int[] axes);

public static long logicalAnd(long handle, long other) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long logicalOr(long handle, long other) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long logicalXor(long handle, long other) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long logicalNot(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long argSort(long handle, int axis, boolean ascending) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long sort(long handle, int axis, boolean ascending) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long softmax(long handle, int axis);

public static native long logSoftmax(long handle, int axis);

public static native long cumSum(long handle, int axis);

public static long isInf(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long isNaN(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long tile(long handle, long[] repeats) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long tileWithAxis(long handle, int axis, long repeat) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long tileWithShape(long handle, long[] shape) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long repeat(long handle, long repeat, int axis) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long dot(long handle, long other) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long matmul(long handle, long other);

public static native long batchMatMul(long handle, long other);

public static native long clip(long handle, double min, double max);

public static native long transpose(long handle, int axis1, int axis2);

public static long flip(long handle, int[] axes) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long permute(long handle, int[] axes);

public static native long broadcast(long handle, long[] shape);

public static long nonZero(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long inverse(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long norm(long handle, int i, int[] ints, boolean keepDims) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long oneHot(long handle, int depth, float onValue, float offValue, int dataType) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long complex(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long real(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

// Activations.
public static native long sigmoid(long handle);

public static long softPlus(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long softSign(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long leakyRelu(long handle, float alpha);

public static long elu(long handle, float alpha) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long selu(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long relu(long handle);

public static native long gelu(long handle);

public static native long erf(long handle);

public static long erfinv(long handle) {
    throw new UnsupportedOperationException("Not implemented");
}

// Pooling.
public static long maxPool(
        long handle, long[] kernelShape, long[] stride, long[] padding, boolean ceilMode) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long adaptiveMaxPool(long handle, long[] shape) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long avgPool2d(long handle, long[] kernelShape, long[] stride);

public static long adaptiveAvgPool(long handle, long[] shape) {
    throw new UnsupportedOperationException("Not implemented");
}

public static long lpPool(
        long handle, float normType, long[] kernelShape, long[] stride, boolean ceilMode) {
    throw new UnsupportedOperationException("Not implemented");
}

// Selection / combination.
public static long where(long conditionHandle, long handle, long otherHandle) {
    throw new UnsupportedOperationException("Not implemented");
}

public static native long stack(long[] srcArray, int axis);

public static native long concat(long[] srcArray, int axis);

public static long pick(long handle, long pickHandle, int axis) {
    throw new UnsupportedOperationException("Not implemented");
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/package-info.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying native engine. */
package ai.djl.engine.rust;
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/zoo/RsModelZoo.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust.zoo;
import ai.djl.Application;
import ai.djl.engine.Engine;
import ai.djl.repository.RemoteRepository;
import ai.djl.repository.Repository;
import ai.djl.repository.Version;
import ai.djl.repository.VersionRange;
import ai.djl.repository.zoo.ModelLoader;
import ai.djl.repository.zoo.ModelZoo;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
/** RsModelZoo is a repository that contains HuggingFace models. */
/** RsModelZoo is a repository that contains HuggingFace models. */
public class RsModelZoo extends ModelZoo {

    private static final Repository REPOSITORY = new RemoteRepository("Rust", DJL_REPO_URL);
    private static final String GROUP_ID = "ai.djl.huggingface.rust";

    // Lazily populated on first loader access; guarded by double-checked locking.
    private volatile boolean initialized; // NOPMD

    RsModelZoo() {}

    /** {@inheritDoc} */
    @Override
    public String getGroupId() {
        return GROUP_ID;
    }

    /** {@inheritDoc} */
    @Override
    public Set<String> getSupportedEngines() {
        return Collections.singleton("Rust");
    }

    /** {@inheritDoc} */
    @Override
    public Collection<ModelLoader> getModelLoaders() {
        init();
        return super.getModelLoaders();
    }

    /** {@inheritDoc} */
    @Override
    public ModelLoader getModelLoader(String name) {
        init();
        return super.getModelLoader(name);
    }

    /** Registers the remote models on first use (double-checked locking, runs once). */
    private void init() {
        if (initialized) {
            return;
        }
        synchronized (RsModelZoo.class) {
            if (initialized) {
                return;
            }
            Version djlVersion = new Version(Engine.getDjlVersion());
            addModels(Application.NLP.TEXT_EMBEDDING, djlVersion);
            addModels(Application.NLP.TEXT_CLASSIFICATION, djlVersion);
            initialized = true;
        }
    }

    /**
     * Registers all usable models of the given application.
     *
     * @param app the application to list models for
     * @param djlVersion the current DJL version, used to filter incompatible models
     */
    private void addModels(Application app, Version djlVersion) {
        Map<String, Map<String, Object>> listing = listModels(REPOSITORY, app);
        for (Map.Entry<String, Map<String, Object>> entry : listing.entrySet()) {
            Map<String, Object> metadata = entry.getValue();
            // Skip models whose conversion failed upstream.
            if ("failed".equals(metadata.get("result"))) {
                continue;
            }
            // Skip models that require a different DJL version.
            String requires = (String) metadata.get("requires");
            if (requires != null && !VersionRange.parse(requires).contains(djlVersion)) {
                continue;
            }
            addModel(REPOSITORY.model(app, GROUP_ID, entry.getKey(), "0.0.1"));
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/zoo/RsZooProvider.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.engine.rust.zoo;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooProvider;
/**
* An Rust model zoo provider implements the {@link ai.djl.repository.zoo.ZooProvider} interface.
*/
/**
 * A Rust model zoo provider that implements the {@link ai.djl.repository.zoo.ZooProvider}
 * interface, exposing the {@link RsModelZoo} for service discovery.
 */
public class RsZooProvider implements ZooProvider {

    /** {@inheritDoc} */
    @Override
    public ModelZoo getModelZoo() {
        return new RsModelZoo();
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/engine/rust/zoo/package-info.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the built-in {@link ai.djl.engine.rust.zoo.RsModelZoo}. */
package ai.djl.engine.rust.zoo;
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/Encoding.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.tokenizers;
import ai.djl.huggingface.tokenizers.jni.CharSpan;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import java.util.Arrays;
/** A class holds token encoding information. */
/** A class holds token encoding information. */
public class Encoding {

    private long[] ids;
    private long[] typeIds;
    private String[] tokens;
    private long[] wordIds;
    private long[] sequenceIds;
    private long[] attentionMask;
    private long[] specialTokenMask;
    private CharSpan[] charTokenSpans;
    private Encoding[] overflowing;
    private boolean exceedMaxLength;

    protected Encoding(
            long[] ids,
            long[] typeIds,
            String[] tokens,
            long[] wordIds,
            long[] sequenceIds,
            long[] attentionMask,
            long[] specialTokenMask,
            CharSpan[] charTokenSpans,
            boolean exceedMaxLength,
            Encoding[] overflowing) {
        this.ids = ids;
        this.typeIds = typeIds;
        this.tokens = tokens;
        this.wordIds = wordIds;
        this.sequenceIds = sequenceIds;
        this.attentionMask = attentionMask;
        this.specialTokenMask = specialTokenMask;
        this.charTokenSpans = charTokenSpans;
        this.exceedMaxLength = exceedMaxLength;
        this.overflowing = overflowing;
    }

    /**
     * Returns the {@link NDList} representation of the encodings.
     *
     * @param encodings the {@code Encoding} batch
     * @param manager the {@link NDManager} to create the NDList
     * @param withTokenType true to include the token type id
     * @param int32 true to use int32 datatype
     * @return the {@link NDList}
     */
    public static NDList toNDList(
            Encoding[] encodings, NDManager manager, boolean withTokenType, boolean int32) {
        // The two branches share the same batch-assembly structure and differ only
        // in element type; each builds ids, attentionMask and (optionally) typeIds.
        NDList list = new NDList();
        if (int32) {
            int[][] ids = new int[encodings.length][];
            int[][] attentionMask = new int[encodings.length][];
            int[][] typeIds = new int[encodings.length][];
            for (int i = 0; i < encodings.length; i++) {
                ids[i] = toIntArray(encodings[i].getIds());
                attentionMask[i] = toIntArray(encodings[i].getAttentionMask());
                if (withTokenType) {
                    typeIds[i] = toIntArray(encodings[i].getTypeIds());
                }
            }
            list.add(manager.create(ids));
            list.add(manager.create(attentionMask));
            if (withTokenType) {
                list.add(manager.create(typeIds));
            }
        } else {
            long[][] ids = new long[encodings.length][];
            long[][] attentionMask = new long[encodings.length][];
            long[][] typeIds = new long[encodings.length][];
            for (int i = 0; i < encodings.length; i++) {
                ids[i] = encodings[i].getIds();
                attentionMask[i] = encodings[i].getAttentionMask();
                if (withTokenType) {
                    typeIds[i] = encodings[i].getTypeIds();
                }
            }
            list.add(manager.create(ids));
            list.add(manager.create(attentionMask));
            if (withTokenType) {
                list.add(manager.create(typeIds));
            }
        }
        return list;
    }

    /**
     * Returns the {@link NDList} representation of the encoding.
     *
     * @param manager the {@link NDManager} to create the NDList
     * @param withTokenType true to include the token type id
     * @param int32 true to use int32 datatype
     * @return the {@link NDList}
     */
    public NDList toNDList(NDManager manager, boolean withTokenType, boolean int32) {
        // Converting encoding to int32 NDList because candle can't convert int64 to fp16 in cuda
        NDList list = new NDList(withTokenType ? 3 : 2);
        if (int32) {
            list.add(manager.create(toIntArray(ids)));
            list.add(manager.create(toIntArray(attentionMask)));
            if (withTokenType) {
                list.add(manager.create(toIntArray(typeIds)));
            }
        } else {
            list.add(manager.create(ids));
            list.add(manager.create(attentionMask));
            if (withTokenType) {
                list.add(manager.create(typeIds));
            }
        }
        return list;
    }

    /**
     * Narrows a {@code long[]} to an {@code int[]} (values are token/mask ids, well within
     * int range).
     *
     * @param values the values to narrow
     * @return a new {@code int[]} with each element cast to int
     */
    private static int[] toIntArray(long[] values) {
        int[] ret = new int[values.length];
        for (int i = 0; i < values.length; i++) {
            ret[i] = (int) values[i];
        }
        return ret;
    }

    /**
     * Returns the token ids.
     *
     * @return the token ids
     */
    public long[] getIds() {
        return ids;
    }

    /**
     * Returns the token type ids.
     *
     * @return the token type ids
     */
    public long[] getTypeIds() {
        return typeIds;
    }

    /**
     * Returns the tokens.
     *
     * @return the tokens
     */
    public String[] getTokens() {
        return tokens;
    }

    /**
     * Returns the word ids.
     *
     * @return the word ids
     */
    public long[] getWordIds() {
        return wordIds;
    }

    /**
     * Returns the sequence ids.
     *
     * @return the sequence ids
     */
    public long[] getSequenceIds() {
        return sequenceIds;
    }

    /**
     * Returns the attention masks.
     *
     * @return the attention masks
     */
    public long[] getAttentionMask() {
        return attentionMask;
    }

    /**
     * Returns the special token masks.
     *
     * @return the special token masks
     */
    public long[] getSpecialTokenMask() {
        return specialTokenMask;
    }

    /**
     * Returns char token spans.
     *
     * @return char token spans
     */
    public CharSpan[] getCharTokenSpans() {
        return charTokenSpans;
    }

    /**
     * Returns if tokens exceed max length.
     *
     * @return {@code true} if tokens exceed max length
     */
    public boolean exceedMaxLength() {
        return exceedMaxLength;
    }

    /**
     * Returns an array of overflowing encodings.
     *
     * @return the array of overflowing encodings
     */
    public Encoding[] getOverflowing() {
        return overflowing;
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/HuggingFaceTokenizer.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.tokenizers;
import ai.djl.huggingface.tokenizers.jni.CharSpan;
import ai.djl.huggingface.tokenizers.jni.LibUtils;
import ai.djl.huggingface.tokenizers.jni.TokenizersLibrary;
import ai.djl.modality.nlp.preprocess.Tokenizer;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.util.Ec2Utils;
import ai.djl.util.NativeResource;
import ai.djl.util.PairList;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.Normalizer;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Stream;
/**
* {@code HuggingFaceTokenizer} is a Huggingface tokenizer implementation of the {@link Tokenizer}
* interface that converts sentences into token.
*/
public final class HuggingFaceTokenizer extends NativeResource<Long> implements Tokenizer {
private static final Logger logger = LoggerFactory.getLogger(HuggingFaceTokenizer.class);

// Encoding behavior resolved from user options and/or tokenizer_config.json.
private boolean addSpecialTokens; // encode with model-specific special tokens
private boolean withOverflowingTokens; // include overflowing tokens in results
private Locale doLowerCase; // non-null => lower-case inputs with this locale
private TruncationStrategy truncation;
private PaddingStrategy padding;
private int maxLength;
private int stride; // presumably overlap between overflow windows — confirm in native lib
private int padToMultipleOf;
private int modelMaxLength; // defaults to 512 when not configured
private boolean cleanupTokenizationSpaces;
private boolean stripAccents;
private boolean addPrefixSpace;
/**
 * Wraps a native tokenizer handle and resolves encoding settings.
 *
 * <p>Settings are layered: native tokenizer defaults, then user supplied {@code options},
 * then {@code config} (applied last, so its values win where present).
 *
 * @param handle the native tokenizer handle
 * @param options user supplied tokenizer options (may be null)
 * @param config parsed tokenizer config (may be null)
 * @param padInfo resolved pad token information (may be null)
 */
private HuggingFaceTokenizer(
        long handle,
        Map<String, String> options,
        TokenizerConfig config,
        PadTokenResolver.PadInfo padInfo) {
    super(handle);
    truncation = TruncationStrategy.LONGEST_FIRST;
    padding = PaddingStrategy.LONGEST;
    // Seed limits from the native tokenizer's own configuration.
    maxLength = TokenizersLibrary.LIB.getMaxLength(handle);
    stride = TokenizersLibrary.LIB.getStride(handle);
    padToMultipleOf = TokenizersLibrary.LIB.getPadToMultipleOf(handle);
    if (options != null) {
        String val = options.getOrDefault("addSpecialTokens", "true");
        addSpecialTokens = Boolean.parseBoolean(val);
        val = options.getOrDefault("withOverflowingTokens", "false");
        withOverflowingTokens = Boolean.parseBoolean(val);
        modelMaxLength = ArgumentsUtil.intValue(options, "modelMaxLength", 512);
        if (options.containsKey("truncation")) {
            truncation = TruncationStrategy.fromValue(options.get("truncation"));
        }
        if (options.containsKey("padding")) {
            padding = PaddingStrategy.fromValue(options.get("padding"));
        }
        maxLength = ArgumentsUtil.intValue(options, "maxLength", maxLength);
        stride = ArgumentsUtil.intValue(options, "stride", stride);
        padToMultipleOf = ArgumentsUtil.intValue(options, "padToMultipleOf", padToMultipleOf);
        // "doLowerCase" accepts "true"/"false" or a language tag naming the casing locale.
        String lowerCase = options.getOrDefault("doLowerCase", "false");
        if ("true".equals(lowerCase)) {
            this.doLowerCase = Locale.getDefault();
        } else if (!"false".equals(lowerCase)) {
            this.doLowerCase = Locale.forLanguageTag(lowerCase);
        }
    } else {
        addSpecialTokens = true;
        modelMaxLength = 512;
    }
    if (config != null) {
        // Applied after options parsing, so config values override option values.
        applyConfig(config);
    }
    // Finalizes truncation/padding state; defined elsewhere in this class.
    updateTruncationAndPadding(padInfo);
}
/**
 * Applies values from the tokenizer config, overriding any previously parsed options.
 *
 * @param config the parsed tokenizer config
 */
private void applyConfig(TokenizerConfig config) {
    modelMaxLength = config.getModelMaxLength();
    if (config.hasExplicitDoLowerCase() && config.isDoLowerCase()) {
        doLowerCase = Locale.getDefault();
    }
    cleanupTokenizationSpaces = config.isCleanUpTokenizationSpaces();
    // If any special token is configured, force special-token encoding on.
    String[] specialTokens = {
        config.getBosToken(),
        config.getClsToken(),
        config.getEosToken(),
        config.getSepToken(),
        config.getUnkToken(),
        config.getPadToken()
    };
    for (String token : specialTokens) {
        if (token != null && !token.isEmpty()) {
            addSpecialTokens = true;
            break;
        }
    }
    if (config.hasExplicitStripAccents()) {
        stripAccents = config.isStripAccents();
    }
    if (config.hasExplicitAddPrefixSpace()) {
        addPrefixSpace = config.isAddPrefixSpace();
    }
}
/**
 * Creates a pre-trained {@code HuggingFaceTokenizer} instance from huggingface hub.
 *
 * @param name the name of the huggingface tokenizer
 * @return a {@code HuggingFaceTokenizer} instance
 */
public static HuggingFaceTokenizer newInstance(String name) {
    return newInstance(name, null);
}

/**
 * Creates a pre-trained {@code HuggingFaceTokenizer} instance from huggingface hub.
 *
 * @param identifier the identifier of the huggingface tokenizer
 * @param options tokenizer options
 * @return a {@code HuggingFaceTokenizer} instance
 */
public static HuggingFaceTokenizer newInstance(String identifier, Map<String, String> options) {
    Ec2Utils.callHome("Huggingface");
    LibUtils.checkStatus();
    // An explicit "hf_token" option takes precedence over the HF_TOKEN env/system property.
    String token = Utils.getEnvOrSystemProperty("HF_TOKEN");
    if (options != null) {
        token = options.getOrDefault("hf_token", token);
    }
    return new HuggingFaceTokenizer(
            TokenizersLibrary.LIB.createTokenizer(identifier, token), options, null, null);
}
/**
 * Creates a pre-trained {@code HuggingFaceTokenizer} instance from existing models.
 *
 * @param modelPath the directory or file path of the model location
 * @return a {@code HuggingFaceTokenizer} instance
 * @throws IOException when IO operation fails in loading a resource
 */
public static HuggingFaceTokenizer newInstance(Path modelPath) throws IOException {
    return newInstance(modelPath, null);
}

/**
 * Creates a pre-trained {@code HuggingFaceTokenizer} instance from existing models.
 *
 * @param modelPath the directory or file path of the model location
 * @param options tokenizer options
 * @return a {@code HuggingFaceTokenizer} instance
 * @throws IOException when IO operation fails in loading a resource
 */
public static HuggingFaceTokenizer newInstance(Path modelPath, Map<String, String> options)
        throws IOException {
    // A directory is expected to contain the serialized tokenizer as tokenizer.json.
    Path file = Files.isDirectory(modelPath) ? modelPath.resolve("tokenizer.json") : modelPath;
    try (InputStream is = Files.newInputStream(file)) {
        return newInstance(is, options);
    }
}
/**
 * Creates a pre-trained {@code HuggingFaceTokenizer} instance from existing models.
 *
 * @param modelPath the directory or file path of the model location
 * @param configPath the file path of the tokenizer config (tokenizer_config.json)
 * @param options tokenizer options
 * @return a {@code HuggingFaceTokenizer} instance
 * @throws IOException when IO operation fails in loading a resource
 */
public static HuggingFaceTokenizer newInstance(
        Path modelPath, String configPath, Map<String, String> options) throws IOException {
    // A directory is expected to contain the serialized tokenizer as tokenizer.json.
    if (Files.isDirectory(modelPath)) {
        modelPath = modelPath.resolve("tokenizer.json");
    }
    TokenizerConfig config = TokenizerConfig.load(Paths.get(configPath));
    try (InputStream is = Files.newInputStream(modelPath)) {
        return newInstance(is, options, config);
    }
}
/**
 * Creates a pre-trained BPE {@code HuggingFaceTokenizer} instance from existing models.
 *
 * @param vocab the BPE vocabulary file
 * @param merges the BPE merges file
 * @param options tokenizer options
 * @return a {@code HuggingFaceTokenizer} instance
 * @throws IOException when IO operation fails in loading a resource
 */
public static HuggingFaceTokenizer newInstance(
        Path vocab, Path merges, Map<String, String> options) throws IOException {
    Ec2Utils.callHome("Huggingface");
    LibUtils.checkStatus();
    long handle =
            TokenizersLibrary.LIB.createBpeTokenizer(
                    vocab.toAbsolutePath().toString(), merges.toAbsolutePath().toString());
    return new HuggingFaceTokenizer(handle, options, null, null);
}
/**
 * Creates a pre-trained {@code HuggingFaceTokenizer} instance from {@code InputStream}.
 *
 * @param is {@code InputStream}
 * @param options tokenizer options
 * @return a {@code HuggingFaceTokenizer} instance
 * @throws IOException when IO operation fails in loading a resource
 */
public static HuggingFaceTokenizer newInstance(InputStream is, Map<String, String> options)
        throws IOException {
    Ec2Utils.callHome("Huggingface");
    LibUtils.checkStatus();
    String json = Utils.toString(is);
    return new HuggingFaceTokenizer(
            TokenizersLibrary.LIB.createTokenizerFromString(json), options, null, null);
}

/**
 * Creates a pre-trained {@code HuggingFaceTokenizer} instance from {@code InputStream}.
 *
 * @param is {@code InputStream}
 * @param options tokenizer options
 * @param config TokenizerConfig with special tokens and padding details
 * @return a {@code HuggingFaceTokenizer} instance
 * @throws IOException when IO operation fails in loading a resource
 */
public static HuggingFaceTokenizer newInstance(
        InputStream is, Map<String, String> options, TokenizerConfig config)
        throws IOException {
    Ec2Utils.callHome("Huggingface");
    LibUtils.checkStatus();
    String json = Utils.toString(is);
    // Resolve the pad token before handing the JSON to the native tokenizer.
    PadTokenResolver.PadInfo padInfo = PadTokenResolver.extractPadInfo(json, config);
    return new HuggingFaceTokenizer(
            TokenizersLibrary.LIB.createTokenizerFromString(json), options, config, padInfo);
}
/**
 * Returns the version of the Huggingface tokenizer.
 *
 * @return the version number of the Huggingface tokenizer
 */
public String getVersion() {
    return Platform.detectPlatform("tokenizers").getVersion();
}

/** {@inheritDoc} */
@Override
public List<String> tokenize(String sentence) {
    return Arrays.asList(encode(sentence).getTokens());
}

/** {@inheritDoc} */
@Override
public String buildSentence(List<String> tokens) {
    // TODO: naive join that only undoes WordPiece "##" continuation markers
    return String.join(" ", tokens).replace(" ##", "").trim();
}

/** {@inheritDoc} */
@Override
public void close() {
    // Atomically claim the handle so the native tokenizer is freed at most once.
    Long nativeHandle = handle.getAndSet(null);
    if (nativeHandle != null) {
        TokenizersLibrary.LIB.deleteTokenizer(nativeHandle);
    }
}
/**
 * Returns the {@code Encoding} of the input sentence.
 *
 * @param text the input sentence
 * @param addSpecialTokens whether to encode the sequence with special tokens relative to their
 *     model
 * @param withOverflowingTokens whether to return overflowing tokens
 * @return the {@code Encoding} of the input sentence
 */
public Encoding encode(String text, boolean addSpecialTokens, boolean withOverflowingTokens) {
    if (text == null) {
        throw new NullPointerException("text cannot be null");
    }
    String input = prepareForTokenization(text);
    long ref = TokenizersLibrary.LIB.encode(getHandle(), input, addSpecialTokens);
    return toEncoding(ref, withOverflowingTokens);
}

/**
 * Returns the {@code Encoding} of the input sentence, using this tokenizer's configured
 * special-token and overflow settings.
 *
 * @param text the input sentence
 * @return the {@code Encoding} of the input sentence
 */
public Encoding encode(String text) {
    return encode(text, addSpecialTokens, withOverflowingTokens);
}
/**
 * Returns the {@code Encoding} of the input sentence pair.
 *
 * @param text the input sentence
 * @param textPair the second input sentence
 * @param addSpecialTokens whether to encode the sequence with special tokens relative to their
 *     model
 * @param withOverflowingTokens whether to return overflowing tokens
 * @return the {@code Encoding} of the input sentence
 */
public Encoding encode(
        String text, String textPair, boolean addSpecialTokens, boolean withOverflowingTokens) {
    if (text == null || textPair == null) {
        throw new NullPointerException("text/text_pair cannot be null");
    }
    String first = text;
    String second = textPair;
    if (doLowerCase != null) {
        first = first.toLowerCase(doLowerCase);
        second = second.toLowerCase(doLowerCase);
    }
    long ref = TokenizersLibrary.LIB.encodeDual(getHandle(), first, second, addSpecialTokens);
    return toEncoding(ref, withOverflowingTokens);
}

/**
 * Returns the {@code Encoding} of the input sentence pair, using this tokenizer's configured
 * special-token and overflow settings.
 *
 * @param text the input sentence
 * @param textPair the second input sentence
 * @return the {@code Encoding} of the input sentence
 */
public Encoding encode(String text, String textPair) {
    return encode(text, textPair, addSpecialTokens, withOverflowingTokens);
}
/**
 * Returns the {@code Encoding} of the input sentences.
 *
 * @param inputs the input sentences
 * @param addSpecialTokens whether to encode the sequence with special tokens relative to their
 *     model
 * @param withOverflowingTokens whether to return overflowing tokens
 * @return the {@code Encoding} of the input sentences
 */
public Encoding encode(
        List<String> inputs, boolean addSpecialTokens, boolean withOverflowingTokens) {
    return encode(inputs.toArray(Utils.EMPTY_ARRAY), addSpecialTokens, withOverflowingTokens);
}

/**
 * Returns the {@code Encoding} of the input sentences, using this tokenizer's configured
 * special-token and overflow settings.
 *
 * @param inputs the input sentences
 * @return the {@code Encoding} of the input sentences
 */
public Encoding encode(List<String> inputs) {
    return encode(inputs, addSpecialTokens, withOverflowingTokens);
}
/**
 * Returns the {@code Encoding} of the input sentences.
 *
 * @param inputs the input sentences
 * @param addSpecialTokens whether to encode the sequence with special tokens relative to their
 *     model
 * @param withOverflowingTokens whether to return overflowing tokens
 * @return the {@code Encoding} of the input sentences
 * @throws NullPointerException if any input element is null
 */
public Encoding encode(
        String[] inputs, boolean addSpecialTokens, boolean withOverflowingTokens) {
    // Validate before lower-casing so a null element always yields a descriptive NPE
    // (previously a null element threw a bare NPE when doLowerCase was set).
    if (Arrays.stream(inputs).anyMatch(Objects::isNull)) {
        throw new NullPointerException("input text cannot be null");
    }
    if (doLowerCase != null) {
        for (int i = 0; i < inputs.length; ++i) {
            inputs[i] = inputs[i].toLowerCase(doLowerCase);
        }
    }
    long encoding = TokenizersLibrary.LIB.encodeList(getHandle(), inputs, addSpecialTokens);
    return toEncoding(encoding, withOverflowingTokens);
}

/**
 * Returns the {@code Encoding} of the input sentences, using this tokenizer's configured
 * special-token and overflow settings.
 *
 * @param inputs the input sentences
 * @return the {@code Encoding} of the input sentences
 */
public Encoding encode(String[] inputs) {
    return encode(inputs, addSpecialTokens, withOverflowingTokens);
}
/**
 * Returns the {@code Encoding} of the input sentence in batch.
 *
 * @param inputs the batch of input sentence
 * @param addSpecialTokens whether to encode the sequence with special tokens relative to their
 *     model
 * @param withOverflowingTokens whether to return overflowing tokens
 * @return the {@code Encoding} of the input sentence in batch
 */
public Encoding[] batchEncode(
        List<String> inputs, boolean addSpecialTokens, boolean withOverflowingTokens) {
    return batchEncode(
            inputs.toArray(Utils.EMPTY_ARRAY), addSpecialTokens, withOverflowingTokens);
}

/**
 * Returns the {@code Encoding} of the input sentence in batch, using this tokenizer's
 * configured special-token and overflow settings.
 *
 * @param inputs the batch of input sentence
 * @return the {@code Encoding} of the input sentence in batch
 */
public Encoding[] batchEncode(List<String> inputs) {
    return batchEncode(inputs, addSpecialTokens, withOverflowingTokens);
}
/**
* Returns the {@code Encoding} of the input sentence in batch.
*
* @param inputs the batch of input sentence
* @param addSpecialTokens whether to encode the sequence with special tokens relative to their
* model
* @param withOverflowingTokens whether to return overflowing tokens
* @return the {@code Encoding} of the input sentence in batch
*/
public Encoding[] batchEncode(
String[] inputs, boolean addSpecialTokens, boolean withOverflowingTokens) {
if (doLowerCase != null) {
for (int i = 0; i < inputs.length; ++i) {
inputs[i] = inputs[i].toLowerCase(doLowerCase);
}
} else if (Arrays.stream(inputs).anyMatch(Objects::isNull)) {
throw new NullPointerException("input text cannot be null");
}
long[] encodings = TokenizersLibrary.LIB.batchEncode(getHandle(), inputs, addSpecialTokens);
Encoding[] ret = new Encoding[encodings.length];
for (int i = 0; i < encodings.length; ++i) {
ret[i] = toEncoding(encodings[i], withOverflowingTokens);
}
return ret;
}
/**
* Returns the {@code Encoding} of the input sentence in batch.
*
* @param inputs the batch of input sentence
* @return the {@code Encoding} of the input sentence in batch
*/
public Encoding[] batchEncode(String[] inputs) {
return batchEncode(inputs, addSpecialTokens, withOverflowingTokens);
}
/**
* Returns the {@code Encoding} of the input text pair in batch.
*
* @param inputs the batch of input text pair
* @param addSpecialTokens whether to encode the sequence with special tokens relative to their
* model
* @param withOverflowingTokens whether to return overflowing tokens
* @return the {@code Encoding} of the input text pair in batch
*/
public Encoding[] batchEncode(
PairList<String, String> inputs,
boolean addSpecialTokens,
boolean withOverflowingTokens) {
String[] text = inputs.keyArray(Utils.EMPTY_ARRAY);
String[] textPair = inputs.valueArray(Utils.EMPTY_ARRAY);
if (doLowerCase != null) {
for (int i = 0; i < text.length; ++i) {
text[i] = text[i].toLowerCase(doLowerCase);
}
for (int i = 0; i < textPair.length; ++i) {
textPair[i] = textPair[i].toLowerCase(doLowerCase);
}
} else {
if (inputs.keys().stream().anyMatch(Objects::isNull)) {
throw new NullPointerException("text pair key cannot be null");
}
if (inputs.values().stream().anyMatch(Objects::isNull)) {
throw new NullPointerException("text pair value cannot be null");
}
}
long[] encodings =
TokenizersLibrary.LIB.batchEncodePair(
getHandle(), text, textPair, addSpecialTokens);
Encoding[] ret = new Encoding[encodings.length];
for (int i = 0; i < encodings.length; ++i) {
ret[i] = toEncoding(encodings[i], withOverflowingTokens);
}
return ret;
}
/**
* Returns the {@code Encoding} of the input text pair in batch.
*
* @param inputs the batch of input text pair
* @return the {@code Encoding} of the input text pair in batch
*/
public Encoding[] batchEncode(PairList<String, String> inputs) {
return batchEncode(inputs, addSpecialTokens, withOverflowingTokens);
}
/**
* Returns the decoded String from the input ids.
*
* @param ids the input ids
* @param skipSpecialTokens whether to remove special tokens in the decoding
* @return the decoded String from the input ids
*/
public String decode(long[] ids, boolean skipSpecialTokens) {
String decodedText = TokenizersLibrary.LIB.decode(getHandle(), ids, skipSpecialTokens);
return this.cleanupTokenizationSpaces ? cleanUpTokenization(decodedText) : decodedText;
}
/**
* Returns the decoded String from the input ids.
*
* @param ids the input ids
* @return the decoded String from the input ids
*/
public String decode(long[] ids) {
return decode(ids, !addSpecialTokens);
}
/**
* Returns the decoded Strings from the input batch ids.
*
* @param batchIds the batch of id sequences to decode
* @param skipSpecialTokens whether to remove special tokens in the decoding
* @return the decoded Strings from the input batch ids
*/
public String[] batchDecode(long[][] batchIds, boolean skipSpecialTokens) {
return TokenizersLibrary.LIB.batchDecode(getHandle(), batchIds, skipSpecialTokens);
}
/**
* Returns the decoded Strings from the input batch ids.
*
* @param batchIds the batch of id sequences to decode
* @return the decoded Strings from the input batch ids
*/
public String[] batchDecode(long[][] batchIds) {
return batchDecode(batchIds, !addSpecialTokens);
}
/**
* Returns the truncation policy.
*
* @return the truncation policy
*/
public String getTruncation() {
return truncation.name();
}
/**
* Returns the padding policy.
*
* @return the padding policy
*/
public String getPadding() {
return padding.name();
}
/**
* Returns the max token length.
*
* @return the max token length
*/
public int getMaxLength() {
return maxLength;
}
/**
* Returns the stride to use in overflow overlap when truncating sequences longer than the model
* supports.
*
* @return the stride to use in overflow overlap when truncating sequences longer than the model
* supports
*/
public int getStride() {
return stride;
}
/**
* Returns the padToMultipleOf for padding.
*
* @return the padToMultipleOf for padding
*/
public int getPadToMultipleOf() {
return padToMultipleOf;
}
/**
* Creates a builder to build a {@code HuggingFaceTokenizer}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/**
* Creates a builder to build a {@code HuggingFaceTokenizer}.
*
* @param arguments the models' arguments
* @return a new builder
*/
public static Builder builder(Map<String, ?> arguments) {
Builder builder = builder();
builder.configure(arguments);
return builder;
}
private String prepareForTokenization(String text) {
if (addPrefixSpace && !text.startsWith(" ")) {
text = " " + text;
}
if (doLowerCase != null) {
text = text.toLowerCase(doLowerCase);
}
if (stripAccents) {
text = Normalizer.normalize(text, Normalizer.Form.NFKD);
text = text.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
}
return text;
}
/*
* See: https://huggingface.co/docs/transformers/pad_truncation
*/
private void updateTruncationAndPadding(PadTokenResolver.PadInfo padInfo) {
boolean isTruncate = truncation != TruncationStrategy.DO_NOT_TRUNCATE;
if (padding == PaddingStrategy.MAX_LENGTH || isTruncate) {
if (maxLength == -1) {
logger.warn(
"maxLength is not explicitly specified, use modelMaxLength: {}",
modelMaxLength);
maxLength = modelMaxLength;
} else if (maxLength > modelMaxLength) {
logger.warn(
"maxLength is greater then modelMaxLength, change to: {}", modelMaxLength);
maxLength = modelMaxLength;
}
if (padding == PaddingStrategy.MAX_LENGTH && isTruncate && padToMultipleOf != 0) {
int remainder = maxLength % padToMultipleOf;
if (remainder != 0) {
int newMaxLength = maxLength + padToMultipleOf - maxLength % padToMultipleOf;
if (newMaxLength > modelMaxLength) {
newMaxLength -= padToMultipleOf;
}
logger.warn(
"maxLength ({}) is not a multiple of padToMultipleOf ({}), change to:"
+ " {}",
maxLength,
padToMultipleOf,
newMaxLength);
maxLength = newMaxLength;
}
}
}
if (isTruncate) {
TokenizersLibrary.LIB.setTruncation(getHandle(), maxLength, truncation.name(), stride);
} else {
TokenizersLibrary.LIB.disableTruncation(getHandle());
}
updatePadding(padInfo);
}
private void updatePadding(PadTokenResolver.PadInfo padInfo) {
if (padding == PaddingStrategy.DO_NOT_PAD) {
TokenizersLibrary.LIB.disablePadding(getHandle());
return;
}
if (padInfo != null) {
TokenizersLibrary.LIB.setPaddingWithTokenAndId(
getHandle(),
maxLength,
padding.name(),
padInfo.getPadToken(),
padInfo.getPadId(),
padToMultipleOf);
} else {
TokenizersLibrary.LIB.setPadding(
getHandle(), maxLength, padding.name(), padToMultipleOf);
}
}
    /**
     * Copies all fields of a native encoding handle into a Java {@code Encoding} and releases
     * the native handle.
     *
     * <p>The extraction order matters: every native getter must run before
     * {@code deleteEncoding}, and overflowing encodings are converted recursively (each
     * recursive call deletes its own handle).
     *
     * @param encoding the native encoding handle
     * @param withOverflowingTokens whether to also materialize overflowing encodings
     * @return the materialized {@code Encoding}
     */
    private Encoding toEncoding(long encoding, boolean withOverflowingTokens) {
        long[] ids = TokenizersLibrary.LIB.getTokenIds(encoding);
        long[] typeIds = TokenizersLibrary.LIB.getTypeIds(encoding);
        String[] tokens = TokenizersLibrary.LIB.getTokens(encoding);
        long[] wordIds = TokenizersLibrary.LIB.getWordIds(encoding);
        long[] sequenceIds = TokenizersLibrary.LIB.getSequenceIds(encoding);
        long[] attentionMask = TokenizersLibrary.LIB.getAttentionMask(encoding);
        long[] specialTokenMask = TokenizersLibrary.LIB.getSpecialTokenMask(encoding);
        CharSpan[] charSpans = TokenizersLibrary.LIB.getTokenCharSpans(encoding);
        int overFlowCount = TokenizersLibrary.LIB.getOverflowCount(encoding);
        // A non-zero overflow count means the input exceeded maxLength and was truncated.
        boolean exceedMaxLength = overFlowCount > 0;
        Encoding[] overflowing;
        if (withOverflowingTokens) {
            long[] overflowingHandles = TokenizersLibrary.LIB.getOverflowing(encoding);
            overflowing = new Encoding[overflowingHandles.length];
            for (int i = 0; i < overflowingHandles.length; ++i) {
                // Recursive conversion; each call frees its own native handle.
                overflowing[i] = toEncoding(overflowingHandles[i], true);
            }
        } else {
            overflowing = new Encoding[0];
        }
        // Free the native encoding only after all fields have been copied out.
        TokenizersLibrary.LIB.deleteEncoding(encoding);
        return new Encoding(
                ids,
                typeIds,
                tokens,
                wordIds,
                sequenceIds,
                attentionMask,
                specialTokenMask,
                charSpans,
                exceedMaxLength,
                overflowing);
    }
private String cleanUpTokenization(String text) {
return text.replace(" .", ".")
.replace(" ?", "?")
.replace(" !", "!")
.replace(" ,", ",")
.replace(" ' ", "'")
.replace(" n't", "n't")
.replace(" 'm", "'m")
.replace(" 's", "'s")
.replace(" 've", "'ve")
.replace(" 're", "'re");
}
    /** {@inheritDoc} */
    @SuppressWarnings("deprecation")
    @Override
    protected void finalize() throws Throwable {
        // Safety net: release native resources if the caller forgot to close().
        // NOTE(review): finalize() is deprecated; java.lang.ref.Cleaner would be the
        // modern replacement — confirm against the project's minimum JDK.
        close();
        super.finalize();
    }
/** An enum to represent the different available truncation strategies. */
private enum TruncationStrategy {
LONGEST_FIRST,
ONLY_FIRST,
ONLY_SECOND,
DO_NOT_TRUNCATE;
/**
* Converts the String to the matching TruncationStrategy type.
*
* @param value the String to convert
* @return the matching PaddingStrategy type
* @throws IllegalArgumentException if the value does not match any TruncationStrategy type
*/
static TruncationStrategy fromValue(String value) {
if ("true".equals(value)) {
return TruncationStrategy.LONGEST_FIRST;
} else if ("false".equals(value)) {
return TruncationStrategy.DO_NOT_TRUNCATE;
}
for (TruncationStrategy strategy : TruncationStrategy.values()) {
if (strategy.name().equalsIgnoreCase(value)) {
return strategy;
}
}
throw new IllegalArgumentException("Invalid TruncationStrategy: " + value);
}
}
/** An enum to represent the different available padding strategies. */
private enum PaddingStrategy {
LONGEST,
MAX_LENGTH,
DO_NOT_PAD;
/**
* Converts the String to the matching PaddingStrategy type.
*
* @param value the String to convert
* @return the matching PaddingStrategy type
* @throws IllegalArgumentException if the value does not match any PaddingStrategy type
*/
static PaddingStrategy fromValue(String value) {
if ("true".equals(value)) {
return PaddingStrategy.LONGEST;
} else if ("false".equals(value)) {
return PaddingStrategy.DO_NOT_PAD;
}
for (PaddingStrategy strategy : PaddingStrategy.values()) {
if (strategy.name().equalsIgnoreCase(value)) {
return strategy;
}
}
throw new IllegalArgumentException("Invalid PaddingStrategy: " + value);
}
}
    /**
     * The builder for creating huggingface tokenizer.
     *
     * <p>All settings are accumulated as string values in an options map and forwarded verbatim
     * to {@code HuggingFaceTokenizer.newInstance(...)}; the option keys are part of the contract
     * with that factory.
     */
    public static final class Builder {
        private NDManager manager;
        private Map<String, String> options;
        Builder() {
            options = new ConcurrentHashMap<>();
            // Match HuggingFace default: special tokens are added unless explicitly disabled.
            options.put("addSpecialTokens", "true");
        }
        /**
         * Sets the optional manager used to manage the lifecycle of the tokenizer.
         *
         * @param manager the {@link NDManager}
         * @return this builder
         */
        public Builder optManager(NDManager manager) {
            this.manager = manager;
            return this;
        }
        /**
         * Sets the name of the tokenizer.
         *
         * @param tokenizerName the name of the tokenizer
         * @return this builder
         */
        public Builder optTokenizerName(String tokenizerName) {
            options.put("tokenizer", tokenizerName);
            return this;
        }
        /**
         * Sets the file path of the tokenizer.
         *
         * @param tokenizerPath the path of the tokenizer
         * @return this builder
         */
        public Builder optTokenizerPath(Path tokenizerPath) {
            // NOTE(review): putIfAbsent means a previously-set path (e.g. from configure())
            // wins over this call, unlike every other setter which overwrites — confirm
            // this asymmetry is intentional.
            options.putIfAbsent("tokenizerPath", tokenizerPath.toString());
            return this;
        }
        /**
         * Sets if add special tokens.
         *
         * @param addSpecialTokens true to add special tokens
         * @return this builder
         */
        public Builder optAddSpecialTokens(boolean addSpecialTokens) {
            options.put("addSpecialTokens", String.valueOf(addSpecialTokens));
            return this;
        }
        /**
         * Sets whether to return overflowing tokens.
         *
         * @param withOverflowingTokens true to return overflowing tokens
         * @return this builder
         */
        public Builder optWithOverflowingTokens(boolean withOverflowingTokens) {
            options.put("withOverflowingTokens", String.valueOf(withOverflowingTokens));
            return this;
        }
        /**
         * Enables or Disables default truncation behavior for the tokenizer.
         *
         * @param enabled whether to enable default truncation behavior
         * @return this builder
         */
        public Builder optTruncation(boolean enabled) {
            options.put("truncation", String.valueOf(enabled));
            return this;
        }
        /**
         * Enables truncation to only truncate the first item.
         *
         * @return this builder
         */
        public Builder optTruncateFirstOnly() {
            options.put("truncation", TruncationStrategy.ONLY_FIRST.name());
            return this;
        }
        /**
         * Enables truncation to only truncate the second item.
         *
         * @return this builder
         */
        public Builder optTruncateSecondOnly() {
            options.put("truncation", TruncationStrategy.ONLY_SECOND.name());
            return this;
        }
        /**
         * Enables or Disables default padding behavior for the tokenizer.
         *
         * @param enabled whether to enable default padding behavior
         * @return this builder
         */
        public Builder optPadding(boolean enabled) {
            options.put("padding", String.valueOf(enabled));
            return this;
        }
        /**
         * Enables padding to pad sequences to previously specified maxLength, or modelMaxLength if
         * not specified.
         *
         * @return this builder
         */
        public Builder optPadToMaxLength() {
            options.put("padding", PaddingStrategy.MAX_LENGTH.name());
            return this;
        }
        /**
         * Sets maxLength for padding and truncation.
         *
         * @param maxLength the length to truncate and/or pad sequences to
         * @return this builder
         */
        public Builder optMaxLength(int maxLength) {
            options.put("maxLength", String.valueOf(maxLength));
            return this;
        }
        /**
         * Sets padToMultipleOf for padding.
         *
         * @param padToMultipleOf the multiple of sequences should be padded to
         * @return this builder
         */
        public Builder optPadToMultipleOf(int padToMultipleOf) {
            options.put("padToMultipleOf", String.valueOf(padToMultipleOf));
            return this;
        }
        /**
         * Sets the stride to use in overflow overlap when truncating sequences longer than the
         * model supports.
         *
         * @param stride the number of tokens to overlap when truncating long sequences
         * @return this builder
         */
        public Builder optStride(int stride) {
            options.put("stride", String.valueOf(stride));
            return this;
        }
        /**
         * Sets the doLowerCase for the tokenizer.
         *
         * @param doLowerCase {@code true} to enable convert to lowercase
         * @return this builder
         */
        public Builder optDoLowerCase(boolean doLowerCase) {
            options.put("doLowerCase", String.valueOf(doLowerCase));
            return this;
        }
        /**
         * Sets the doLowerCase for the tokenizer with specific locale.
         *
         * @param locale the locale to use when converting to lowercase
         * @return this builder
         */
        public Builder optDoLowerCase(String locale) {
            options.put("doLowerCase", locale);
            return this;
        }
        /**
         * Sets the tokenizer_config path.
         *
         * @param configPath the tokenizer_config path
         * @return this builder
         */
        public Builder optTokenizerConfigPath(String configPath) {
            options.put("tokenizerConfigPath", configPath);
            return this;
        }
        /**
         * Configures the builder with the arguments.
         *
         * <p>Every entry overwrites any previously set option with the same key.
         *
         * @param arguments the arguments
         */
        public void configure(Map<String, ?> arguments) {
            for (Map.Entry<String, ?> entry : arguments.entrySet()) {
                options.put(entry.getKey(), entry.getValue().toString());
            }
        }
        /**
         * Utility to make a tokenizer managed by the builder manager (if one is specified).
         *
         * @param tokenizer the tokenizer to manage
         * @return the updated tokenizer
         */
        private HuggingFaceTokenizer managed(HuggingFaceTokenizer tokenizer) {
            if (manager != null) {
                manager.attachInternal(tokenizer.getUid(), tokenizer);
            }
            return tokenizer;
        }
        /**
         * Builds the translator.
         *
         * <p>Resolution order: a tokenizer name (hub identifier) wins over a local path; a local
         * path is required otherwise and must exist on disk.
         *
         * @return the new translator
         * @throws IOException when IO operation fails in loading a resource
         */
        public HuggingFaceTokenizer build() throws IOException {
            String tokenizerName = options.get("tokenizer");
            if (tokenizerName != null) {
                return managed(HuggingFaceTokenizer.newInstance(tokenizerName, options));
            }
            String path = options.get("tokenizerPath");
            if (path == null) {
                throw new IllegalArgumentException("Missing tokenizer path.");
            }
            Path tokenizerPath = Paths.get(path);
            if (!Files.exists(tokenizerPath)) {
                throw new IOException("Tokenizer file not exists: " + tokenizerPath);
            }
            String configPath = options.get("tokenizerConfigPath");
            validateConfigPath(configPath);
            return managed(buildTokenizer(tokenizerPath, configPath, options));
        }
        // Fails fast when an explicitly supplied tokenizer_config.json path does not exist.
        private void validateConfigPath(String configPath) throws IOException {
            if (configPath != null && !Files.exists(Paths.get(configPath))) {
                throw new IOException("Tokenizer config file not exists: " + configPath);
            }
        }
        /**
         * Instantiates a tokenizer from a file or directory. A directory is searched for
         * tokenizer.json first, then a legacy BPE vocab.json + merges.txt pair.
         */
        private HuggingFaceTokenizer buildTokenizer(
                Path tokenizerPath, String configPath, Map<String, String> options)
                throws IOException {
            if (!Files.isDirectory(tokenizerPath)) {
                return configPath != null
                        ? HuggingFaceTokenizer.newInstance(tokenizerPath, configPath, options)
                        : HuggingFaceTokenizer.newInstance(tokenizerPath, options);
            }
            Path tokenizerFile = tokenizerPath.resolve("tokenizer.json");
            if (Files.exists(tokenizerFile)) {
                return configPath != null
                        ? HuggingFaceTokenizer.newInstance(tokenizerPath, configPath, options)
                        : HuggingFaceTokenizer.newInstance(tokenizerPath, options);
            }
            Path vocab = tokenizerPath.resolve("vocab.json");
            Path merges = tokenizerPath.resolve("merges.txt");
            if (Files.exists(vocab) && Files.exists(merges)) {
                if (configPath != null) {
                    logger.warn(
                            "Config file is not supported for BPE tokenizers, ignoring config path:"
                                    + " {}",
                            configPath);
                }
                return HuggingFaceTokenizer.newInstance(vocab, merges, options);
            }
            throw new IOException("tokenizer.json file not found.");
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/PadTokenResolver.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.tokenizers;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.StringReader;
/** The type Pad token resolver. */
/** The type Pad token resolver. */
public final class PadTokenResolver {
    private static final Logger logger = LoggerFactory.getLogger(PadTokenResolver.class);
    private PadTokenResolver() {
        throw new IllegalStateException("Utility class");
    }
    /**
     * Extracts pad token and ID from tokenizer.json and/or tokenizer_config.json. Follows HF
     * behavior: tokenizer.json takes precedence, config is fallback.
     *
     * @param tokenizerJson tokenizer.json content as string
     * @param config TokenizerConfig, may be null
     * @return PadInfo or null if pad token cannot be resolved
     */
    public static PadInfo extractPadInfo(String tokenizerJson, TokenizerConfig config) {
        try {
            JsonObject tokenizer =
                    JsonParser.parseReader(new StringReader(tokenizerJson)).getAsJsonObject();
            // Priority 1: an explicit "padding" block in tokenizer.json carries both token and id.
            if (tokenizer.has("padding") && !tokenizer.get("padding").isJsonNull()) {
                JsonObject padding = tokenizer.getAsJsonObject("padding");
                if (padding != null && padding.has("pad_token") && padding.has("pad_id")) {
                    String padToken = padding.get("pad_token").getAsString();
                    int padId = padding.get("pad_id").getAsInt();
                    return new PadInfo(padToken, padId);
                }
            }
            // Priority 2: pad_token from tokenizer_config.json; the id must still be looked up
            // in tokenizer.json.
            if (config != null && config.getPadToken() != null) {
                String padToken = config.getPadToken();
                // First: try to find it in added_tokens block (preferred over vocab).
                if (tokenizer.has("added_tokens")) {
                    JsonArray added = tokenizer.getAsJsonArray("added_tokens");
                    for (JsonElement el : added) {
                        JsonObject item = el.getAsJsonObject();
                        if (padToken.equals(item.get("content").getAsString())) {
                            int id = item.get("id").getAsInt();
                            return new PadInfo(padToken, id);
                        }
                    }
                }
                // Second: try to resolve from model.vocab (legacy scenario).
                if (tokenizer.has("model")) {
                    JsonObject model = tokenizer.getAsJsonObject("model");
                    if (model.has("vocab")) {
                        JsonObject vocab = model.getAsJsonObject("vocab");
                        JsonElement element = vocab.get(padToken);
                        if (element != null && element.isJsonPrimitive()) {
                            int id = element.getAsInt();
                            return new PadInfo(padToken, id);
                        }
                    }
                }
                // The pad token is known but its id could not be resolved anywhere; warn and
                // fall through to return null.
                logger.warn(
                        "pad_token '{}' was found in config, but not in tokenizer.json added_tokens"
                                + " or vocab",
                        padToken);
            }
        } catch (Exception e) {
            // Deliberately broad: any malformed/unexpected JSON degrades to "no pad info"
            // rather than failing tokenizer creation.
            // NOTE(review): consider narrowing to JsonParseException/IllegalStateException.
            logger.warn("Failed to parse pad_token from tokenizer.json", e);
        }
        return null;
    }
    /** The type Pad info. */
    public static class PadInfo {
        /** The Pad token. */
        private final String padToken;
        /** The Pad id. */
        private final int padId;
        /**
         * Instantiates a new Pad info.
         *
         * @param padToken the pad token
         * @param padId the pad id
         */
        public PadInfo(String padToken, int padId) {
            this.padToken = padToken;
            this.padId = padId;
        }
        /**
         * Gets pad token.
         *
         * @return the pad token
         */
        public String getPadToken() {
            return padToken;
        }
        /**
         * Gets pad id.
         *
         * @return the pad id
         */
        public int getPadId() {
            return padId;
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/TokenizerConfig.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.tokenizers;
import ai.djl.util.JsonUtils;
import com.google.gson.annotations.SerializedName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Objects;
/**
* Configuration class for HuggingFace tokenizer. Loads and holds configuration from
* tokenizer_config.json.
*/
/**
 * Configuration class for HuggingFace tokenizer. Loads and holds configuration from
 * tokenizer_config.json.
 */
public class TokenizerConfig {
    private static final Logger logger = LoggerFactory.getLogger(TokenizerConfig.class);
    /** The constant DEFAULT_MAX_LENGTH. */
    public static final int DEFAULT_MAX_LENGTH = 512;
    @SerializedName("tokenizer_class")
    private String tokenizerClass;
    @SerializedName("model_max_length")
    private Integer modelMaxLength;
    @SerializedName("do_lower_case")
    private Boolean doLowerCase;
    @SerializedName("strip_accents")
    private Boolean stripAccents;
    @SerializedName("clean_up_tokenization_spaces")
    private Boolean cleanUpTokenizationSpaces;
    @SerializedName("add_prefix_space")
    private Boolean addPrefixSpace;
    // Special tokens
    @SerializedName("bos_token")
    private String bosToken;
    @SerializedName("eos_token")
    private String eosToken;
    @SerializedName("unk_token")
    private String unkToken;
    @SerializedName("sep_token")
    private String sepToken;
    @SerializedName("pad_token")
    private String padToken;
    @SerializedName("cls_token")
    private String clsToken;
    /**
     * Loads a {@code TokenizerConfig} from the given tokenizer_config.json path.
     *
     * @param configPath the config path
     * @return the tokenizer config, or {@code null} if the file is missing or unreadable
     */
    public static TokenizerConfig load(Path configPath) {
        if (!Files.exists(configPath)) {
            return null;
        }
        try (Reader reader = Files.newBufferedReader(configPath)) {
            return JsonUtils.GSON.fromJson(reader, TokenizerConfig.class);
        } catch (IOException e) {
            logger.warn("Failed to load tokenizer_config.json, falling back to legacy config", e);
            return null;
        }
    }
    /**
     * Returns the model's maximum sequence length, or {@link #DEFAULT_MAX_LENGTH} when the
     * config does not specify one.
     *
     * @return the model max length
     */
    public int getModelMaxLength() {
        return modelMaxLength == null ? DEFAULT_MAX_LENGTH : modelMaxLength;
    }
    /**
     * Returns {@code true} if the config explicitly enables lower-casing.
     *
     * @return whether lower-casing is enabled
     */
    public boolean isDoLowerCase() {
        return Boolean.TRUE.equals(doLowerCase);
    }
    /**
     * Returns {@code true} if the config explicitly enables accent stripping.
     *
     * @return whether accent stripping is enabled
     */
    public boolean isStripAccents() {
        return Boolean.TRUE.equals(stripAccents);
    }
    /**
     * Returns {@code true} if the config explicitly enables tokenization-space cleanup.
     *
     * @return whether tokenization-space cleanup is enabled
     */
    public boolean isCleanUpTokenizationSpaces() {
        return Boolean.TRUE.equals(cleanUpTokenizationSpaces);
    }
    /**
     * Returns {@code true} if the config explicitly enables adding a prefix space.
     *
     * @return whether a prefix space is added
     */
    public boolean isAddPrefixSpace() {
        return Boolean.TRUE.equals(addPrefixSpace);
    }
    /**
     * Returns the beginning-of-sequence token, may be null.
     *
     * @return the bos token
     */
    public String getBosToken() {
        return bosToken;
    }
    /**
     * Returns the end-of-sequence token, may be null.
     *
     * @return the eos token
     */
    public String getEosToken() {
        return eosToken;
    }
    /**
     * Returns the unknown token, may be null.
     *
     * @return the unk token
     */
    public String getUnkToken() {
        return unkToken;
    }
    /**
     * Returns the separator token, may be null.
     *
     * @return the sep token
     */
    public String getSepToken() {
        return sepToken;
    }
    /**
     * Returns the padding token, may be null.
     *
     * @return the pad token
     */
    public String getPadToken() {
        return padToken;
    }
    /**
     * Returns the classification token, may be null.
     *
     * @return the cls token
     */
    public String getClsToken() {
        return clsToken;
    }
    /**
     * Returns the tokenizer class name from the config, may be null.
     *
     * @return the tokenizer class
     */
    public String getTokenizerClass() {
        return tokenizerClass;
    }
    /**
     * Returns {@code true} if the config explicitly sets do_lower_case (true or false).
     *
     * @return whether do_lower_case is explicitly set
     */
    public boolean hasExplicitDoLowerCase() {
        return doLowerCase != null;
    }
    /**
     * Returns {@code true} if the config explicitly sets strip_accents (true or false).
     *
     * @return whether strip_accents is explicitly set
     */
    public boolean hasExplicitStripAccents() {
        return stripAccents != null;
    }
    /**
     * Returns {@code true} if the config explicitly sets add_prefix_space (true or false).
     *
     * @return whether add_prefix_space is explicitly set
     */
    public boolean hasExplicitAddPrefixSpace() {
        return addPrefixSpace != null;
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying Huggingface tokenizers. */
package ai.djl.huggingface.tokenizers;
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/jni/CharSpan.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.tokenizers.jni;
/** A class holds character span information. */
/** A class holds character span information. */
public class CharSpan {
    private final int start;
    private final int end;

    /**
     * Constructs a new {@code CharSpan} with the given bounds.
     *
     * @param start the start position
     * @param end the end position
     */
    public CharSpan(int start, int end) {
        this.start = start;
        this.end = end;
    }

    /**
     * Returns the start position of the span.
     *
     * @return the start position
     */
    public int getStart() {
        return start;
    }

    /**
     * Returns the end position of the span.
     *
     * @return the end position
     */
    public int getEnd() {
        return end;
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/jni/LibUtils.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.tokenizers.jni;
import ai.djl.engine.EngineException;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** Utilities for finding the Huggingface tokenizer native binary on the System. */
@SuppressWarnings("MissingJavadocMethod")
public final class LibUtils {
private static final Logger logger = LoggerFactory.getLogger(LibUtils.class);
private static final String LIB_NAME = System.mapLibraryName("tokenizers");
private static final Pattern VERSION_PATTERN =
Pattern.compile(
"(\\d+\\.\\d+\\.\\d+(-[a-z]+)?)-(\\d+\\.\\d+\\.\\d+)(-SNAPSHOT)?(-\\d+)?");
private static final int[] SUPPORTED_CUDA_VERSIONS = {122};
private static final Set<String> SUPPORTED_CUDA_ARCH =
new HashSet<>(Arrays.asList("80", "86", "89", "90"));
private static EngineException exception;
    static {
        // Load eagerly at class initialization, but defer any failure so that callers get a
        // clear EngineException from checkStatus() instead of an opaque class-init error.
        try {
            loadLibrary();
        } catch (RuntimeException e) {
            exception = new EngineException("Failed to load Huggingface native library.", e);
        }
    }
    // Utility class: no instances.
    private LibUtils() {}
    /** Re-throws the deferred native-library load failure, if any occurred. */
    public static void checkStatus() {
        if (exception != null) {
            throw exception;
        }
    }
    // Locates and loads the native tokenizer library (plus runtime dependencies on Windows).
    private static void loadLibrary() {
        // Android bundles the library under a different name and loads it by name.
        if ("http://www.android.com/".equals(System.getProperty("java.vendor.url"))) {
            System.loadLibrary("djl_tokenizer"); // NOPMD
            return;
        }
        String[] libs;
        if (System.getProperty("os.name").startsWith("Windows")) {
            // Windows: MinGW runtime DLLs must be loaded before the tokenizer library in this
            // exact order, because System.load does not resolve their dependencies.
            libs =
                    new String[] {
                        "libwinpthread-1.dll", "libgcc_s_seh-1.dll", "libstdc++-6.dll", LIB_NAME
                    };
        } else {
            libs = new String[] {LIB_NAME};
        }
        Platform platform = Platform.detectPlatform("tokenizers");
        // An explicit RUST_LIBRARY_PATH override wins over the bundled JNI library.
        Path dir = findOverrideLibrary(platform);
        if (dir == null) {
            dir = copyJniLibrary(libs, platform);
        }
        logger.debug("Loading huggingface library from: {}", dir);
        for (String libName : libs) {
            String path = dir.resolve(libName).toString();
            logger.debug("Loading native library: {}", path);
            String nativeHelper = System.getProperty("ai.djl.huggingface.native_helper");
            if (nativeHelper != null && !nativeHelper.isEmpty()) {
                // Delegate loading to a user-specified helper class.
                ClassLoaderUtils.nativeLoad(nativeHelper, path);
            } else {
                System.load(path); // NOPMD
            }
        }
    }
private static Path findOverrideLibrary(Platform platform) {
String libPath = Utils.getEnvOrSystemProperty("RUST_LIBRARY_PATH");
if (libPath != null) {
logger.info("Override Rust library path: {}", libPath);
Path path = Paths.get(libPath);
String fileName = Objects.requireNonNull(path.getFileName()).toString();
if (Files.isRegularFile(path) && LIB_NAME.equals(fileName)) {
return path.getParent();
} else if (Files.isDirectory(path)) {
String cudaArch = platform.getCudaArch();
if (!cudaArch.isEmpty()) {
path = path.resolve(cudaArch);
}
Path file = path.resolve(LIB_NAME);
if (Files.exists(file)) {
return path;
}
}
throw new EngineException("No native rust library found in: " + libPath);
}
return null;
}
private static Path copyJniLibrary(String[] libs, Platform platform) {
Path cacheDir = Utils.getEngineCacheDir("tokenizers");
String os = platform.getOsPrefix();
String classifier = platform.getClassifier();
String version = platform.getVersion();
String cudaArch = platform.getCudaArch();
if (cudaArch == null) {
cudaArch = "";
}
String flavor = Utils.getEnvOrSystemProperty("RUST_FLAVOR");
boolean override = flavor != null && !flavor.isEmpty();
if (override) {
logger.info("Uses override RUST_FLAVOR: {}", flavor);
} else {
if (Utils.isOfflineMode() || "win".equals(os)) {
flavor = "cpu";
} else {
flavor = platform.getFlavor();
}
}
// Find the highest matching CUDA version
if (flavor.startsWith("cu")) {
boolean match = false;
if (SUPPORTED_CUDA_ARCH.contains(cudaArch)) {
int cudaVersion = Integer.parseInt(flavor.substring(2, 5));
for (int v : SUPPORTED_CUDA_VERSIONS) {
if (override && cudaVersion == v) {
match = true;
break;
} else if (cudaVersion >= v) {
flavor = "cu" + v;
match = true;
break;
}
}
}
if (!match) {
logger.warn(
"No matching cuda flavor for {} found: {}/sm_{}.",
classifier,
flavor,
cudaArch);
flavor = "cpu"; // Fallback to CPU
}
}
Path dir = cacheDir.resolve(version + '-' + flavor + '-' + classifier);
if (!cudaArch.isEmpty()) {
dir = dir.resolve(cudaArch);
}
logger.debug("Using cache dir: {}", dir);
Path path = dir.resolve(LIB_NAME);
if (Files.exists(path)) {
return dir.toAbsolutePath();
}
// Copy JNI library from classpath
if (copyJniLibraryFromClasspath(libs, dir, classifier, flavor)) {
return dir.toAbsolutePath();
}
// Download JNI library
if (flavor.startsWith("cu")) {
Matcher matcher = VERSION_PATTERN.matcher(version);
if (!matcher.matches()) {
throw new EngineException("Unexpected version: " + version);
}
String jniVersion = matcher.group(1);
String djlVersion = matcher.group(3);
downloadJniLib(path, djlVersion, jniVersion, classifier, flavor + '-' + cudaArch);
return dir.toAbsolutePath();
}
throw new EngineException("Unexpected flavor: " + flavor);
}
private static boolean copyJniLibraryFromClasspath(
String[] libs, Path dir, String classifier, String flavor) {
Path tmp = null;
try {
Path parent = Objects.requireNonNull(dir.getParent());
Files.createDirectories(parent);
tmp = Files.createTempDirectory(parent, "tmp");
for (String libName : libs) {
String libPath = "native/lib/" + classifier + "/" + flavor + "/" + libName;
if (ClassLoaderUtils.getResource(libPath) == null) {
logger.info("library not found in classpath: {}", libPath);
return false;
}
logger.info("Extracting {} to cache ...", libPath);
try (InputStream is = ClassLoaderUtils.getResourceAsStream(libPath)) {
Path target = tmp.resolve(libName);
Files.copy(is, target, StandardCopyOption.REPLACE_EXISTING);
}
}
Utils.moveQuietly(tmp, dir);
return true;
} catch (IOException e) {
logger.error("Cannot copy jni files", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
return false;
}
private static void downloadJniLib(
Path path, String djlVersion, String version, String classifier, String flavor) {
String url =
"https://publish.djl.ai/tokenizers/"
+ version
+ "/jnilib/"
+ djlVersion
+ '/'
+ classifier
+ '/'
+ flavor
+ '/'
+ LIB_NAME;
logger.info("Downloading jni {} to cache ...", url);
Path parent = Objects.requireNonNull(path.getParent());
Path tmp = null;
try (InputStream is = Utils.openUrl(url)) {
Files.createDirectories(parent);
tmp = Files.createTempFile(parent, "jni", "tmp");
Files.copy(is, tmp, StandardCopyOption.REPLACE_EXISTING);
Utils.moveQuietly(tmp, path);
} catch (IOException e) {
throw new EngineException("Cannot download jni files: " + url, e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/jni/TokenizersLibrary.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.tokenizers.jni;
/** A class containing utilities to interact with the Tokenizer JNI layer. */
@SuppressWarnings("MissingJavadocMethod")
public final class TokenizersLibrary {
    // Singleton entry point to the native tokenizers library.
    public static final TokenizersLibrary LIB = new TokenizersLibrary();
    private TokenizersLibrary() {}
    // --- Tokenizer lifecycle ---
    // The returned long values are native handles that must be released with
    // deleteTokenizer(handle) when no longer needed.
    public native long createTokenizer(String identifier, String authToken);
    public native long createTokenizerFromString(String json);
    public native long createBpeTokenizer(String vocabulary, String merges);
    public native void deleteTokenizer(long handle);
    // --- Encoding ---
    // The returned handles reference native Encoding objects; release each with
    // deleteEncoding(encoding).
    public native long encode(long tokenizer, String text, boolean addSpecialTokens);
    public native long encodeDual(
            long tokenizer, String text, String textPair, boolean addSpecialTokens);
    public native long encodeList(long tokenizer, String[] inputs, boolean addSpecialTokens);
    public native long[] batchEncode(long tokenizer, String[] inputs, boolean addSpecialTokens);
    public native long[] batchEncodePair(
            long tokenizer, String[] text, String[] textPair, boolean addSpecialTokens);
    public native String[] batchDecode(long tokenizer, long[][] batchIds, boolean addSpecialTokens);
    public native void deleteEncoding(long encoding);
    // --- Encoding accessors ---
    // All take a native Encoding handle returned by one of the encode methods above.
    public native long[] getTokenIds(long encoding);
    public native long[] getTypeIds(long encoding);
    public native long[] getWordIds(long encoding);
    public native long[] getSequenceIds(long encoding);
    public native String[] getTokens(long encoding);
    public native long[] getAttentionMask(long encoding);
    public native long[] getSpecialTokenMask(long encoding);
    public native CharSpan[] getTokenCharSpans(long encoding);
    public native long[] getOverflowing(long encoding);
    public native int getOverflowCount(long encoding);
    // --- Decoding ---
    public native String decode(long tokenizer, long[] ids, boolean addSpecialTokens);
    // --- Truncation / padding configuration ---
    public native String getTruncationStrategy(long tokenizer);
    public native String getPaddingStrategy(long tokenizer);
    public native int getMaxLength(long tokenizer);
    public native int getStride(long tokenizer);
    public native int getPadToMultipleOf(long tokenizer);
    public native void disablePadding(long tokenizer);
    public native void setPadding(
            long tokenizer, int maxLength, String paddingStrategy, int padToMultipleOf);
    public native void setPaddingWithTokenAndId(
            long tokenizer,
            int maxLength,
            String paddingStrategy,
            String padToken,
            int id,
            int padToMultipleOf);
    public native void disableTruncation(long tokenizer);
    public native void setTruncation(
            long tokenizer, int maxLength, String truncationStrategy, int stride);
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/tokenizers/jni/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying Huggingface tokenizers. */
package ai.djl.huggingface.tokenizers.jni;
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/CrossEncoderTranslator.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.PairList;
import ai.djl.util.StringPair;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/** The translator for Huggingface cross encoder model. */
public class CrossEncoderTranslator implements Translator<StringPair, float[]> {

    private HuggingFaceTokenizer tokenizer;
    private boolean includeTokenTypes;
    private boolean int32;
    private boolean sigmoid;
    private Batchifier batchifier;

    CrossEncoderTranslator(
            HuggingFaceTokenizer tokenizer,
            boolean includeTokenTypes,
            boolean int32,
            boolean sigmoid,
            Batchifier batchifier) {
        this.tokenizer = tokenizer;
        this.includeTokenTypes = includeTokenTypes;
        this.int32 = int32;
        this.sigmoid = sigmoid;
        this.batchifier = batchifier;
    }

    /** {@inheritDoc} */
    @Override
    public Batchifier getBatchifier() {
        return batchifier;
    }

    /** {@inheritDoc} */
    @Override
    public NDList processInput(TranslatorContext ctx, StringPair input) {
        // Encode the sentence pair as one sequence.
        Encoding enc = tokenizer.encode(input.getKey(), input.getValue());
        ctx.setAttachment("encoding", enc);
        return enc.toNDList(ctx.getNDManager(), includeTokenTypes, int32);
    }

    /** {@inheritDoc} */
    @Override
    public NDList batchProcessInput(TranslatorContext ctx, List<StringPair> inputs) {
        NDManager manager = ctx.getNDManager();
        Encoding[] encoded = tokenizer.batchEncode(new PairList<>(inputs));
        NDList[] rows = new NDList[encoded.length];
        for (int idx = 0; idx < rows.length; ++idx) {
            rows[idx] = encoded[idx].toNDList(manager, includeTokenTypes, int32);
        }
        return batchifier.batchify(rows);
    }

    /** {@inheritDoc} */
    @Override
    public float[] processOutput(TranslatorContext ctx, NDList list) {
        NDArray scores = list.get(0);
        if (sigmoid) {
            // Map raw logits into (0, 1).
            scores = scores.getNDArrayInternal().sigmoid();
        }
        return scores.toFloatArray();
    }

    /** {@inheritDoc} */
    @Override
    public List<float[]> batchProcessOutput(TranslatorContext ctx, NDList list) {
        if (sigmoid) {
            // Split the batch first, then apply sigmoid per example.
            NDList[] rows = batchifier.unbatchify(list);
            List<float[]> out = new ArrayList<>(rows.length);
            for (NDList row : rows) {
                out.add(row.get(0).getNDArrayInternal().sigmoid().toFloatArray());
            }
            return out;
        }
        NDArray logits = list.get(0);
        int batchSize = Math.toIntExact(logits.size(0));
        float[] flat = logits.toFloatArray();
        if (batchSize == 1) {
            return Collections.singletonList(flat);
        }
        // Slice the flattened output into one float[] per example.
        int rowLen = flat.length / batchSize;
        List<float[]> out = new ArrayList<>(batchSize);
        for (int row = 0; row < batchSize; ++row) {
            float[] chunk = new float[rowLen];
            System.arraycopy(flat, row * rowLen, chunk, 0, rowLen);
            out.add(chunk);
        }
        return out;
    }

    /**
     * Creates a builder to build a {@code CrossEncoderTranslator}.
     *
     * @param tokenizer the tokenizer
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer) {
        return new Builder(tokenizer);
    }

    /**
     * Creates a builder to build a {@code CrossEncoderTranslator}.
     *
     * @param tokenizer the tokenizer
     * @param arguments the models' arguments
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
        Builder b = builder(tokenizer);
        b.configure(arguments);
        return b;
    }

    /** The builder for cross encoder translator. */
    public static final class Builder {

        private HuggingFaceTokenizer tokenizer;
        private boolean includeTokenTypes;
        private boolean int32;
        private boolean sigmoid = true;
        private Batchifier batchifier = Batchifier.STACK;

        Builder(HuggingFaceTokenizer tokenizer) {
            this.tokenizer = tokenizer;
        }

        /**
         * Sets if include token types for the {@link Translator}.
         *
         * @param includeTokenTypes true to include token types
         * @return this builder
         */
        public Builder optIncludeTokenTypes(boolean includeTokenTypes) {
            this.includeTokenTypes = includeTokenTypes;
            return this;
        }

        /**
         * Sets if use int32 datatype for the {@link Translator}.
         *
         * @param int32 true to use int32 data type for input tensors
         * @return this builder
         */
        public Builder optInt32(boolean int32) {
            this.int32 = int32;
            return this;
        }

        /**
         * Sets if apply sigmoid for the {@link Translator}.
         *
         * @param sigmoid true to apply sigmoid
         * @return this builder
         */
        public Builder optSigmoid(boolean sigmoid) {
            this.sigmoid = sigmoid;
            return this;
        }

        /**
         * Sets the {@link Batchifier} for the {@link Translator}.
         *
         * @param batchifier the {@link Batchifier} to use
         * @return this builder
         */
        public Builder optBatchifier(Batchifier batchifier) {
            this.batchifier = batchifier;
            return this;
        }

        /**
         * Configures the builder with the model arguments.
         *
         * @param arguments the model arguments
         */
        public void configure(Map<String, ?> arguments) {
            optIncludeTokenTypes(ArgumentsUtil.booleanValue(arguments, "includeTokenTypes"));
            optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
            optSigmoid(ArgumentsUtil.booleanValue(arguments, "sigmoid", true));
            optBatchifier(
                    Batchifier.fromString(
                            ArgumentsUtil.stringValue(arguments, "batchifier", "stack")));
        }

        /**
         * Builds the translator.
         *
         * @return the new translator
         * @throws IOException if I/O error occurs
         */
        public CrossEncoderTranslator build() throws IOException {
            return new CrossEncoderTranslator(
                    tokenizer, includeTokenTypes, int32, sigmoid, batchifier);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/FillMaskTranslator.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Classifications;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Batchifier;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/** The translator for Huggingface fill mask model. */
public class FillMaskTranslator implements Translator<String, Classifications> {
    private HuggingFaceTokenizer tokenizer;
    // Literal mask token (e.g. "[MASK]") and its vocabulary id, resolved in the ctor.
    private String maskToken;
    private long maskTokenId;
    // Number of top candidate tokens returned per prediction.
    private int topK;
    private boolean includeTokenTypes;
    private boolean int32;
    private Batchifier batchifier;
    FillMaskTranslator(
            HuggingFaceTokenizer tokenizer,
            String maskToken,
            int topK,
            boolean includeTokenTypes,
            boolean int32,
            Batchifier batchifier) {
        this.tokenizer = tokenizer;
        this.maskToken = maskToken;
        this.topK = topK;
        this.includeTokenTypes = includeTokenTypes;
        this.int32 = int32;
        this.batchifier = batchifier;
        // Encode the bare mask token and take its first id as the mask token id.
        // NOTE(review): assumes the mask token encodes to a single id — confirm for
        // tokenizers where it may split into multiple pieces.
        Encoding encoding = tokenizer.encode(maskToken, false, false);
        maskTokenId = encoding.getIds()[0];
    }
    /** {@inheritDoc} */
    @Override
    public Batchifier getBatchifier() {
        return batchifier;
    }
    /** {@inheritDoc} */
    @Override
    public NDList processInput(TranslatorContext ctx, String input) throws TranslateException {
        Encoding encoding = tokenizer.encode(input);
        long[] indices = encoding.getIds();
        // Locate the single mask position; stored for processOutput().
        int maskIndex = getMaskIndex(indices);
        ctx.setAttachment("maskIndex", maskIndex);
        return encoding.toNDList(ctx.getNDManager(), includeTokenTypes, int32);
    }
    /** {@inheritDoc} */
    @Override
    public NDList batchProcessInput(TranslatorContext ctx, List<String> inputs)
            throws TranslateException {
        NDManager manager = ctx.getNDManager();
        Encoding[] encodings = tokenizer.batchEncode(inputs);
        NDList[] batch = new NDList[encodings.length];
        // One mask position per input; attached up-front and filled in the loop below.
        int[] maskIndices = new int[encodings.length];
        ctx.setAttachment("maskIndices", maskIndices);
        for (int i = 0; i < batch.length; ++i) {
            long[] indices = encodings[i].getIds();
            maskIndices[i] = getMaskIndex(indices);
            batch[i] = encodings[i].toNDList(manager, includeTokenTypes, int32);
        }
        return batchifier.batchify(batch);
    }
    /** {@inheritDoc} */
    @Override
    public Classifications processOutput(TranslatorContext ctx, NDList list) {
        // Mask position recorded by processInput for this request.
        int maskIndex = (int) ctx.getAttachment("maskIndex");
        return toClassifications(list, maskIndex);
    }
    /** {@inheritDoc} */
    @Override
    public List<Classifications> batchProcessOutput(TranslatorContext ctx, NDList list) {
        NDList[] batch = batchifier.unbatchify(list);
        int[] maskIndices = (int[]) ctx.getAttachment("maskIndices");
        List<Classifications> ret = new ArrayList<>(maskIndices.length);
        for (int i = 0; i < batch.length; ++i) {
            ret.add(toClassifications(batch[i], maskIndices[i]));
        }
        return ret;
    }
    // Returns the index of the single mask token in the encoded ids; rejects inputs
    // with zero or more than one mask.
    private int getMaskIndex(long[] indices) throws TranslateException {
        int maskIndex = -1;
        for (int i = 0; i < indices.length; ++i) {
            if (indices[i] == maskTokenId) {
                if (maskIndex != -1) {
                    throw new TranslateException("Only one mask supported.");
                }
                maskIndex = i;
            }
        }
        if (maskIndex == -1) {
            throw new TranslateException("Mask token " + maskToken + " not found.");
        }
        return maskIndex;
    }
    // Converts vocabulary logits at the mask position into topK token classifications.
    private Classifications toClassifications(NDList output, int maskIndex) {
        // Softmax over the vocabulary logits at the mask position, then sort descending.
        NDArray prob = output.get(0).get(maskIndex).softmax(0);
        NDArray array = prob.argSort(0, false);
        long[] classIds = new long[topK];
        List<Double> probabilities = new ArrayList<>(topK);
        for (int i = 0; i < topK; ++i) {
            classIds[i] = array.getLong(i);
            probabilities.add((double) prob.getFloat(classIds[i]));
        }
        // NOTE(review): decoding all topK ids jointly and splitting on ' ' assumes
        // each candidate decodes to one space-free word — verify for tokenizers whose
        // tokens may decode to empty or multi-word strings.
        String[] classes = tokenizer.decode(classIds).trim().split(" ");
        return new Classifications(Arrays.asList(classes), probabilities);
    }
    /**
     * Creates a builder to build a {@code FillMaskTranslator}.
     *
     * @param tokenizer the tokenizer
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer) {
        return new Builder(tokenizer);
    }
    /**
     * Creates a builder to build a {@code FillMaskTranslator}.
     *
     * @param tokenizer the tokenizer
     * @param arguments the models' arguments
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
        Builder builder = builder(tokenizer);
        builder.configure(arguments);
        return builder;
    }
    /** The builder for fill mask translator. */
    public static final class Builder {
        private HuggingFaceTokenizer tokenizer;
        private String maskedToken = "[MASK]";
        private int topK = 5;
        private boolean includeTokenTypes;
        private boolean int32;
        private Batchifier batchifier = Batchifier.STACK;
        Builder(HuggingFaceTokenizer tokenizer) {
            this.tokenizer = tokenizer;
        }
        /**
         * Sets the mask token for the {@link Translator}.
         *
         * @param maskedToken the literal mask token (default {@code "[MASK]"})
         * @return this builder
         */
        public Builder optMaskToken(String maskedToken) {
            this.maskedToken = maskedToken;
            return this;
        }
        /**
         * Set the topK number of classes to be displayed.
         *
         * @param topK the number of top classes to return
         * @return this builder
         */
        public Builder optTopK(int topK) {
            this.topK = topK;
            return this;
        }
        /**
         * Sets if include token types for the {@link Translator}.
         *
         * @param includeTokenTypes true to include token types
         * @return this builder
         */
        public Builder optIncludeTokenTypes(boolean includeTokenTypes) {
            this.includeTokenTypes = includeTokenTypes;
            return this;
        }
        /**
         * Sets if use int32 datatype for the {@link Translator}.
         *
         * @param int32 true to use int32 data type for input tensors
         * @return this builder
         */
        public Builder optInt32(boolean int32) {
            this.int32 = int32;
            return this;
        }
        /**
         * Sets the {@link Batchifier} for the {@link Translator}.
         *
         * @param batchifier the {@link Batchifier} to use
         * @return this builder
         */
        public Builder optBatchifier(Batchifier batchifier) {
            this.batchifier = batchifier;
            return this;
        }
        /**
         * Configures the builder with the model arguments.
         *
         * @param arguments the model arguments
         */
        public void configure(Map<String, ?> arguments) {
            optMaskToken(ArgumentsUtil.stringValue(arguments, "maskToken", "[MASK]"));
            optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
            optTopK(ArgumentsUtil.intValue(arguments, "topK", 5));
            optIncludeTokenTypes(ArgumentsUtil.booleanValue(arguments, "includeTokenTypes"));
            String batchifierStr = ArgumentsUtil.stringValue(arguments, "batchifier", "stack");
            optBatchifier(Batchifier.fromString(batchifierStr));
        }
        /**
         * Builds the translator.
         *
         * @return the new translator
         */
        public FillMaskTranslator build() {
            return new FillMaskTranslator(
                    tokenizer, maskedToken, topK, includeTokenTypes, int32, batchifier);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/FillMaskTranslatorFactory.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Classifications;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.translator.TextClassificationServingTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link FillMaskTranslator} instance. */
public class FillMaskTranslatorFactory implements TranslatorFactory, Serializable {

    private static final long serialVersionUID = 1L;

    // The (input, output) type pairs this factory can produce translators for.
    private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();

    static {
        SUPPORTED_TYPES.add(new Pair<>(String.class, Classifications.class));
        SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
    }

    /** {@inheritDoc} */
    @Override
    public Set<Pair<Type, Type>> getSupportedTypes() {
        return SUPPORTED_TYPES;
    }

    /** {@inheritDoc} */
    @Override
    @SuppressWarnings("unchecked")
    public <I, O> Translator<I, O> newInstance(
            Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
            throws TranslateException {
        try {
            // The tokenizer is loaded from the model directory and shares the model's
            // NDManager.
            HuggingFaceTokenizer tokenizer =
                    HuggingFaceTokenizer.builder(arguments)
                            .optTokenizerPath(model.getModelPath())
                            .optManager(model.getNDManager())
                            .build();
            FillMaskTranslator base = FillMaskTranslator.builder(tokenizer, arguments).build();
            if (input == String.class && output == Classifications.class) {
                return (Translator<I, O>) base;
            }
            if (input == Input.class && output == Output.class) {
                // Wrap for generic serving (Input/Output) use.
                return (Translator<I, O>) new TextClassificationServingTranslator(base);
            }
            throw new IllegalArgumentException("Unsupported input/output types.");
        } catch (IOException e) {
            throw new TranslateException("Failed to load tokenizer.", e);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/PretrainedConfig.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import com.google.gson.annotations.SerializedName;
import java.util.Map;
/** A class representing HuggingFace config.json file. */
public class PretrainedConfig {

    // Populated by Gson from the "problem_type" field of config.json.
    @SerializedName("problem_type")
    String problemType;

    // Populated by Gson from the "id2label" field of config.json.
    Map<String, String> id2label;

    /**
     * Returns the problem type declared in {@code config.json}.
     *
     * @return the problem type, or {@code null} if absent
     */
    public String getProblemType() {
        return problemType;
    }

    /**
     * Sets the problem type.
     *
     * @param problemType the problem type
     */
    public void setProblemType(String problemType) {
        this.problemType = problemType;
    }

    /**
     * Returns the id to label map declared in {@code config.json}.
     *
     * @return the id to label map, or {@code null} if absent
     */
    public Map<String, String> getId2label() {
        return id2label;
    }

    /**
     * Sets the id to label map.
     *
     * @param id2label the id to label map
     */
    public void setId2label(Map<String, String> id2label) {
        this.id2label = id2label;
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/QuestionAnsweringTranslator.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.nlp.qa.QAInput;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.JsonUtils;
import ai.djl.util.PairList;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** The translator for Huggingface question answering model. */
public class QuestionAnsweringTranslator implements Translator<QAInput, String> {
private HuggingFaceTokenizer tokenizer;
private boolean includeTokenTypes;
private boolean int32;
private Batchifier batchifier;
private boolean detail;
    // Package-private: instances are created via Builder.build().
    QuestionAnsweringTranslator(
            HuggingFaceTokenizer tokenizer,
            boolean includeTokenTypes,
            boolean int32,
            Batchifier batchifier,
            boolean detail) {
        this.tokenizer = tokenizer;
        this.includeTokenTypes = includeTokenTypes;
        this.int32 = int32;
        this.batchifier = batchifier;
        // When true, decode() returns a JSON record (score/start/end/answer) instead
        // of the bare answer string.
        this.detail = detail;
    }
    /** {@inheritDoc} */
    @Override
    public Batchifier getBatchifier() {
        // Configurable via Builder.optBatchifier (default: Batchifier.STACK).
        return batchifier;
    }
    /** {@inheritDoc} */
    @Override
    public NDList processInput(TranslatorContext ctx, QAInput input) {
        // Question and paragraph are encoded together as one sequence pair.
        Encoding encoding = tokenizer.encode(input.getQuestion(), input.getParagraph());
        // Saved for processOutput(), which needs the token ids to decode the answer span.
        ctx.setAttachment("encoding", encoding);
        return encoding.toNDList(ctx.getNDManager(), includeTokenTypes, int32);
    }
/** {@inheritDoc} */
@Override
public NDList batchProcessInput(TranslatorContext ctx, List<QAInput> inputs) {
NDManager manager = ctx.getNDManager();
PairList<String, String> pair = new PairList<>(inputs.size());
for (QAInput input : inputs) {
pair.add(input.getQuestion(), input.getParagraph());
}
Encoding[] encodings = tokenizer.batchEncode(pair);
ctx.setAttachment("encodings", encodings);
NDList[] batch = new NDList[encodings.length];
for (int i = 0; i < encodings.length; ++i) {
batch[i] = encodings[i].toNDList(manager, includeTokenTypes, int32);
}
return batchifier.batchify(batch);
}
    /** {@inheritDoc} */
    @Override
    public String processOutput(TranslatorContext ctx, NDList list) {
        // Encoding was attached by processInput for this same request.
        Encoding encoding = (Encoding) ctx.getAttachment("encoding");
        return decode(list, encoding);
    }
/** {@inheritDoc} */
@Override
public List<String> batchProcessOutput(TranslatorContext ctx, NDList list) {
NDList[] batch = batchifier.unbatchify(list);
Encoding[] encodings = (Encoding[]) ctx.getAttachment("encodings");
List<String> ret = new ArrayList<>(batch.length);
for (int i = 0; i < encodings.length; ++i) {
ret.add(decode(batch[i], encodings[i]));
}
return ret;
}
    // Decodes start/end logits into an answer string; in detail mode returns a JSON
    // record with score/start/end/answer instead.
    private String decode(NDList list, Encoding encoding) {
        NDArray startLogits = list.get(0);
        NDArray endLogits = list.get(1);
        if ("PyTorch".equals(startLogits.getManager().getEngine().getEngineName())) {
            // PyTorch InferenceMode tensor is read only, must clone it
            startLogits = startLogits.duplicate();
            endLogits = endLogits.duplicate();
        }
        if (detail) {
            // exclude undesired sequences
            // NOTE(review): positions with sequence id 0 are masked out here, which
            // looks intended to keep the answer inside the paragraph — confirm against
            // Encoding.getSequenceIds() semantics for question vs. paragraph tokens.
            long[] sequenceIds = encoding.getSequenceIds();
            List<Integer> undesired = new ArrayList<>();
            for (int i = 0; i < sequenceIds.length; ++i) {
                if (sequenceIds[i] == 0) {
                    undesired.add(i);
                }
            }
            int[] idx = undesired.stream().mapToInt(Integer::intValue).toArray();
            NDIndex ndIndex = new NDIndex("{}", list.getManager().create(idx));
            // A large negative logit drives these positions to ~0 after the softmax below.
            startLogits.set(ndIndex, -100000f);
            endLogits.set(ndIndex, -100000f);
            // normalize
            startLogits = startLogits.sub(startLogits.max()).exp();
            startLogits = startLogits.div(startLogits.sum());
            endLogits = endLogits.sub(endLogits.max()).exp();
            endLogits = endLogits.div(endLogits.sum());
        }
        // exclude <CLS>, TODO: exclude impossible ids properly and handle max answer length
        startLogits.set(new NDIndex(0), -100000);
        endLogits.set(new NDIndex(0), -100000);
        int startIdx = (int) startLogits.argMax().getLong();
        int endIdx = (int) endLogits.argMax().getLong();
        if (startIdx > endIdx) {
            // Inverted span prediction: swap both indices and logit arrays so the
            // score lookup below stays consistent with the chosen span.
            int tmp = startIdx;
            startIdx = endIdx;
            endIdx = tmp;
            NDArray tmpArray = startLogits;
            startLogits = endLogits;
            endLogits = tmpArray;
        }
        // Decode the token ids covering [startIdx, endIdx] back into text.
        long[] indices = encoding.getIds();
        int len = endIdx - startIdx + 1;
        long[] ids = new long[len];
        System.arraycopy(indices, startIdx, ids, 0, len);
        String answer = tokenizer.decode(ids).trim();
        if (detail) {
            // Probability product of the chosen start and end positions.
            float score = startLogits.getFloat(startIdx) * endLogits.getFloat(endIdx);
            Map<String, Object> dict = new ConcurrentHashMap<>();
            dict.put("score", score);
            dict.put("start", startIdx);
            dict.put("end", endIdx);
            dict.put("answer", answer);
            return JsonUtils.toJson(dict);
        }
        return answer;
    }
    /**
     * Creates a builder to build a {@code QuestionAnsweringTranslator}.
     *
     * @param tokenizer the tokenizer
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer) {
        // Entry point for programmatic configuration; see also builder(tokenizer, arguments).
        return new Builder(tokenizer);
    }
/**
 * Returns a fresh {@link Builder} pre-configured from the given model arguments.
 *
 * @param tokenizer the tokenizer used by the translator being built
 * @param arguments the model's arguments applied via {@link Builder#configure(Map)}
 * @return a new, configured {@code Builder} instance
 */
public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
    Builder configured = new Builder(tokenizer);
    configured.configure(arguments);
    return configured;
}
/** The builder for question answering translator. */
public static final class Builder {

    private HuggingFaceTokenizer tokenizer;
    private boolean includeTokenTypes;
    private boolean int32;
    private Batchifier batchifier = Batchifier.STACK;
    private boolean detail;

    Builder(HuggingFaceTokenizer tokenizer) {
        this.tokenizer = tokenizer;
    }

    /**
     * Sets if include token types for the {@link Translator}.
     *
     * @param includeTokenTypes true to include token types
     * @return this builder
     */
    public Builder optIncludeTokenTypes(boolean includeTokenTypes) {
        this.includeTokenTypes = includeTokenTypes;
        return this;
    }

    /**
     * Sets if use int32 datatype for the {@link Translator}.
     *
     * @param int32 true to use int32 datatype for token ids
     * @return this builder
     */
    public Builder optInt32(boolean int32) {
        this.int32 = int32;
        return this;
    }

    /**
     * Sets the {@link Batchifier} for the {@link Translator}.
     *
     * @param batchifier the {@link Batchifier} to use for batching inputs
     * @return this builder
     */
    public Builder optBatchifier(Batchifier batchifier) {
        this.batchifier = batchifier;
        return this;
    }

    /**
     * Sets if output detail for the {@link Translator}.
     *
     * @param detail true to output detail
     * @return this builder
     */
    public Builder optDetail(boolean detail) {
        this.detail = detail;
        return this;
    }

    /**
     * Configures the builder with the model arguments.
     *
     * @param arguments the model arguments
     */
    public void configure(Map<String, ?> arguments) {
        optIncludeTokenTypes(ArgumentsUtil.booleanValue(arguments, "includeTokenTypes"));
        optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
        String batchifierStr = ArgumentsUtil.stringValue(arguments, "batchifier", "stack");
        optDetail(ArgumentsUtil.booleanValue(arguments, "detail"));
        optBatchifier(Batchifier.fromString(batchifierStr));
    }

    /**
     * Builds the translator.
     *
     * @return the new translator
     */
    public QuestionAnsweringTranslator build() {
        return new QuestionAnsweringTranslator(
                tokenizer, includeTokenTypes, int32, batchifier, detail);
    }
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/QuestionAnsweringTranslatorFactory.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.qa.QAInput;
import ai.djl.modality.nlp.translator.QaServingTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link QuestionAnsweringTranslator} instance. */
public class QuestionAnsweringTranslatorFactory implements TranslatorFactory, Serializable {

    private static final long serialVersionUID = 1L;

    // The (input, output) type pairs this factory knows how to serve.
    private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();

    static {
        SUPPORTED_TYPES.add(new Pair<>(QAInput.class, String.class));
        SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
    }

    /** {@inheritDoc} */
    @Override
    public Set<Pair<Type, Type>> getSupportedTypes() {
        return SUPPORTED_TYPES;
    }

    /** {@inheritDoc} */
    @Override
    @SuppressWarnings("unchecked")
    public <I, O> Translator<I, O> newInstance(
            Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
            throws TranslateException {
        Path modelPath = model.getModelPath();
        try {
            // Build the tokenizer from the model directory and the user-supplied arguments.
            HuggingFaceTokenizer hfTokenizer =
                    HuggingFaceTokenizer.builder(arguments)
                            .optTokenizerPath(modelPath)
                            .optManager(model.getNDManager())
                            .build();
            QuestionAnsweringTranslator qaTranslator =
                    QuestionAnsweringTranslator.builder(hfTokenizer, arguments).build();
            if (input == QAInput.class && output == String.class) {
                return (Translator<I, O>) qaTranslator;
            }
            if (input == Input.class && output == Output.class) {
                // Wrap for serving-style raw Input/Output handling.
                return (Translator<I, O>) new QaServingTranslator(qaTranslator);
            }
            throw new IllegalArgumentException("Unsupported input/output types.");
        } catch (IOException e) {
            throw new TranslateException("Failed to load tokenizer.", e);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/SparseRetrievalTranslator.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.nlp.EmbeddingOutput;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.nn.Activation;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.io.InputStream;
import java.nio.FloatBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * The translator handles sparse retrieval for Huggingface text embedding model.
 *
 * <p>The input text is tokenized and run through the model; each token's last hidden state is
 * projected to a single sparse weight via a learned linear layer followed by ReLU. Weights for
 * special tokens and non-positive weights are dropped. Optionally, the dense embedding computed
 * by the wrapped {@link TextEmbeddingTranslator} is returned as well.
 */
public class SparseRetrievalTranslator implements Translator<String, EmbeddingOutput> {
// Tokenizer special-token names whose ids are excluded from the sparse weights.
private static final String[] SPECIAL_TOKENS = {
"cls_token", "eos_token", "pad_token", "unk_token"
};
private HuggingFaceTokenizer tokenizer;
// Wrapped dense-embedding translator, only exercised when returnDenseEmbedding is set.
private TextEmbeddingTranslator translator;
private boolean includeTokenTypes;
private boolean int32;
private boolean returnDenseEmbedding;
// Ids of the SPECIAL_TOKENS above; their weights are filtered out of the output.
private Set<Long> unusedTokens;
// Location of the serialized sparse linear layer; resolved against the model dir if relative.
private String sparseLinear;
private NDList sparseLinearModel;
SparseRetrievalTranslator(Builder builder) {
this.tokenizer = builder.tokenizer;
this.translator = builder.baseBuilder.build();
this.includeTokenTypes = builder.baseBuilder.includeTokenTypes;
this.int32 = builder.baseBuilder.int32;
this.returnDenseEmbedding = builder.returnDenseEmbedding;
this.sparseLinear = builder.sparseLinear;
// Pre-compute the special-token ids once so they can be filtered out cheaply per token.
Encoding encoding = tokenizer.encode(SPECIAL_TOKENS);
unusedTokens = Arrays.stream(encoding.getIds()).boxed().collect(Collectors.toSet());
}
/** {@inheritDoc} */
@Override
public void prepare(TranslatorContext ctx) throws Exception {
// NOTE(review): the sub-manager is created even when nothing is loaded below; confirm its
// lifetime is tied to the predictor manager so the loaded weights stay valid.
NDManager manager = ctx.getPredictorManager().newSubManager();
if (returnDenseEmbedding) {
translator.prepare(ctx);
}
if (sparseLinear != null) {
Path file = Paths.get(sparseLinear);
if (!file.isAbsolute()) {
// Relative locations are resolved against the model directory.
file = ctx.getModel().getModelPath().resolve(file);
}
if (Files.notExists(file)) {
throw new TranslateException("sparseLinear file does not exist: " + sparseLinear);
}
try (InputStream is = Files.newInputStream(file)) {
sparseLinearModel = NDList.decode(manager, is);
}
}
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, String input) {
// Single input is handled as a batch of one.
return batchProcessInput(ctx, Collections.singletonList(input));
}
/** {@inheritDoc} */
@Override
public NDList batchProcessInput(TranslatorContext ctx, List<String> inputs) {
NDManager manager = ctx.getNDManager();
Encoding[] encodings = tokenizer.batchEncode(inputs);
NDList list = Encoding.toNDList(encodings, manager, includeTokenTypes, int32);
// Stash the encodings and the attention mask (index 1) for use in processOutput.
ctx.setAttachment("encodings", encodings);
ctx.setAttachment("attentionMask", list.get(1));
return list;
}
/** {@inheritDoc} */
@Override
public EmbeddingOutput processOutput(TranslatorContext ctx, NDList list) {
return Objects.requireNonNull(batchProcessOutput(ctx, list)).get(0);
}
/** {@inheritDoc} */
@Override
public List<EmbeddingOutput> batchProcessOutput(TranslatorContext ctx, NDList list) {
Encoding[] encodings = (Encoding[]) ctx.getAttachment("encodings");
int batchSize = encodings.length;
List<EmbeddingOutput> embeddings = new ArrayList<>();
NDArray lastHiddenState = list.get("last_hidden_state");
if (lastHiddenState == null) {
lastHiddenState = list.get(0); // only pytorch returns "last_hidden_state" name
}
// NOTE(review): assumes sparseLinearModel was loaded in prepare(); if sparseLinear were
// configured to null this NPEs here -- confirm that configuration is disallowed.
NDArray weight =
sparseLinearModel.get("weight").toType(lastHiddenState.getDataType(), false);
NDArray bias = sparseLinearModel.get("bias").toType(lastHiddenState.getDataType(), false);
// Project each token's hidden state down to a single sparse weight, then clamp with ReLU.
NDArray array =
lastHiddenState.getNDArrayInternal().linear(lastHiddenState, weight, bias).get(0);
array = Activation.relu(array);
NDArray sparseVecs = array.squeeze(-1);
float[] data = sparseVecs.toFloatArray();
// Walk the flattened (batch x seqLen) weight buffer in encoding order.
int index = 0;
for (Encoding encoding : encodings) {
long[] tokenIds = encoding.getIds();
EmbeddingOutput embedding = new EmbeddingOutput();
embeddings.add(embedding);
for (long idx : tokenIds) {
float w = data[index++];
// Drop special tokens and non-positive weights from the sparse representation.
if (!unusedTokens.contains(idx) && w > 0) {
embedding.addTokenWeights(String.valueOf(idx), w);
}
}
}
if (returnDenseEmbedding) {
NDArray attentionMask = (NDArray) ctx.getAttachment("attentionMask");
NDArray output = translator.processEmbedding(list, attentionMask);
// Slice the flat float buffer into one dense embedding per batch element.
FloatBuffer fb = output.toByteBuffer().asFloatBuffer();
int denseEmbeddingSize = fb.remaining() / batchSize;
for (EmbeddingOutput embedding : embeddings) {
float[] buf = new float[denseEmbeddingSize];
fb.get(buf);
embedding.setDenseEmbedding(buf);
}
}
return embeddings;
}
/**
 * Creates a builder to build a {@code SparseRetrievalTranslator}.
 *
 * @param tokenizer the tokenizer
 * @param arguments the models' arguments
 * @return a new builder
 */
public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
Builder builder = new Builder(tokenizer);
builder.configure(arguments);
return builder;
}
/** The builder for sparse retrieval translator. */
public static final class Builder {
HuggingFaceTokenizer tokenizer;
// Builder of the wrapped dense TextEmbeddingTranslator; shares the model arguments.
TextEmbeddingTranslator.Builder baseBuilder;
boolean returnDenseEmbedding;
String sparseLinear;
Builder(HuggingFaceTokenizer tokenizer) {
this.tokenizer = tokenizer;
baseBuilder = TextEmbeddingTranslator.builder(tokenizer);
sparseLinear = "sparse_linear.safetensors";
}
/**
 * Sets whether the dense embedding is returned as well for the {@link Translator}.
 *
 * @param returnDenseEmbedding true to output dense embedding
 * @return this builder
 */
public Builder optReturnDenseEmbedding(boolean returnDenseEmbedding) {
this.returnDenseEmbedding = returnDenseEmbedding;
return this;
}
/**
 * Sets the sparse linear layer model file for the {@link Translator}.
 *
 * @param sparseLinear path to sparse linear layer model file
 * @return this builder
 */
public Builder optSparseLinear(String sparseLinear) {
this.sparseLinear = sparseLinear;
return this;
}
/**
 * Configures the builder with the model arguments.
 *
 * @param arguments the model arguments
 */
public void configure(Map<String, ?> arguments) {
baseBuilder.configure(arguments);
optReturnDenseEmbedding(
ArgumentsUtil.booleanValue(arguments, "returnDenseEmbedding", false));
optSparseLinear(ArgumentsUtil.stringValue(arguments, "sparseLinear", sparseLinear));
}
/**
 * Builds the translator.
 *
 * @return the new translator
 * @throws IOException if I/O error occurs
 */
public SparseRetrievalTranslator build() throws IOException {
return new SparseRetrievalTranslator(this);
}
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/TextClassificationTranslator.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Classifications;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.JsonUtils;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * The translator for Huggingface text classification model.
 *
 * <p>Tokenizes the input text, runs the model, and converts the output logits into
 * {@link Classifications} using the label mapping read from the model's {@code config.json}.
 */
public class TextClassificationTranslator implements Translator<String, Classifications> {
private HuggingFaceTokenizer tokenizer;
private boolean includeTokenTypes;
private boolean int32;
private Batchifier batchifier;
// Parsed config.json; supplies id2label and the problem type used to pick sigmoid/softmax.
private PretrainedConfig config;
TextClassificationTranslator(
HuggingFaceTokenizer tokenizer,
boolean includeTokenTypes,
boolean int32,
Batchifier batchifier) {
this.tokenizer = tokenizer;
this.includeTokenTypes = includeTokenTypes;
this.int32 = int32;
this.batchifier = batchifier;
}
/** {@inheritDoc} */
@Override
public Batchifier getBatchifier() {
return batchifier;
}
/** {@inheritDoc} */
@Override
public void prepare(TranslatorContext ctx) throws IOException {
// Load config.json from the model directory to obtain labels and problem type.
Path path = ctx.getModel().getModelPath();
Path file = path.resolve("config.json");
try (Reader reader = Files.newBufferedReader(file)) {
config = JsonUtils.GSON.fromJson(reader, PretrainedConfig.class);
}
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, String input) {
Encoding encoding = tokenizer.encode(input);
return encoding.toNDList(ctx.getNDManager(), includeTokenTypes, int32);
}
/** {@inheritDoc} */
@Override
public NDList batchProcessInput(TranslatorContext ctx, List<String> inputs) {
NDManager manager = ctx.getNDManager();
Encoding[] encodings = tokenizer.batchEncode(inputs);
NDList[] batch = new NDList[encodings.length];
for (int i = 0; i < encodings.length; ++i) {
batch[i] = encodings[i].toNDList(manager, includeTokenTypes, int32);
}
return batchifier.batchify(batch);
}
/** {@inheritDoc} */
@Override
public Classifications processOutput(TranslatorContext ctx, NDList list) {
return toClassifications(list);
}
/** {@inheritDoc} */
@Override
public List<Classifications> batchProcessOutput(TranslatorContext ctx, NDList list) {
// Unbatchify so each element is scored individually by toClassifications.
NDList[] batches = batchifier.unbatchify(list);
List<Classifications> ret = new ArrayList<>(batches.length);
for (NDList batch : batches) {
ret.add(toClassifications(batch));
}
return ret;
}
// Converts raw logits into Classifications ordered by descending probability.
private Classifications toClassifications(NDList list) {
NDArray logits = list.get(0);
int size = config.id2label.size();
// Multi-label (or single-output) heads use sigmoid; multi-class heads use softmax.
if ("multi_label_classification".equals(config.problemType) || size == 1) {
logits = logits.getNDArrayInternal().sigmoid();
} else if ("single_label_classification".equals(config.problemType) || size > 1) {
// NOTE(review): softmax over axis 0 assumes a 1-D (numClasses,) vector; batched output
// reaches here only after unbatchify, so each slice should be 1-D -- confirm.
logits = logits.softmax(0);
}
// argSort with ascending=false orders indices from highest to lowest probability.
long[] indices = logits.argSort(-1, false).toLongArray();
float[] buf = logits.toFloatArray();
List<String> classes = new ArrayList<>(size);
List<Double> probabilities = new ArrayList<>(size);
for (long l : indices) {
int index = Math.toIntExact(l);
classes.add(config.id2label.get(String.valueOf(index)));
probabilities.add((double) buf[index]);
}
return new Classifications(classes, probabilities);
}
/**
 * Creates a builder to build a {@code TextClassificationTranslator}.
 *
 * @param tokenizer the tokenizer
 * @return a new builder
 */
public static Builder builder(HuggingFaceTokenizer tokenizer) {
return new Builder(tokenizer);
}
/**
 * Creates a builder to build a {@code TextClassificationTranslator}.
 *
 * @param tokenizer the tokenizer
 * @param arguments the models' arguments
 * @return a new builder
 */
public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
Builder builder = builder(tokenizer);
builder.configure(arguments);
return builder;
}
/** The builder for text classification translator. */
public static final class Builder {
private HuggingFaceTokenizer tokenizer;
private boolean includeTokenTypes;
private boolean int32;
private Batchifier batchifier = Batchifier.STACK;
Builder(HuggingFaceTokenizer tokenizer) {
this.tokenizer = tokenizer;
}
/**
 * Sets if include token types for the {@link Translator}.
 *
 * @param includeTokenTypes true to include token types
 * @return this builder
 */
public Builder optIncludeTokenTypes(boolean includeTokenTypes) {
this.includeTokenTypes = includeTokenTypes;
return this;
}
/**
 * Sets if use int32 datatype for the {@link Translator}.
 *
 * @param int32 true to use int32 datatype for token ids
 * @return this builder
 */
public Builder optInt32(boolean int32) {
this.int32 = int32;
return this;
}
/**
 * Sets the {@link Batchifier} for the {@link Translator}.
 *
 * @param batchifier the {@link Batchifier} to use for batching inputs
 * @return this builder
 */
public Builder optBatchifier(Batchifier batchifier) {
this.batchifier = batchifier;
return this;
}
/**
 * Configures the builder with the model arguments.
 *
 * @param arguments the model arguments
 */
public void configure(Map<String, ?> arguments) {
optIncludeTokenTypes(ArgumentsUtil.booleanValue(arguments, "includeTokenTypes"));
optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
String batchifierStr = ArgumentsUtil.stringValue(arguments, "batchifier", "stack");
optBatchifier(Batchifier.fromString(batchifierStr));
}
/**
 * Builds the translator.
 *
 * @return the new translator
 */
public TextClassificationTranslator build() {
return new TextClassificationTranslator(
tokenizer, includeTokenTypes, int32, batchifier);
}
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/TextClassificationTranslatorFactory.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Classifications;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.translator.CrossEncoderServingTranslator;
import ai.djl.modality.nlp.translator.TextClassificationServingTranslator;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import ai.djl.util.StringPair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link TextClassificationTranslator} instance. */
public class TextClassificationTranslatorFactory implements TranslatorFactory, Serializable {
private static final long serialVersionUID = 1L;
private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();
static {
SUPPORTED_TYPES.add(new Pair<>(String.class, Classifications.class));
SUPPORTED_TYPES.add(new Pair<>(StringPair.class, float[].class));
SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
}
/** {@inheritDoc} */
@Override
public Set<Pair<Type, Type>> getSupportedTypes() {
return SUPPORTED_TYPES;
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public <I, O> Translator<I, O> newInstance(
Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
throws TranslateException {
Path modelPath = model.getModelPath();
try {
HuggingFaceTokenizer tokenizer =
HuggingFaceTokenizer.builder(arguments)
.optTokenizerPath(modelPath)
.optManager(model.getNDManager())
.build();
if (ArgumentsUtil.booleanValue(arguments, "reranking")) {
CrossEncoderTranslator translator =
CrossEncoderTranslator.builder(tokenizer, arguments).build();
if (input == StringPair.class && output == float[].class) {
return (Translator<I, O>) translator;
} else if (input == Input.class && output == Output.class) {
return (Translator<I, O>) new CrossEncoderServingTranslator(translator);
}
throw new IllegalArgumentException("Unsupported input/output types.");
}
TextClassificationTranslator translator =
TextClassificationTranslator.builder(tokenizer, arguments).build();
if (input == String.class && output == Classifications.class) {
return (Translator<I, O>) translator;
} else if (input == Input.class && output == Output.class) {
return (Translator<I, O>) new TextClassificationServingTranslator(translator);
}
throw new IllegalArgumentException("Unsupported input/output types.");
} catch (IOException e) {
throw new TranslateException("Failed to load tokenizer.", e);
}
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/TextEmbeddingTranslator.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/** The translator for Huggingface text embedding model. */
public class TextEmbeddingTranslator implements Translator<String, float[]> {
private static final int[] AXIS = {-2};
private HuggingFaceTokenizer tokenizer;
private Batchifier batchifier;
private boolean normalize;
private String pooling;
private boolean includeTokenTypes;
private boolean int32;
private String dense;
private String denseActivation;
private String layerNorm;
private NDList denseModel;
private NDList layerNormModel;
/** Constructs the translator from the configured builder state. */
TextEmbeddingTranslator(Builder builder) {
this.tokenizer = builder.tokenizer;
this.batchifier = builder.batchifier;
this.pooling = builder.pooling;
this.normalize = builder.normalize;
this.includeTokenTypes = builder.includeTokenTypes;
this.int32 = builder.int32;
this.dense = builder.dense;
this.denseActivation = builder.denseActivation;
this.layerNorm = builder.layerNorm;
}
/** {@inheritDoc} */
@Override
public Batchifier getBatchifier() {
// Batching strategy is chosen via Builder.optBatchifier (STACK by default).
return batchifier;
}
/** {@inheritDoc} */
@Override
public void prepare(TranslatorContext ctx) throws Exception {
    NDManager manager = ctx.getPredictorManager().newSubManager();
    // Both optional post-processing layers are loaded with identical logic.
    denseModel = loadWeights(ctx, manager, dense);
    layerNormModel = loadWeights(ctx, manager, layerNorm);
}

/**
 * Loads an optional serialized {@code NDList} of layer weights.
 *
 * @param ctx the translator context, used to resolve relative paths against the model dir
 * @param manager the manager that will own the loaded arrays
 * @param location the configured file location, may be relative or {@code null}
 * @return the decoded weights, or {@code null} when not configured or the file is absent
 * @throws IOException if reading the weight file fails
 */
private static NDList loadWeights(TranslatorContext ctx, NDManager manager, String location)
        throws IOException {
    if (location == null) {
        return null;
    }
    Path file = Paths.get(location);
    if (!file.isAbsolute()) {
        file = ctx.getModel().getModelPath().resolve(file);
    }
    if (!Files.exists(file)) {
        // Silently skip a missing file, matching the original best-effort behavior.
        return null;
    }
    try (InputStream is = Files.newInputStream(file)) {
        return NDList.decode(manager, is);
    }
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, String input) {
    // Tokenize a single sentence and stash the attention mask for pooling later.
    Encoding enc = tokenizer.encode(input);
    NDList ndList = enc.toNDList(ctx.getNDManager(), includeTokenTypes, int32);
    ctx.setAttachment("attentionMask", ndList.get(1));
    return ndList;
}
/** {@inheritDoc} */
@Override
public NDList batchProcessInput(TranslatorContext ctx, List<String> inputs) {
    // Tokenize the whole batch at once and keep the padded attention mask around.
    Encoding[] batchEncodings = tokenizer.batchEncode(inputs);
    NDList ndList =
            Encoding.toNDList(batchEncodings, ctx.getNDManager(), includeTokenTypes, int32);
    ctx.setAttachment("attentionMask", ndList.get(1));
    return ndList;
}
/** {@inheritDoc} */
@Override
public float[] processOutput(TranslatorContext ctx, NDList list) {
    // Pool the hidden states with the stored attention mask and flatten to floats.
    NDArray mask = (NDArray) ctx.getAttachment("attentionMask");
    return processEmbedding(list, mask).toFloatArray();
}
/** {@inheritDoc} */
@Override
public List<float[]> batchProcessOutput(TranslatorContext ctx, NDList list) {
    NDArray mask = (NDArray) ctx.getAttachment("attentionMask");
    NDArray pooled = processEmbedding(list, mask);
    int batchSize = Math.toIntExact(pooled.size(0));
    float[] flat = pooled.toFloatArray();
    if (batchSize == 1) {
        return Collections.singletonList(flat);
    }
    // Split the flat buffer into one fixed-size embedding per batch element.
    int stride = flat.length / batchSize;
    List<float[]> result = new ArrayList<>(batchSize);
    for (int i = 0; i < batchSize; ++i) {
        float[] row = new float[stride];
        System.arraycopy(flat, i * stride, row, 0, stride);
        result.add(row);
    }
    return result;
}
/**
 * Pools the model's token embeddings into sentence embeddings and applies the optional
 * dense, layer-norm and normalization post-processing steps.
 *
 * @param list the model output; "last_hidden_state" when named, otherwise the first array
 * @param attentionMask the attention mask captured during input processing
 * @return the pooled (and optionally transformed/normalized) embedding
 */
NDArray processEmbedding(NDList list, NDArray attentionMask) {
NDArray embedding = list.get("last_hidden_state");
if (embedding == null) {
// For Onnx model, NDArray name is not present
embedding = list.head();
}
// Dispatch on the configured pooling mode (validated in Builder.optPoolingMode).
switch (pooling) {
case "mean":
embedding = meanPool(embedding, attentionMask, false);
break;
case "mean_sqrt_len":
embedding = meanPool(embedding, attentionMask, true);
break;
case "max":
embedding = maxPool(embedding, attentionMask);
break;
case "weightedmean":
embedding = weightedMeanPool(embedding, attentionMask);
break;
case "cls":
// Use the first (<CLS>) token's embedding.
embedding = embedding.get(new NDIndex(":, 0"));
break;
case "lasttoken":
embedding = lastTokenPool(embedding, attentionMask);
break;
default:
throw new AssertionError("Unexpected pooling mode: " + pooling);
}
// Optional dense projection; keys "linear.weight"/"linear.bias" come from the loaded file.
if (denseModel != null) {
NDArray weight = denseModel.get("linear.weight");
NDArray bias = denseModel.get("linear.bias");
embedding = embedding.getNDArrayInternal().linear(embedding, weight, bias).get(0);
if ("Tanh".equalsIgnoreCase(denseActivation)) {
embedding = embedding.tanh();
}
}
// Optional layer normalization using weights loaded in prepare().
if (layerNormModel != null) {
NDArray weight = layerNormModel.get("norm.weight");
NDArray bias = layerNormModel.get("norm.bias");
Shape shape = weight.getShape();
embedding =
embedding
.getNDArrayInternal()
.layerNorm(embedding, shape, weight, bias, 1e-5f)
.get(0);
}
// Optional L2 normalization over the last axis.
if (normalize) {
embedding = embedding.normalize(2, -1);
}
return embedding;
}
/**
 * Mean-pools token embeddings, weighting by the attention mask.
 *
 * @param embeddings the token embeddings
 * @param attentionMask the attention mask to exclude padding tokens
 * @param sqrt true to divide by the square root of the token count (mean_sqrt_len)
 * @return the pooled embedding
 */
private static NDArray meanPool(NDArray embeddings, NDArray attentionMask, boolean sqrt) {
    long[] dims = embeddings.getShape().getShape();
    NDArray mask = attentionMask.expandDims(-1).broadcast(dims);
    NDArray maskedSum = embeddings.mul(mask).sum(AXIS);
    // Clip the token count away from zero to avoid division by zero on empty masks.
    NDArray tokenCount = mask.sum(AXIS).clip(1e-9f, 1e12f);
    return sqrt ? maskedSum.div(tokenCount.sqrt()) : maskedSum.div(tokenCount);
}
/**
 * Max-pools token embeddings, ignoring padding positions.
 *
 * @param embeddings the token embeddings
 * @param inputAttentionMask the attention mask marking real tokens
 * @return the element-wise maximum over the sequence axis
 */
private static NDArray maxPool(NDArray embeddings, NDArray inputAttentionMask) {
long[] shape = embeddings.getShape().getShape();
inputAttentionMask = inputAttentionMask.expandDims(-1).broadcast(shape);
inputAttentionMask = inputAttentionMask.eq(0);
// Work on a copy: set() mutates in place and the model output may be shared/read-only.
embeddings = embeddings.duplicate();
embeddings.set(inputAttentionMask, -1e9); // Set padding tokens to large negative value
return embeddings.max(AXIS, false);
}
/**
 * Pools token embeddings with a position-weighted mean (later positions weigh more).
 *
 * <p>NOTE(review): the position weights are built from {@code shape[0]}, the FIRST dimension
 * of the embedding tensor. For a batched (batch, seq, hidden) input that is the batch size
 * rather than the sequence length -- confirm the intended axis for batched inputs.
 */
private static NDArray weightedMeanPool(NDArray embeddings, NDArray attentionMask) {
long[] shape = embeddings.getShape().getShape();
NDArray weight = embeddings.getManager().arange(1, shape[0] + 1);
weight = weight.expandDims(-1).broadcast(shape);
attentionMask = attentionMask.expandDims(-1).broadcast(shape).mul(weight);
NDArray maskSum = attentionMask.sum(AXIS);
NDArray embeddingSum = embeddings.mul(attentionMask).sum(AXIS);
return embeddingSum.div(maskSum);
}
/**
 * Pools by taking each sequence's last non-padding token embedding.
 *
 * <p>When every sequence's final mask position is set, the input is left-padded and the last
 * column is taken directly. Otherwise the last real token is located from the mask sums.
 *
 * <p>NOTE(review): in the right-padding branch, {@code getLong()} reads a single value from
 * the per-sequence length tensor, which looks correct only for batch size 1 -- verify how
 * batched right-padded inputs are handled.
 */
private static NDArray lastTokenPool(NDArray embeddings, NDArray attentionMask) {
long sum = attentionMask.get(":, -1").sum().getLong();
if (sum == attentionMask.getShape().get(0)) {
// left padding
return embeddings.get(":, -1");
}
long sequenceLength = attentionMask.sum(new int[] {1}).getLong() - 1;
long batchSize = embeddings.getShape().get(0);
embeddings = embeddings.get(":, " + sequenceLength);
NDArray index = embeddings.getManager().arange(batchSize);
return embeddings.get(index);
}
/**
 * Returns a fresh {@link Builder} backed by the given tokenizer.
 *
 * @param tokenizer the tokenizer used by the translator being built
 * @return a new {@code Builder} instance
 */
public static Builder builder(HuggingFaceTokenizer tokenizer) {
    Builder newBuilder = new Builder(tokenizer);
    return newBuilder;
}
/**
 * Returns a fresh {@link Builder} pre-configured from the given model arguments.
 *
 * @param tokenizer the tokenizer used by the translator being built
 * @param arguments the model's arguments applied via {@link Builder#configure(Map)}
 * @return a new, configured {@code Builder} instance
 */
public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
    Builder configured = new Builder(tokenizer);
    configured.configure(arguments);
    return configured;
}
    /** The builder for {@code TextEmbeddingTranslator}. */
    public static final class Builder {

        // Package-private so the enclosing translator's constructor can read them directly.
        HuggingFaceTokenizer tokenizer;
        Batchifier batchifier = Batchifier.STACK;
        boolean normalize = true;
        String pooling = "mean";
        boolean includeTokenTypes;
        boolean int32;
        String dense;
        String denseActivation;
        String layerNorm;

        Builder(HuggingFaceTokenizer tokenizer) {
            this.tokenizer = tokenizer;
        }

        /**
         * Sets the {@link Batchifier} for the {@link Translator}.
         *
         * @param batchifier the {@link Batchifier} to use (defaults to {@code Batchifier.STACK})
         * @return this builder
         */
        public Builder optBatchifier(Batchifier batchifier) {
            this.batchifier = batchifier;
            return this;
        }

        /**
         * Sets the {@code normalize} for the {@link Translator}.
         *
         * @param normalize true to normalize the embeddings (default)
         * @return this builder
         */
        public Builder optNormalize(boolean normalize) {
            this.normalize = normalize;
            return this;
        }

        /**
         * Sets the pooling for the {@link Translator}.
         *
         * @param poolingMode the pooling mode, one of mean, max, cls, mean_sqrt_len, weightedmean
         *     or lasttoken (defaults to mean)
         * @return this builder
         */
        public Builder optPoolingMode(String poolingMode) {
            if (!"mean".equals(poolingMode)
                    && !"max".equals(poolingMode)
                    && !"cls".equals(poolingMode)
                    && !"mean_sqrt_len".equals(poolingMode)
                    && !"lasttoken".equals(poolingMode)
                    && !"weightedmean".equals(poolingMode)) {
                throw new IllegalArgumentException(
                        "Invalid pooling model, must be one of [mean, max, cls, mean_sqrt_len,"
                                + " weightedmean, lasttoken].");
            }
            this.pooling = poolingMode;
            return this;
        }

        /**
         * Sets if include token types for the {@link Translator}.
         *
         * @param includeTokenTypes true to include token types
         * @return this builder
         */
        public Builder optIncludeTokenTypes(boolean includeTokenTypes) {
            this.includeTokenTypes = includeTokenTypes;
            return this;
        }

        /**
         * Sets if use int32 datatype for the {@link Translator}.
         *
         * @param int32 true to use int32 datatype for input tensors
         * @return this builder
         */
        public Builder optInt32(boolean int32) {
            this.int32 = int32;
            return this;
        }

        /**
         * Sets the dense layer model file for the {@link Translator}.
         *
         * @param dense path to dense layer model file
         * @return this builder
         */
        public Builder optDense(String dense) {
            this.dense = dense;
            return this;
        }

        /**
         * Sets the dense activation function for the {@link Translator}.
         *
         * @param denseActivation the dense activation function
         * @return this builder
         */
        public Builder optDenseActivation(String denseActivation) {
            this.denseActivation = denseActivation;
            return this;
        }

        /**
         * Sets the LayerNorm model for the {@link Translator}.
         *
         * @param layerNorm path to LayerNorm model
         * @return this builder
         */
        public Builder optLayerNorm(String layerNorm) {
            this.layerNorm = layerNorm;
            return this;
        }

        /**
         * Configures the builder with the model arguments.
         *
         * @param arguments the model arguments
         */
        public void configure(Map<String, ?> arguments) {
            String batchifierStr = ArgumentsUtil.stringValue(arguments, "batchifier", "stack");
            optBatchifier(Batchifier.fromString(batchifierStr));
            optNormalize(ArgumentsUtil.booleanValue(arguments, "normalize", true));
            optPoolingMode(ArgumentsUtil.stringValue(arguments, "pooling", "mean"));
            optIncludeTokenTypes(ArgumentsUtil.booleanValue(arguments, "includeTokenTypes"));
            optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
            optDense(ArgumentsUtil.stringValue(arguments, "dense"));
            optDenseActivation(ArgumentsUtil.stringValue(arguments, "denseActivation"));
            optLayerNorm(ArgumentsUtil.stringValue(arguments, "layerNorm"));
        }

        /**
         * Builds the translator.
         *
         * @return the new translator
         */
        public TextEmbeddingTranslator build() {
            return new TextEmbeddingTranslator(this);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/TextEmbeddingTranslatorFactory.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.EmbeddingOutput;
import ai.djl.modality.nlp.translator.CrossEncoderServingTranslator;
import ai.djl.modality.nlp.translator.SparseRetrievalServingTranslator;
import ai.djl.modality.nlp.translator.TextEmbeddingServingTranslator;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import ai.djl.util.StringPair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link TextEmbeddingTranslator} instance. */
/** A {@link TranslatorFactory} that creates a {@link TextEmbeddingTranslator} instance. */
public class TextEmbeddingTranslatorFactory implements TranslatorFactory, Serializable {

    private static final long serialVersionUID = 1L;

    private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();

    static {
        SUPPORTED_TYPES.add(new Pair<>(String.class, float[].class));
        SUPPORTED_TYPES.add(new Pair<>(String.class, EmbeddingOutput.class));
        SUPPORTED_TYPES.add(new Pair<>(StringPair.class, float[].class));
        SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
    }

    /** {@inheritDoc} */
    @Override
    public Set<Pair<Type, Type>> getSupportedTypes() {
        return SUPPORTED_TYPES;
    }

    /** {@inheritDoc} */
    @Override
    @SuppressWarnings("unchecked")
    public <I, O> Translator<I, O> newInstance(
            Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
            throws TranslateException {
        // Input/Output means the translator is wrapped for serving
        boolean serving = input == Input.class && output == Output.class;
        try {
            HuggingFaceTokenizer tokenizer =
                    HuggingFaceTokenizer.builder(arguments)
                            .optTokenizerPath(model.getModelPath())
                            .optManager(model.getNDManager())
                            .build();
            if (ArgumentsUtil.booleanValue(arguments, "reranking")) {
                // cross-encoder reranking mode
                CrossEncoderTranslator reranker =
                        CrossEncoderTranslator.builder(tokenizer, arguments).build();
                if (serving) {
                    return (Translator<I, O>) new CrossEncoderServingTranslator(reranker);
                }
                if (input == StringPair.class && output == float[].class) {
                    return (Translator<I, O>) reranker;
                }
            } else if (ArgumentsUtil.booleanValue(arguments, "sparse")) {
                // sparse retrieval mode
                SparseRetrievalTranslator sparse =
                        SparseRetrievalTranslator.builder(tokenizer, arguments).build();
                if (serving) {
                    return (Translator<I, O>) new SparseRetrievalServingTranslator(sparse);
                }
                if (input == String.class && output == EmbeddingOutput.class) {
                    return (Translator<I, O>) sparse;
                }
            } else {
                // default dense text embedding mode
                TextEmbeddingTranslator embedder =
                        TextEmbeddingTranslator.builder(tokenizer, arguments).build();
                if (serving) {
                    return (Translator<I, O>) new TextEmbeddingServingTranslator(embedder);
                }
                if (input == String.class && output == float[].class) {
                    return (Translator<I, O>) embedder;
                }
            }
            throw new IllegalArgumentException("Unsupported input/output types.");
        } catch (IOException e) {
            throw new TranslateException("Failed to load tokenizer.", e);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/TokenClassificationTranslator.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.huggingface.tokenizers.jni.CharSpan;
import ai.djl.modality.nlp.translator.NamedEntity;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.JsonUtils;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/** The translator for Huggingface token classification model. */
public class TokenClassificationTranslator implements Translator<String, NamedEntity[]> {

    private HuggingFaceTokenizer tokenizer;
    // whether to include token type ids in the input NDList
    private boolean includeTokenTypes;
    // whether input id tensors use int32 instead of the default type
    private boolean int32;
    // whether to apply softmax to the raw model output before reading scores
    private boolean softmax;
    private Batchifier batchifier;
    // loaded from config.json in prepare(); provides the id2label mapping
    private PretrainedConfig config;

    TokenClassificationTranslator(
            HuggingFaceTokenizer tokenizer,
            boolean includeTokenTypes,
            boolean int32,
            boolean softmax,
            Batchifier batchifier) {
        this.tokenizer = tokenizer;
        this.includeTokenTypes = includeTokenTypes;
        this.int32 = int32;
        this.softmax = softmax;
        this.batchifier = batchifier;
    }

    /** {@inheritDoc} */
    @Override
    public Batchifier getBatchifier() {
        return batchifier;
    }

    /** {@inheritDoc} */
    @Override
    public void prepare(TranslatorContext ctx) throws IOException {
        // load the label mapping (id2label) from the model's config.json
        Path path = ctx.getModel().getModelPath();
        Path file = path.resolve("config.json");
        try (Reader reader = Files.newBufferedReader(file)) {
            config = JsonUtils.GSON.fromJson(reader, PretrainedConfig.class);
        }
    }

    /** {@inheritDoc} */
    @Override
    public NDList processInput(TranslatorContext ctx, String input) {
        Encoding encoding = tokenizer.encode(input);
        // keep the encoding so processOutput can map tokens back to character offsets
        ctx.setAttachment("encoding", encoding);
        return encoding.toNDList(ctx.getNDManager(), includeTokenTypes, int32);
    }

    /** {@inheritDoc} */
    @Override
    public NDList batchProcessInput(TranslatorContext ctx, List<String> inputs) {
        NDManager manager = ctx.getNDManager();
        Encoding[] encodings = tokenizer.batchEncode(inputs);
        ctx.setAttachment("encodings", encodings);
        NDList[] batch = new NDList[encodings.length];
        for (int i = 0; i < encodings.length; ++i) {
            batch[i] = encodings[i].toNDList(manager, includeTokenTypes, int32);
        }
        return batchifier.batchify(batch);
    }

    /** {@inheritDoc} */
    @Override
    public NamedEntity[] processOutput(TranslatorContext ctx, NDList list) {
        Encoding encoding = (Encoding) ctx.getAttachment("encoding");
        return toNamedEntities(encoding, list);
    }

    /** {@inheritDoc} */
    @Override
    public List<NamedEntity[]> batchProcessOutput(TranslatorContext ctx, NDList list) {
        NDList[] batch = batchifier.unbatchify(list);
        Encoding[] encodings = (Encoding[]) ctx.getAttachment("encodings");
        List<NamedEntity[]> ret = new ArrayList<>(batch.length);
        for (int i = 0; i < batch.length; ++i) {
            ret.add(toNamedEntities(encodings[i], batch[i]));
        }
        return ret;
    }

    /**
     * Creates a builder to build a {@code TokenClassificationTranslator}.
     *
     * @param tokenizer the tokenizer
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer) {
        return new Builder(tokenizer);
    }

    /**
     * Creates a builder to build a {@code TokenClassificationTranslator}.
     *
     * @param tokenizer the tokenizer
     * @param arguments the models' arguments
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
        Builder builder = builder(tokenizer);
        builder.configure(arguments);
        return builder;
    }

    /**
     * Converts per-token model output into named entities.
     *
     * <p>Tokens flagged by the special token mask and tokens whose best label is {@code "O"} (no
     * entity) are skipped.
     *
     * @param encoding the tokenizer encoding of the original input
     * @param list the model output; element 0 holds the per-token scores
     * @return the recognized entities
     */
    private NamedEntity[] toNamedEntities(Encoding encoding, NDList list) {
        long[] inputIds = encoding.getIds();
        CharSpan[] offsetMapping = encoding.getCharTokenSpans();
        long[] specialTokenMasks = encoding.getSpecialTokenMask();
        NDArray probabilities = list.get(0);
        if (softmax) {
            // normalize the raw logits along axis 1 into probabilities
            probabilities = probabilities.softmax(1);
        }
        List<NamedEntity> entities = new ArrayList<>();
        for (int i = 0; i < inputIds.length; ++i) {
            if (specialTokenMasks[i] != 0) {
                // skip special tokens such as [CLS]/[SEP]/padding
                continue;
            }
            // highest-scoring label index for this token, mapped to its label name
            int entityIdx = (int) probabilities.get(i).argMax().getLong();
            String entity = config.id2label.get(String.valueOf(entityIdx));
            if (!"O".equals(entity)) {
                float score = probabilities.get(i).getFloat(entityIdx);
                String word = encoding.getTokens()[i];
                int start = offsetMapping[i].getStart();
                int end = offsetMapping[i].getEnd();
                NamedEntity item = new NamedEntity(entity, score, i, word, start, end);
                entities.add(item);
            }
        }
        return entities.toArray(new NamedEntity[0]);
    }

    /** The builder for token classification translator. */
    public static final class Builder {

        private HuggingFaceTokenizer tokenizer;
        private boolean includeTokenTypes;
        private boolean int32;
        private boolean softmax = true;
        private Batchifier batchifier = Batchifier.STACK;

        Builder(HuggingFaceTokenizer tokenizer) {
            this.tokenizer = tokenizer;
        }

        /**
         * Sets if include token types for the {@link Translator}.
         *
         * @param includeTokenTypes true to include token types
         * @return this builder
         */
        public Builder optIncludeTokenTypes(boolean includeTokenTypes) {
            this.includeTokenTypes = includeTokenTypes;
            return this;
        }

        /**
         * Sets if use int32 datatype for the {@link Translator}.
         *
         * @param int32 true to use int32 datatype for input tensors
         * @return this builder
         */
        public Builder optInt32(boolean int32) {
            this.int32 = int32;
            return this;
        }

        /**
         * Sets if implement softmax operation for the {@link Translator}.
         *
         * @param softmax true to apply softmax to the model output (default)
         * @return this builder
         */
        public Builder optSoftmax(boolean softmax) {
            this.softmax = softmax;
            return this;
        }

        /**
         * Sets the {@link Batchifier} for the {@link Translator}.
         *
         * @param batchifier the {@link Batchifier} to use (defaults to {@code Batchifier.STACK})
         * @return this builder
         */
        public Builder optBatchifier(Batchifier batchifier) {
            this.batchifier = batchifier;
            return this;
        }

        /**
         * Configures the builder with the model arguments.
         *
         * @param arguments the model arguments
         */
        public void configure(Map<String, ?> arguments) {
            optIncludeTokenTypes(ArgumentsUtil.booleanValue(arguments, "includeTokenTypes"));
            optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
            optSoftmax(ArgumentsUtil.booleanValue(arguments, "softmax", true));
            String batchifierStr = ArgumentsUtil.stringValue(arguments, "batchifier", "stack");
            optBatchifier(Batchifier.fromString(batchifierStr));
        }

        /**
         * Builds the translator.
         *
         * @return the new translator
         */
        public TokenClassificationTranslator build() {
            return new TokenClassificationTranslator(
                    tokenizer, includeTokenTypes, int32, softmax, batchifier);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/TokenClassificationTranslatorFactory.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.translator.NamedEntity;
import ai.djl.modality.nlp.translator.TokenClassificationServingTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link TokenClassificationTranslator} instance. */
public class TokenClassificationTranslatorFactory implements TranslatorFactory, Serializable {
private static final long serialVersionUID = 1L;
private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();
static {
SUPPORTED_TYPES.add(new Pair<>(String.class, NamedEntity[].class));
SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
}
/** {@inheritDoc} */
@Override
public Set<Pair<Type, Type>> getSupportedTypes() {
return SUPPORTED_TYPES;
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public <I, O> Translator<I, O> newInstance(
Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
throws TranslateException {
Path modelPath = model.getModelPath();
try {
HuggingFaceTokenizer tokenizer =
HuggingFaceTokenizer.builder(arguments)
.optTokenizerPath(modelPath)
.optManager(model.getNDManager())
.build();
TokenClassificationTranslator translator =
TokenClassificationTranslator.builder(tokenizer, arguments).build();
if (input == String.class && output == NamedEntity[].class) {
return (Translator<I, O>) translator;
} else if (input == Input.class && output == Output.class) {
return (Translator<I, O>) new TokenClassificationServingTranslator(translator);
}
throw new IllegalArgumentException("Unsupported input/output types.");
} catch (IOException e) {
throw new TranslateException("Failed to load tokenizer.", e);
}
}
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/ZeroShotClassificationTranslator.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.ModelException;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.inference.Predictor;
import ai.djl.modality.nlp.translator.ZeroShotClassificationInput;
import ai.djl.modality.nlp.translator.ZeroShotClassificationOutput;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Batchifier;
import ai.djl.translate.NoBatchifyTranslator;
import ai.djl.translate.NoopTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.JsonUtils;
import ai.djl.util.Pair;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/** The translator for Huggingface zero-shot-classification model. */
public class ZeroShotClassificationTranslator
        implements NoBatchifyTranslator<ZeroShotClassificationInput, ZeroShotClassificationOutput> {

    private static final Logger logger =
            LoggerFactory.getLogger(ZeroShotClassificationTranslator.class);

    private HuggingFaceTokenizer tokenizer;
    // index of the "entailment" label in the model output
    private int entailmentId;
    // index of the "contradiction" label in the model output
    private int contradictionId;
    // whether to include token type ids in the input tensors
    private boolean tokenTypeId;
    // whether input id tensors use int32 instead of the default type
    private boolean int32;
    // internal predictor used to run one premise/hypothesis pair per candidate label
    private Predictor<NDList, NDList> predictor;

    ZeroShotClassificationTranslator(
            HuggingFaceTokenizer tokenizer, boolean tokenTypeId, boolean int32) {
        this.tokenizer = tokenizer;
        this.tokenTypeId = tokenTypeId;
        this.int32 = int32;
    }

    ZeroShotClassificationTranslator(
            HuggingFaceTokenizer tokenizer,
            boolean tokenTypeId,
            boolean int32,
            int entailmentId,
            int contradictionId) {
        this(tokenizer, tokenTypeId, int32);
        this.entailmentId = entailmentId;
        this.contradictionId = contradictionId;
    }

    /** {@inheritDoc} */
    @Override
    public void prepare(TranslatorContext ctx) throws IOException, ModelException {
        Model model = ctx.getModel();
        // raw NDList-to-NDList predictor; attached to the predictor manager so it is released
        // together with the outer predictor
        predictor = model.newPredictor(new NoopTranslator(null));
        ctx.getPredictorManager().attachInternal(NDManager.nextUid(), predictor);
        Path configFile = model.getModelPath().resolve("config.json");
        if (!Files.isRegularFile(configFile)) {
            // no config.json: keep the ids supplied via the builder
            return;
        }
        try (Reader reader = Files.newBufferedReader(configFile)) {
            JsonObject config = JsonUtils.GSON.fromJson(reader, JsonObject.class);
            if (config.has("label2id")) {
                // pick up entailment/contradiction indices from the model's label mapping
                JsonObject label2Id = config.getAsJsonObject("label2id");
                for (Map.Entry<String, JsonElement> entry : label2Id.entrySet()) {
                    String key = entry.getKey().toLowerCase(Locale.ROOT);
                    int value = entry.getValue().getAsInt();
                    if (key.startsWith("entail")) {
                        entailmentId = value;
                    } else if (key.startsWith("contra")) {
                        contradictionId = value;
                    }
                }
            }
            boolean inferredWithTokenType = false; // Default assumption
            if (config.has("type_vocab_size")) {
                // a type vocab larger than 1 means the model consumes token type ids
                JsonElement typeVocabSizeObj = config.get("type_vocab_size");
                if (typeVocabSizeObj.isJsonPrimitive()) {
                    int typeVocabSize = typeVocabSizeObj.getAsInt();
                    if (typeVocabSize > 1) {
                        inferredWithTokenType = true;
                    }
                }
            }
            if (!inferredWithTokenType && config.has("model_type")) {
                // fall back to well-known architectures that use token type ids
                String modelType = config.get("model_type").getAsString().toLowerCase(Locale.ROOT);
                if ("bert".equals(modelType)
                        || "albert".equals(modelType)
                        || "xlnet".equals(modelType)
                        || modelType.startsWith("deberta")) {
                    inferredWithTokenType = true;
                }
            }
            // NOTE(review): this overrides any tokenTypeId set via the builder whenever
            // config.json is present and parses -- confirm that is intended
            tokenTypeId = inferredWithTokenType;
        } catch (IOException | JsonParseException e) {
            // best effort: keep the builder/default ids if config.json cannot be parsed
            logger.error("Failed to read or parse config.json for label2id", e);
        }
    }

    /** {@inheritDoc} */
    @Override
    public NDList processInput(TranslatorContext ctx, ZeroShotClassificationInput input) {
        // tokenization is deferred to processOutput, where each candidate label becomes a
        // separate premise/hypothesis pass through the internal predictor
        ctx.setAttachment("input", input);
        return new NDList();
    }

    /** {@inheritDoc} */
    @Override
    public ZeroShotClassificationOutput processOutput(TranslatorContext ctx, NDList list)
            throws TranslateException {
        ZeroShotClassificationInput input =
                (ZeroShotClassificationInput) ctx.getAttachment("input");
        String template = input.getHypothesisTemplate();
        String[] candidates = input.getCandidates();
        if (candidates == null || candidates.length == 0) {
            throw new TranslateException("Missing candidates in input");
        }
        NDManager manager = ctx.getNDManager();
        NDList output = new NDList(candidates.length);
        // run one NLI forward pass per candidate: premise = input text,
        // hypothesis = template with the candidate substituted in
        for (String candidate : candidates) {
            String hypothesis = applyTemplate(template, candidate);
            Encoding encoding = tokenizer.encode(input.getText(), hypothesis);
            NDList in = encoding.toNDList(manager, tokenTypeId, int32);
            NDList batch = Batchifier.STACK.batchify(new NDList[] {in});
            output.add(predictor.predict(batch).get(0));
        }
        // one row of logits per candidate
        NDArray combinedLogits = NDArrays.concat(output);
        String[] finalLabels;
        double[] finalScores;
        if (input.isMultiLabel() || candidates.length == 1) {
            // multi-label: score each candidate independently
            NDArray entailmentScores;
            if (combinedLogits.getShape().get(1) == 2) {
                // Binary classification: [not entail, entail]
                NDArray probs = combinedLogits.softmax(1);
                entailmentScores = probs.get(":, " + entailmentId);
            } else {
                // 3-class NLI output (e.g., entailment, neutral, contradiction):
                // softmax over [contradiction, entailment] only, keep P(entailment)
                NDArray entailContrLogits =
                        combinedLogits.get(
                                new NDIndex(
                                        ":, {}",
                                        manager.create(new int[] {contradictionId, entailmentId})));
                NDArray scoresProbs = entailContrLogits.softmax(1);
                entailmentScores = scoresProbs.get(":, 1");
            }
            float[] floatScores = entailmentScores.toFloatArray();
            List<Pair<Double, String>> pairs = new ArrayList<>();
            for (int i = 0; i < floatScores.length; i++) {
                Pair<Double, String> pair = new Pair<>((double) floatScores[i], candidates[i]);
                pairs.add(pair);
            }
            // sort candidates by descending score
            pairs.sort(
                    Comparator.comparingDouble((Pair<Double, String> e) -> e.getKey()).reversed());
            finalLabels = new String[candidates.length];
            finalScores = new double[candidates.length];
            for (int i = 0; i < candidates.length; i++) {
                finalLabels[i] = pairs.get(i).getValue();
                finalScores[i] = pairs.get(i).getKey();
            }
        } else { // Single-label classification (len(candidate_labels) > 1 and not multi_label)
            // softmax the entailment logits across candidates
            NDArray entailLogits = combinedLogits.get(":, " + entailmentId);
            NDArray exp = entailLogits.exp();
            NDArray sum = exp.sum();
            NDArray normalizedScores = exp.div(sum); // Probabilities sum to 1 across candidates
            long[] indices = normalizedScores.argSort(-1, false).toLongArray();
            float[] probabilities = normalizedScores.toFloatArray();
            finalLabels = new String[candidates.length];
            finalScores = new double[candidates.length];
            for (int i = 0; i < finalLabels.length; ++i) {
                int index = (int) indices[i];
                finalLabels[i] = candidates[index];
                finalScores[i] = probabilities[index];
            }
        }
        return new ZeroShotClassificationOutput(input.getText(), finalLabels, finalScores);
    }

    /**
     * Substitutes {@code arg} into the {@code "{}"} placeholder of the template; if the template
     * has no placeholder, {@code arg} is appended.
     *
     * @param template the hypothesis template
     * @param arg the candidate label
     * @return the hypothesis sentence
     */
    private String applyTemplate(String template, String arg) {
        int pos = template.indexOf("{}");
        if (pos == -1) {
            return template + arg;
        }
        int len = template.length();
        return template.substring(0, pos) + arg + template.substring(pos + 2, len);
    }

    /**
     * Creates a builder to build a {@code ZeroShotClassificationTranslator}.
     *
     * @param tokenizer the tokenizer
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer) {
        return new Builder(tokenizer);
    }

    /**
     * Creates a builder to build a {@code ZeroShotClassificationTranslator}.
     *
     * @param tokenizer the tokenizer
     * @param arguments the models' arguments
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
        Builder builder = builder(tokenizer);
        builder.configure(arguments);
        return builder;
    }

    /** The builder for zero-shot classification translator. */
    public static final class Builder {

        private HuggingFaceTokenizer tokenizer;
        private boolean tokenTypeId;
        private boolean int32;
        private int entailmentId = 2;
        private int contradictionId;

        Builder(HuggingFaceTokenizer tokenizer) {
            this.tokenizer = tokenizer;
        }

        /**
         * Specifies whether to include token type IDs in the input tensors.
         *
         * @param tokenTypeId {@code true} to include token type IDs, {@code false} to omit them
         * @return this builder instance for method chaining
         */
        public Builder optTokenTypeId(boolean tokenTypeId) {
            this.tokenTypeId = tokenTypeId;
            return this;
        }

        /**
         * Specifies whether to use int32 as the data type for input token tensors.
         *
         * @param int32 {@code true} to use int32 inputs, {@code false} to use the default type
         * @return this builder instance for method chaining
         */
        public Builder optInt32(boolean int32) {
            this.int32 = int32;
            return this;
        }

        /**
         * Optional: Set custom entailment ID if different from default (2). This value usually
         * comes from the model's `config.json` `label2id` mapping.
         *
         * @param entailmentId The index for the 'entailment' label.
         * @return this builder
         */
        public Builder optEntailmentId(int entailmentId) {
            this.entailmentId = entailmentId;
            return this;
        }

        /**
         * Optional: Set custom contradiction ID if different from default (0). This value usually
         * comes from the model's `config.json` `label2id` mapping.
         *
         * @param contradictionId The index for the 'contradiction' label.
         * @return this builder
         */
        public Builder optContradictionId(int contradictionId) {
            this.contradictionId = contradictionId;
            return this;
        }

        /**
         * Configures the builder with the model arguments.
         *
         * <p>Note: only {@code tokenTypeId} and {@code int32} are read from the arguments;
         * entailment/contradiction ids must be set via the builder setters.
         *
         * @param arguments the model arguments
         */
        public void configure(Map<String, ?> arguments) {
            optTokenTypeId(ArgumentsUtil.booleanValue(arguments, "tokenTypeId"));
            optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
        }

        /**
         * Builds the translator.
         *
         * @return the new translator
         * @throws IOException if I/O error occurs
         */
        public ZeroShotClassificationTranslator build() throws IOException {
            return new ZeroShotClassificationTranslator(
                    tokenizer, tokenTypeId, int32, entailmentId, contradictionId);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/ZeroShotClassificationTranslatorFactory.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.translator.ZeroShotClassificationInput;
import ai.djl.modality.nlp.translator.ZeroShotClassificationOutput;
import ai.djl.modality.nlp.translator.ZeroShotClassificationServingTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link ZeroShotClassificationTranslator} instance. */
/** A {@link TranslatorFactory} that creates a {@link ZeroShotClassificationTranslator} instance. */
public class ZeroShotClassificationTranslatorFactory implements TranslatorFactory, Serializable {

    private static final long serialVersionUID = 1L;

    private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();

    static {
        SUPPORTED_TYPES.add(
                new Pair<>(ZeroShotClassificationInput.class, ZeroShotClassificationOutput.class));
        SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
    }

    /** {@inheritDoc} */
    @Override
    public Set<Pair<Type, Type>> getSupportedTypes() {
        return SUPPORTED_TYPES;
    }

    /** {@inheritDoc} */
    @Override
    @SuppressWarnings("unchecked")
    public <I, O> Translator<I, O> newInstance(
            Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
            throws TranslateException {
        try {
            HuggingFaceTokenizer tokenizer =
                    HuggingFaceTokenizer.builder(arguments)
                            .optTokenizerPath(model.getModelPath())
                            .optManager(model.getNDManager())
                            .build();
            ZeroShotClassificationTranslator translator =
                    ZeroShotClassificationTranslator.builder(tokenizer, arguments).build();
            // Input/Output means the translator is wrapped for serving
            if (input == Input.class && output == Output.class) {
                return (Translator<I, O>) new ZeroShotClassificationServingTranslator(translator);
            }
            if (input == ZeroShotClassificationInput.class
                    && output == ZeroShotClassificationOutput.class) {
                return (Translator<I, O>) translator;
            }
            throw new IllegalArgumentException("Unsupported input/output types.");
        } catch (IOException e) {
            throw new TranslateException("Failed to load tokenizer.", e);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/ZeroShotImageClassificationTranslator.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.VisionLanguageInput;
import ai.djl.modality.cv.translator.BaseImagePreProcessor;
import ai.djl.modality.cv.translator.BaseImageTranslator;
import ai.djl.modality.cv.translator.BaseImageTranslator.BaseBuilder;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.NoBatchifyTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/** The translator for Huggingface zero-shot-image-classification model. */
public class ZeroShotImageClassificationTranslator
        implements NoBatchifyTranslator<VisionLanguageInput, Classifications> {

    private HuggingFaceTokenizer tokenizer;
    private BaseImageTranslator<?> imageProcessor;
    private boolean int32;

    ZeroShotImageClassificationTranslator(
            HuggingFaceTokenizer tokenizer, BaseImageTranslator<?> imageProcessor, boolean int32) {
        this.tokenizer = tokenizer;
        this.imageProcessor = imageProcessor;
        this.int32 = int32;
    }

    /** {@inheritDoc} */
    @Override
    public NDList processInput(TranslatorContext ctx, VisionLanguageInput input)
            throws TranslateException {
        NDManager manager = ctx.getNDManager();
        String template = input.getHypothesisTemplate();
        String[] candidates = input.getCandidates();
        if (candidates == null || candidates.length == 0) {
            throw new TranslateException("Missing candidates in input");
        }
        // Expand each candidate label into a full hypothesis sentence before tokenizing.
        List<String> sequences = new ArrayList<>(candidates.length);
        for (String candidate : candidates) {
            sequences.add(applyTemplate(template, candidate));
        }
        Encoding[] encodings = tokenizer.batchEncode(sequences);
        NDList list = Encoding.toNDList(encodings, manager, false, int32);

        // Append the preprocessed image tensor with an explicit batch dimension.
        Image img = input.getImage();
        NDList imageFeatures = imageProcessor.processInput(ctx, img);
        NDArray array = imageFeatures.get(0).expandDims(0);
        list.add(array);

        // Keep the candidate labels around so processOutput() can name the classes.
        ctx.setAttachment("candidates", candidates);
        return list;
    }

    /** {@inheritDoc} */
    @Override
    public Classifications processOutput(TranslatorContext ctx, NDList list)
            throws TranslateException {
        // Normalize the per-image similarity logits into a probability distribution
        // over the candidate labels.
        NDArray logits = list.get("logits_per_image");
        logits = logits.squeeze().softmax(0);
        String[] candidates = (String[]) ctx.getAttachment("candidates");
        List<String> classes = Arrays.asList(candidates);
        return new Classifications(classes, logits, candidates.length);
    }

    /**
     * Substitutes the candidate label into the hypothesis template.
     *
     * <p>If the template contains a {@code {}} placeholder it is replaced by {@code arg};
     * if the placeholder is absent, {@code arg} is appended to the template. A {@code null}
     * template yields the candidate label unchanged.
     *
     * @param template the hypothesis template, may be {@code null}
     * @param arg the candidate label
     * @return the expanded hypothesis sentence
     */
    private String applyTemplate(String template, String arg) {
        if (template == null) {
            // No template supplied: use the candidate label as-is instead of raising a NPE.
            return arg;
        }
        int pos = template.indexOf("{}");
        if (pos == -1) {
            return template + arg;
        }
        return template.substring(0, pos) + arg + template.substring(pos + 2);
    }

    /**
     * Creates a builder to build a {@code ZeroShotImageClassificationTranslator}.
     *
     * @param tokenizer the tokenizer
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer) {
        return new Builder(tokenizer);
    }

    /**
     * Creates a builder to build a {@code ZeroShotImageClassificationTranslator}.
     *
     * @param tokenizer the tokenizer
     * @param arguments the models' arguments
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
        Builder builder = builder(tokenizer);
        builder.configure(arguments);
        return builder;
    }

    /** The builder for zero-shot image classification translator. */
    public static final class Builder extends BaseBuilder<Builder> {

        private HuggingFaceTokenizer tokenizer;
        private boolean int32;

        Builder(HuggingFaceTokenizer tokenizer) {
            this.tokenizer = tokenizer;
        }

        /** {@inheritDoc} */
        @Override
        protected Builder self() {
            return this;
        }

        /**
         * Sets if use int32 datatype for the {@link Translator}.
         *
         * @param int32 true to use int32 datatype for the token tensors
         * @return this builder
         */
        public Builder optInt32(boolean int32) {
            this.int32 = int32;
            return this;
        }

        /**
         * Configures the builder with the model arguments.
         *
         * @param arguments the model arguments
         */
        public void configure(Map<String, ?> arguments) {
            configPreProcess(arguments);
            optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
        }

        /**
         * Builds the translator.
         *
         * @return the new translator
         * @throws IOException if I/O error occurs
         */
        public ZeroShotImageClassificationTranslator build() throws IOException {
            BaseImagePreProcessor processor = new BaseImagePreProcessor(this);
            return new ZeroShotImageClassificationTranslator(tokenizer, processor, int32);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/ZeroShotImageClassificationTranslatorFactory.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Classifications;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.cv.VisionLanguageInput;
import ai.djl.modality.cv.translator.ZeroShotImageClassificationServingTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * A {@link TranslatorFactory} that creates a {@link ZeroShotImageClassificationTranslator}
 * instance.
 */
public class ZeroShotImageClassificationTranslatorFactory
        implements TranslatorFactory, Serializable {

    private static final long serialVersionUID = 1L;

    private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();

    static {
        SUPPORTED_TYPES.add(new Pair<>(VisionLanguageInput.class, Classifications.class));
        SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
    }

    /** {@inheritDoc} */
    @Override
    public Set<Pair<Type, Type>> getSupportedTypes() {
        return SUPPORTED_TYPES;
    }

    /** {@inheritDoc} */
    @Override
    @SuppressWarnings("unchecked")
    public <I, O> Translator<I, O> newInstance(
            Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
            throws TranslateException {
        Path modelPath = model.getModelPath();
        try {
            // The tokenizer files are loaded from the model directory itself.
            HuggingFaceTokenizer tokenizer =
                    HuggingFaceTokenizer.builder(arguments)
                            .optTokenizerPath(modelPath)
                            .optManager(model.getNDManager())
                            .build();
            ZeroShotImageClassificationTranslator translator =
                    ZeroShotImageClassificationTranslator.builder(tokenizer, arguments).build();
            if (input == VisionLanguageInput.class && output == Classifications.class) {
                return (Translator<I, O>) translator;
            } else if (input == Input.class && output == Output.class) {
                // Wrap the typed translator for serving (Input/Output) use cases.
                return (Translator<I, O>)
                        new ZeroShotImageClassificationServingTranslator(translator);
            }
            throw new IllegalArgumentException("Unsupported input/output types.");
        } catch (IOException e) {
            throw new TranslateException("Failed to load tokenizer.", e);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/ZeroShotObjectDetectionTranslator.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.VisionLanguageInput;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.translator.BaseImagePreProcessor;
import ai.djl.modality.cv.translator.BaseImageTranslator;
import ai.djl.modality.cv.translator.BaseImageTranslator.BaseBuilder;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.NoBatchifyTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/** The translator for Huggingface zero-shot-object-detection model. */
public class ZeroShotObjectDetectionTranslator
        implements NoBatchifyTranslator<VisionLanguageInput, DetectedObjects> {

    private HuggingFaceTokenizer tokenizer;
    private BaseImageTranslator<?> imageProcessor;
    private boolean int32;
    private float threshold;

    ZeroShotObjectDetectionTranslator(
            HuggingFaceTokenizer tokenizer,
            BaseImageTranslator<?> imageProcessor,
            boolean int32,
            float threshold) {
        this.tokenizer = tokenizer;
        this.imageProcessor = imageProcessor;
        this.int32 = int32;
        this.threshold = threshold;
    }

    /** {@inheritDoc} */
    @Override
    public NDList processInput(TranslatorContext ctx, VisionLanguageInput input)
            throws TranslateException {
        NDManager manager = ctx.getNDManager();
        String[] candidates = input.getCandidates();
        if (candidates == null || candidates.length == 0) {
            throw new TranslateException("Missing candidates in input");
        }
        // Tokenize the candidate labels as a batch of text queries.
        Encoding[] encodings = tokenizer.batchEncode(candidates);
        NDList list = Encoding.toNDList(encodings, manager, false, int32);

        // Append the preprocessed image tensor with an explicit batch dimension.
        Image img = input.getImage();
        NDList imageFeatures = imageProcessor.processInput(ctx, img);
        NDArray array = imageFeatures.get(0).expandDims(0);
        list.add(array);

        // Keep the candidate labels around so processOutput() can name detections.
        ctx.setAttachment("candidates", candidates);
        return list;
    }

    /** {@inheritDoc} */
    @Override
    public DetectedObjects processOutput(TranslatorContext ctx, NDList list)
            throws TranslateException {
        NDArray logits = list.get("logits");
        NDArray boxes = list.get("pred_boxes");
        // Best matching label per box; per-box confidence is sigmoid of the max logit.
        NDArray labels = logits.argMax(-1);
        NDArray scores = logits.max(new int[] {-1}).getNDArrayInternal().sigmoid();
        // Keep only detections above the confidence threshold.
        NDArray selected = scores.gt(threshold);
        scores = scores.get(selected);
        labels = labels.get(selected);
        boxes = boxes.get(selected);
        float[] prob = scores.toFloatArray();
        long[] labelsIndex = labels.toLongArray();
        float[] box = boxes.toFloatArray();
        String[] candidates = (String[]) ctx.getAttachment("candidates");
        List<String> classes = new ArrayList<>(labelsIndex.length);
        List<Double> probabilities = new ArrayList<>(labelsIndex.length);
        List<BoundingBox> boundingBoxes = new ArrayList<>(labelsIndex.length);
        // NOTE(review): "width"/"height" attachments are presumably set by the image
        // pre-processor; processOutput() will NPE if they are absent -- verify upstream.
        int width = (Integer) ctx.getAttachment("width");
        int height = (Integer) ctx.getAttachment("height");
        for (int i = 0; i < labelsIndex.length; i++) {
            classes.add(candidates[(int) labelsIndex[i]]);
            // Each box is (center-x, center-y, width, height); convert to top-left origin.
            int pos = i * 4;
            float x = box[pos];
            float y = box[pos + 1];
            float w = box[pos + 2];
            float h = box[pos + 3];
            x = x - w / 2;
            y = y - h / 2;
            // remove padding stretch introduced when the image was squared for the model
            if (width > height) {
                y = y * width / height;
                h = h * width / height;
            } else if (width < height) {
                x = x * height / width;
                w = w * height / width;
            }
            BoundingBox bbox = new Rectangle(x, y, w, h);
            boundingBoxes.add(bbox);
            probabilities.add((double) prob[i]);
        }
        return new DetectedObjects(classes, probabilities, boundingBoxes);
    }

    /**
     * Creates a builder to build a {@code ZeroShotObjectDetectionTranslator}.
     *
     * @param tokenizer the tokenizer
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer) {
        return new Builder(tokenizer);
    }

    /**
     * Creates a builder to build a {@code ZeroShotObjectDetectionTranslator}.
     *
     * @param tokenizer the tokenizer
     * @param arguments the models' arguments
     * @return a new builder
     */
    public static Builder builder(HuggingFaceTokenizer tokenizer, Map<String, ?> arguments) {
        Builder builder = builder(tokenizer);
        builder.configure(arguments);
        return builder;
    }

    /** The builder for zero-shot object detection translator. */
    public static final class Builder extends BaseBuilder<Builder> {

        private HuggingFaceTokenizer tokenizer;
        private boolean int32;
        private float threshold = 0.2f;

        Builder(HuggingFaceTokenizer tokenizer) {
            this.tokenizer = tokenizer;
        }

        /** {@inheritDoc} */
        @Override
        protected Builder self() {
            return this;
        }

        /**
         * Sets the threshold for prediction accuracy.
         *
         * <p>Predictions below the threshold will be dropped.
         *
         * @param threshold the threshold for the prediction accuracy
         * @return this builder
         */
        public Builder optThreshold(float threshold) {
            this.threshold = threshold;
            return this;
        }

        /**
         * Sets if use int32 datatype for the {@link Translator}.
         *
         * @param int32 true to use int32 datatype for the token tensors
         * @return this builder
         */
        public Builder optInt32(boolean int32) {
            this.int32 = int32;
            return this;
        }

        /**
         * Configures the builder with the model arguments.
         *
         * @param arguments the model arguments
         */
        public void configure(Map<String, ?> arguments) {
            configPreProcess(arguments);
            optInt32(ArgumentsUtil.booleanValue(arguments, "int32"));
            optThreshold(ArgumentsUtil.floatValue(arguments, "threshold", 0.2f));
        }

        /**
         * Builds the translator.
         *
         * @return the new translator
         * @throws IOException if I/O error occurs
         */
        public ZeroShotObjectDetectionTranslator build() throws IOException {
            BaseImageTranslator<?> imageProcessor = new BaseImagePreProcessor(this);
            return new ZeroShotObjectDetectionTranslator(
                    tokenizer, imageProcessor, int32, threshold);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/ZeroShotObjectDetectionTranslatorFactory.java
|
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.translator;
import ai.djl.Model;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.cv.VisionLanguageInput;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.translator.ZeroShotObjectDetectionServingTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * A {@link TranslatorFactory} that creates a {@link ZeroShotObjectDetectionTranslator}
 * instance.
 */
public class ZeroShotObjectDetectionTranslatorFactory implements TranslatorFactory, Serializable {

    private static final long serialVersionUID = 1L;

    private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();

    static {
        SUPPORTED_TYPES.add(new Pair<>(VisionLanguageInput.class, DetectedObjects.class));
        SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
    }

    /** {@inheritDoc} */
    @Override
    public Set<Pair<Type, Type>> getSupportedTypes() {
        return SUPPORTED_TYPES;
    }

    /** {@inheritDoc} */
    @Override
    @SuppressWarnings("unchecked")
    public <I, O> Translator<I, O> newInstance(
            Class<I> input, Class<O> output, Model model, Map<String, ?> arguments)
            throws TranslateException {
        Path modelPath = model.getModelPath();
        try {
            // The tokenizer files are loaded from the model directory itself.
            HuggingFaceTokenizer tokenizer =
                    HuggingFaceTokenizer.builder(arguments)
                            .optTokenizerPath(modelPath)
                            .optManager(model.getNDManager())
                            .build();
            ZeroShotObjectDetectionTranslator translator =
                    ZeroShotObjectDetectionTranslator.builder(tokenizer, arguments).build();
            if (input == VisionLanguageInput.class && output == DetectedObjects.class) {
                return (Translator<I, O>) translator;
            } else if (input == Input.class && output == Output.class) {
                // Wrap the typed translator for serving (Input/Output) use cases.
                return (Translator<I, O>) new ZeroShotObjectDetectionServingTranslator(translator);
            }
            throw new IllegalArgumentException("Unsupported input/output types.");
        } catch (IOException e) {
            throw new TranslateException("Failed to load tokenizer.", e);
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/translator/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains {@link ai.djl.translate.Translator} classes that leverage Huggingface tokenizers. */
package ai.djl.huggingface.translator;
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/zoo/HfModelZoo.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.zoo;
import ai.djl.Application;
import ai.djl.Application.CV;
import ai.djl.Application.NLP;
import ai.djl.engine.Engine;
import ai.djl.repository.RemoteRepository;
import ai.djl.repository.Repository;
import ai.djl.repository.Version;
import ai.djl.repository.VersionRange;
import ai.djl.repository.zoo.ModelLoader;
import ai.djl.repository.zoo.ModelZoo;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
/** HfModelZoo is a repository that contains HuggingFace models. */
public class HfModelZoo extends ModelZoo {

    private static final Repository REPOSITORY = new RemoteRepository("Huggingface", DJL_REPO_URL);
    private static final String GROUP_ID = "ai.djl.huggingface.pytorch";

    private volatile boolean initialized; // NOPMD

    HfModelZoo() {}

    /** {@inheritDoc} */
    @Override
    public String getGroupId() {
        return GROUP_ID;
    }

    /** {@inheritDoc} */
    @Override
    public Set<String> getSupportedEngines() {
        return Collections.singleton("PyTorch");
    }

    /** {@inheritDoc} */
    @Override
    public Collection<ModelLoader> getModelLoaders() {
        init();
        return super.getModelLoaders();
    }

    /** {@inheritDoc} */
    @Override
    public ModelLoader getModelLoader(String name) {
        init();
        return super.getModelLoader(name);
    }

    /** Lazily registers the remote model list, guarded by double-checked locking. */
    private void init() {
        if (initialized) {
            return;
        }
        synchronized (HfModelZoo.class) {
            if (initialized) {
                return;
            }
            Version djlVersion = new Version(Engine.getDjlVersion());
            addModels(CV.ZERO_SHOT_IMAGE_CLASSIFICATION, djlVersion);
            addModels(CV.ZERO_SHOT_OBJECT_DETECTION, djlVersion);
            addModels(NLP.FILL_MASK, djlVersion);
            addModels(NLP.QUESTION_ANSWER, djlVersion);
            addModels(NLP.TEXT_CLASSIFICATION, djlVersion);
            addModels(NLP.TEXT_EMBEDDING, djlVersion);
            addModels(NLP.TOKEN_CLASSIFICATION, djlVersion);
            addModels(NLP.ZERO_SHOT_CLASSIFICATION, djlVersion);
            initialized = true;
        }
    }

    /** Registers every compatible model of the given application with this zoo. */
    private void addModels(Application app, Version version) {
        for (Map.Entry<String, Map<String, Object>> entry :
                listModels(REPOSITORY, app).entrySet()) {
            Map<String, Object> metadata = entry.getValue();
            if ("failed".equals(metadata.get("result"))) {
                // skip models whose conversion failed
                continue;
            }
            String requires = (String) metadata.get("requires");
            if (requires != null && !VersionRange.parse(requires).contains(version)) {
                // the model requires a different DJL version
                continue;
            }
            addModel(REPOSITORY.model(app, GROUP_ID, entry.getKey(), "0.0.1"));
        }
    }
}
|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/zoo/HfZooProvider.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.huggingface.zoo;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooProvider;
/**
 * A Huggingface model zoo provider that implements the {@link ai.djl.repository.zoo.ZooProvider}
 * interface.
 */
public class HfZooProvider implements ZooProvider {

    /** {@inheritDoc} */
    @Override
    public ModelZoo getModelZoo() {
        // Each call returns a fresh zoo; model listing is loaded lazily by HfModelZoo.
        return new HfModelZoo();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.