// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/deployments/RpmDeployment.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.deployments;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.SimpleDeploymentValidation;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.*;
@Data
@Accessors(fluent = true)
@NoArgsConstructor
public class RpmDeployment implements Deployment {
public static final String DEFAULT_EXE_NAME = "konduit-serving-deployment.rpm";
public static final String PROP_OUTPUTDIR = "rpm.outputdir";
public static final String PROP_RPMNAME = "rpm.name";
private String outputDir;
private String rpmName;
private String version;
public RpmDeployment(String outputDir) {
this(outputDir, "ks", defaultVersion());
}
public RpmDeployment(@JsonProperty("outputDir") String outputDir, @JsonProperty("rpmName") String rpmName,
@JsonProperty("version") String version){
this.outputDir = outputDir;
this.rpmName = rpmName;
this.version = version;
}
private static String defaultVersion(){
long time = System.currentTimeMillis();
SimpleDateFormat sdf = new SimpleDateFormat("YYYYMMDD-HHmmss.SSS");
return sdf.format(new Date(time));
}
@Override
public List<String> propertyNames() {
return Arrays.asList(PROP_OUTPUTDIR, PROP_RPMNAME);
}
@Override
public Map<String, String> asProperties() {
Map<String,String> m = new LinkedHashMap<>();
m.put(PROP_OUTPUTDIR, outputDir);
m.put(PROP_RPMNAME, rpmName);
return m;
}
@Override
public void fromProperties(Map<String, String> props) {
outputDir = props.getOrDefault(PROP_OUTPUTDIR, outputDir);
rpmName = props.getOrDefault(PROP_RPMNAME, rpmName);
}
@Override
public DeploymentValidation validate() {
        // Mirrors UberJarDeployment: an output directory must be set
        if(outputDir == null || outputDir.isEmpty()){
            return new SimpleDeploymentValidation("No output directory is set (property: " + PROP_OUTPUTDIR + ")");
        }
        return new SimpleDeploymentValidation();
}
@Override
public String outputString() {
File outFile = new File(outputDir, rpmName);
StringBuilder sb = new StringBuilder();
sb.append("RPM location: ").append(outFile.getAbsolutePath()).append("\n");
String size;
if(outFile.exists()){
long bytes = outFile.length();
double bytesPerMB = 1024 * 1024;
double mb = bytes / bytesPerMB;
size = String.format("%.2f", mb) + " MB";
} else {
size = "<RPM not found>";
}
sb.append("RPM size: ").append(size);
return sb.toString();
}
@Override
public List<String> gradleImports() {
List<String> retVal = new ArrayList<>();
retVal.add("org.redline_rpm.header.Os");
retVal.add("org.redline_rpm.header.Architecture");
retVal.add("com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar");
return retVal;
}
@Override
public List<GradlePlugin> gradlePlugins() {
List<GradlePlugin> retVal = new ArrayList<>();
retVal.add(new GradlePlugin("nebula.ospackage", "8.3.0"));
retVal.add(new GradlePlugin("com.github.johnrengelman.shadow", "2.0.4"));
return retVal;
}
@Override
public List<String> gradleTaskNames() {
List<String> ret = new ArrayList<>();
ret.add("shadowJar");
ret.add("buildRpm");
ret.add("copyRpm");
return ret;
}
}
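// Illustrative sketch (not part of the Konduit sources): the Deployment property
// mechanism above round-trips configuration through a string map, which is
// presumably how individual settings such as rpm.outputdir are overridden from
// the command line. The class and property names come from RpmDeployment itself;
// the values below are made up.
import ai.konduit.serving.build.deployments.RpmDeployment;
import java.util.Map;

public class RpmDeploymentPropertiesExample {
    public static void main(String[] args) {
        RpmDeployment rpm = new RpmDeployment("/tmp/ks-out");
        Map<String, String> props = rpm.asProperties();          // {rpm.outputdir=/tmp/ks-out, rpm.name=ks}
        props.put(RpmDeployment.PROP_RPMNAME, "my-serving.rpm"); // override a single key
        rpm.fromProperties(props);
        System.out.println(rpm.rpmName());                       // my-serving.rpm (Lombok fluent accessor)
    }
}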
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/deployments/TarDeployment.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.deployments;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.SimpleDeploymentValidation;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.File;
import java.util.*;
@Data
@Accessors(fluent = true)
@NoArgsConstructor
public class TarDeployment implements Deployment {
public static final String DEFAULT_ARCHIVE_NAME = "ks";
public static final String PROP_OUTPUTDIR = "tar.outputdir";
public static final String PROP_ARCHIVENAME = "tar.name";
private String outputDir;
private String archiveName;
private String version;
private List<String> files;
public TarDeployment(String outputDir) {
this(outputDir, "ks", Deployment.defaultVersion());
}
public TarDeployment(@JsonProperty("outputDir") String outputDir, @JsonProperty("rpmName") String imageName,
@JsonProperty("version") String version){
this.outputDir = outputDir;
        this.archiveName = archiveName;
this.version = version;
}
@Override
public List<String> propertyNames() {
        return Arrays.asList(PROP_OUTPUTDIR, PROP_ARCHIVENAME);
}
@Override
public Map<String, String> asProperties() {
Map<String,String> m = new LinkedHashMap<>();
m.put(PROP_OUTPUTDIR, outputDir);
m.put(PROP_ARCHIVENAME, archiveName);
return m;
}
@Override
public void fromProperties(Map<String, String> props) {
outputDir = props.getOrDefault(PROP_OUTPUTDIR, outputDir);
archiveName = props.getOrDefault(PROP_ARCHIVENAME, archiveName);
}
@Override
public DeploymentValidation validate() {
        // Mirrors UberJarDeployment: an output directory must be set
        if(outputDir == null || outputDir.isEmpty()){
            return new SimpleDeploymentValidation("No output directory is set (property: " + PROP_OUTPUTDIR + ")");
        }
        return new SimpleDeploymentValidation();
}
@Override
public String outputString() {
File outFile = new File(outputDir, archiveName);
StringBuilder sb = new StringBuilder();
sb.append("TAR location: ").append(outFile.getAbsolutePath()).append("\n");
String size;
if(outFile.exists()){
long bytes = outFile.length();
double bytesPerMB = 1024 * 1024;
double mb = bytes / bytesPerMB;
size = String.format("%.2f", mb) + " MB";
} else {
size = "<TAR not found>";
}
sb.append("TAR size: ").append(size);
return sb.toString();
}
@Override
public List<String> gradleImports() {
return Collections.singletonList("com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar");
}
@Override
public List<GradlePlugin> gradlePlugins() {
List<GradlePlugin> ret = new ArrayList<>();
ret.add(new GradlePlugin("distribution", ""));
ret.add(new GradlePlugin("com.github.johnrengelman.shadow", "2.0.4"));
return ret;
}
@Override
public List<String> gradleTaskNames() {
List<String> ret = new ArrayList<>();
ret.add("shadowJar");
ret.add("distTar");
ret.add("copyTar");
return ret;
}
}
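// Illustrative sketch (not part of the Konduit sources): outputString() above
// reports the size as bytes / (1024 * 1024) formatted to two decimals, so a
// 5,242,880-byte archive prints as "5.00 MB". The size used here is hypothetical.
public class ArchiveSizeFormatExample {
    public static void main(String[] args) {
        long bytes = 5_242_880L;                           // hypothetical archive size
        double mb = bytes / (1024.0 * 1024.0);
        System.out.println(String.format("%.2f MB", mb)); // 5.00 MB
    }
}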
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/deployments/UberJarDeployment.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.deployments;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.SimpleDeploymentValidation;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.*;
@Data
@Accessors(fluent = true)
@NoArgsConstructor
public class UberJarDeployment implements Deployment {
public static final String DEFAULT_GROUPID = "ai.konduit";
public static final String DEFAULT_ARTIFACT = "konduit-serving-uberjar";
public static final String DEFAULT_JAR_NAME = "konduit-serving-deployment.jar";
public static final String PROP_OUTPUTDIR = "jar.outputdir";
public static final String PROP_JARNAME = "jar.name";
public static final String PROP_GID = "jar.groupid";
public static final String PROP_AID = "jar.artifactid";
public static final String PROP_VER = "jar.version";
    public static final String CLI_KEYS = "JAR deployment config keys: " + PROP_OUTPUTDIR + ", " + PROP_JARNAME + ", "
            + PROP_GID + ", " + PROP_AID + ", " + PROP_VER;
private String outputDir;
private String jarName;
private String groupId;
private String artifactId;
private String version;
public UberJarDeployment(String outputDir){
this(outputDir, DEFAULT_JAR_NAME, DEFAULT_GROUPID, DEFAULT_ARTIFACT, defaultVersion());
}
public UberJarDeployment(@JsonProperty("outputDir") String outputDir, @JsonProperty("jarName") String jarName,
@JsonProperty("groupId") String groupId, @JsonProperty("artifactId") String artifactId,
@JsonProperty("version") String version){
this.outputDir = outputDir;
this.jarName = jarName;
this.groupId = groupId;
this.artifactId = artifactId;
this.version = version;
}
private static String defaultVersion(){
long time = System.currentTimeMillis();
SimpleDateFormat sdf = new SimpleDateFormat("YYYYMMDD-HHmmss.SSS");
return sdf.format(new Date(time));
}
@Override
public List<String> propertyNames() {
return Arrays.asList(PROP_OUTPUTDIR, PROP_JARNAME, PROP_GID, PROP_AID, PROP_VER);
}
@Override
public Map<String, String> asProperties() {
Map<String,String> m = new LinkedHashMap<>();
m.put(PROP_OUTPUTDIR, outputDir);
m.put(PROP_JARNAME, jarName);
m.put(PROP_GID, groupId);
m.put(PROP_AID, artifactId);
m.put(PROP_VER, version);
return m;
}
@Override
public void fromProperties(Map<String, String> p) {
outputDir = p.getOrDefault(PROP_OUTPUTDIR, outputDir);
jarName = p.getOrDefault(PROP_JARNAME, jarName);
groupId = p.getOrDefault(PROP_GID, groupId);
artifactId = p.getOrDefault(PROP_AID, artifactId);
version = p.getOrDefault(PROP_VER, version);
}
@Override
public DeploymentValidation validate() {
//TODO we need to validate the actual content - not that it's set. i.e., certain characters can't be used
// for groupid, artifacts, version, jar name, etc
if(outputDir == null || outputDir.isEmpty()){
return new SimpleDeploymentValidation("No output directory is set (property: " + PROP_OUTPUTDIR + ")");
}
return new SimpleDeploymentValidation();
}
@Override
public String outputString() {
File outFile = new File(outputDir, jarName);
StringBuilder sb = new StringBuilder();
sb.append("JAR location: ").append(outFile.getAbsolutePath()).append("\n");
String size;
String filename = "";
if(outFile.exists()){
long bytes = outFile.length();
double bytesPerMB = 1024 * 1024;
double mb = bytes / bytesPerMB;
size = String.format("%.2f", mb) + " MB";
filename = outFile.getName();
} else {
size = "<JAR not found>";
filename = "<jar file name>";
}
sb.append("JAR size: ").append(size).append("\n");
sb.append("JAR launch command: java -jar ").append(filename).append(" <serve|list|stop|inspect|logs>\n");
return sb.toString();
}
@Override
public List<String> gradleImports() {
return Collections.singletonList("com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar");
}
@Override
public List<GradlePlugin> gradlePlugins() {
return Collections.singletonList(new GradlePlugin("com.github.johnrengelman.shadow", "2.0.4"));
}
@Override
public List<String> gradleTaskNames() {
return Collections.singletonList("shadowJar");
}
}
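// Illustrative sketch (not part of the Konduit sources): validate() above returns
// a SimpleDeploymentValidation carrying a failure message when no output directory
// is set, and an empty one otherwise. How failures are read off the returned
// DeploymentValidation is an assumption here, since that interface is not shown
// in this listing.
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.deployments.UberJarDeployment;

public class UberJarValidationExample {
    public static void main(String[] args) {
        UberJarDeployment d = new UberJarDeployment(null, "app.jar", "ai.konduit", "demo", "1.0");
        DeploymentValidation v = d.validate();   // reports the missing jar.outputdir
        System.out.println(v);
    }
}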
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/steps/RunnerInfo.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.steps;
import ai.konduit.serving.build.config.Module;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
@Data
@AllArgsConstructor
@Accessors(fluent = true)
public class RunnerInfo {
private String runnerClass;
private Module module;
}
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/steps/StepId.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.steps;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class StepId {
private int number;
private String name;
private String jsonType;
}
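// Illustrative sketch (not part of the Konduit sources): with Lombok's
// @Accessors(fluent = true), as used on StepId and RunnerInfo above, the
// generated accessors drop the get/set prefixes, so fields are read as
// id.name() and written as id.name("..."). The values below are made up.
import ai.konduit.serving.build.steps.StepId;

public class FluentAccessorExample {
    public static void main(String[] args) {
        StepId id = new StepId(0, "preprocess", "LOGGING");
        System.out.println(id.number() + " " + id.jsonType()); // 0 LOGGING
        id.name("renamed");                                    // fluent setter
        System.out.println(id.name());                         // renamed
    }
}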
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/util/ModuleUtils.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.util;
import ai.konduit.serving.build.config.Module;
import ai.konduit.serving.build.steps.RunnerInfo;
import ai.konduit.serving.build.steps.StepId;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.nd4j.common.io.ClassPathResource;
import org.nd4j.shade.jackson.databind.JsonNode;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
@Slf4j
public class ModuleUtils {
private ModuleUtils(){ }
public static Map<StepId, List<RunnerInfo>> runnersForFile(File f){
try {
JsonNode jsonConfiguration = readConfiguration(FileUtils.readFileToString(f, StandardCharsets.UTF_8));
if(jsonConfiguration == null) {
throw new IllegalStateException("Unable to parse string into a valid pipeline configuration from file: " + f.getAbsolutePath());
} else {
return runnersForJson(
(jsonConfiguration.has("pipeline") ?
jsonConfiguration.get("pipeline") :
jsonConfiguration)
.toString());
}
} catch (IOException e){
throw new RuntimeException("Error reading JSON/YAML from file: " + f.getAbsolutePath(), e);
}
}
/**
* Parse the given configuration yaml/json string to {@link JsonNode}.
*
* @param configurationString given configuration string. Can be a JSON/YAML string
     * @return the parsed configuration as a {@link JsonNode}, or null on failure
*/
private static JsonNode readConfiguration(String configurationString) {
try {
return ObjectMappers.json().readTree(configurationString);
} catch (Exception jsonProcessingErrors) {
try {
return ObjectMappers.yaml().readTree(configurationString);
} catch (Exception yamlProcessingErrors) {
log.error("Given configuration: '{}' does not contain a valid JSON/YAML object", configurationString);
log.error("\n\nErrors while processing as a json string:", jsonProcessingErrors);
log.error("\n\nErrors while processing as a yaml string:", yamlProcessingErrors);
return null;
}
}
}
public static Map<StepId, List<RunnerInfo>> runnersForJson(String json){
Map<StepId, List<RunnerInfo>> out = new HashMap<>();
//TODO let's do this properly - this is a temporary hack for development/testing of other aspects
Map<String,Object> map;
try{
map = ObjectMappers.json().readValue(json, Map.class);
} catch (IOException e){
throw new RuntimeException(e);
}
Object stepsObj = map.get("steps");
int stepCount = 0;
if(stepsObj instanceof List){
List<Object> l = (List<Object>) stepsObj;
for(Object o : l){
if(o instanceof Map){
Map<String,Object> m = (Map<String,Object>)o;
String jsonType = (String) m.get("@type");
Module mod = moduleForJsonType(jsonType);
if(mod == null)
continue;
String runnerClass = null; //TODO
String name = ""; //TODO
                    StepId id = new StepId(stepCount++, name, jsonType);
RunnerInfo ri = new RunnerInfo(runnerClass, mod);
out.put(id, Collections.singletonList(ri));
}
}
} else if(stepsObj instanceof Map) {
Map<String,Object> m = (Map<String,Object>)stepsObj;
for(Map.Entry<String,Object> e : m.entrySet()){
if(e.getValue() instanceof Map){
Map<String,Object> step = (Map<String,Object>)e.getValue();
if(step.containsKey("@type")){
String jsonType = (String) step.get("@type");
Module mod = moduleForJsonType(jsonType);
if(mod == null)
continue;
String runnerClass = null; //TODO
String name = ""; //TODO
                    StepId id = new StepId(stepCount++, name, jsonType);
RunnerInfo ri = new RunnerInfo(runnerClass, mod);
out.put(id, Collections.singletonList(ri));
}
}
}
}
return out;
}
public static Module moduleForJsonType(String jsonType){
Map<String,List<RunnerInfo>> map = jsonNameToRunnerClass();
if(!map.containsKey(jsonType)){
log.warn("No JSON subtype known for: {} (safe to ignore for custom pipeline steps)", jsonType);
return null;
}
List<RunnerInfo> l = map.get(jsonType);
if(l == null || l.isEmpty()){
log.warn("Failed to determine runner for JSON type {} - class represents custom functionality, or missing @CanRun annotation on the runner?", jsonType);
return null;
}
if(l.size() > 1){
log.warn("More than 1 runner available for JSON type {} - returning first", jsonType);
}
if (l.get(0) == null) {
log.warn("Failed to determine runner for JSON type {} - class represents custom functionality, or missing @CanRun annotation on the runner?", jsonType);
return null;
}
return l.get(0).module();
}
public static Map<String,RunnerInfo> pipelineClassToRunnerClass(){
String s;
try {
File f = new ClassPathResource("META-INF/konduit-serving/PipelineStepRunner").getFile();
s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
} catch (IOException e){
throw new RuntimeException(e);
}
String[] lines = s.split("\n");
Map<String,RunnerInfo> out = new HashMap<>();
        for(String line : lines){
            if(line.isEmpty())
                continue;
            String[] split = line.split(","); //Format: pipelineClass,runnerClass,module - i.e., "this type of pipeline step (in specified module) can be run by this type of runner"
RunnerInfo info = new RunnerInfo(split[1], Module.forName(split[2]));
out.put(split[0], info);
}
return out;
}
public static Map<String,List<RunnerInfo>> jsonNameToRunnerClass(){
String s;
try {
File f = new ClassPathResource("META-INF/konduit-serving/JsonNameMapping").getFile();
s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
} catch (IOException e){
throw new RuntimeException(e);
}
Map<String,RunnerInfo> c2Runner = pipelineClassToRunnerClass();
String[] lines = s.split("\n");
Map<String,List<RunnerInfo>> out = new HashMap<>();
for(String line : lines){
if(line.isEmpty())
continue;
String[] split = line.split(","); //Format: json_name,class_name,interface_name
RunnerInfo info = c2Runner.get(split[1]);
List<RunnerInfo> l = out.computeIfAbsent(split[0], k -> new ArrayList<>());
l.add(info);
}
return out;
}
}
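// Illustrative sketch (not part of the Konduit sources): feeds a minimal pipeline
// JSON through runnersForJson. Which modules come back depends on the
// META-INF/konduit-serving mapping files present on the classpath, so the printed
// result varies by build; "LOGGING" is a hypothetical @type value.
import ai.konduit.serving.build.steps.RunnerInfo;
import ai.konduit.serving.build.steps.StepId;
import ai.konduit.serving.build.util.ModuleUtils;
import java.util.List;
import java.util.Map;

public class RunnersForJsonExample {
    public static void main(String[] args) {
        String json = "{\"steps\": [{\"@type\": \"LOGGING\"}]}";
        Map<StepId, List<RunnerInfo>> runners = ModuleUtils.runnersForJson(json);
        runners.forEach((id, infos) ->
                System.out.println(id.jsonType() + " -> " + infos.get(0).module()));
    }
}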
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/validation/ValidationFailure.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.validation;
import ai.konduit.serving.build.steps.StepId;
public interface ValidationFailure {
StepId step();
}
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/validation/ValidationResult.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.validation;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.List;
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class ValidationResult {
private final List<ValidationFailure> failures;
public boolean ok(){
return failures.isEmpty();
}
public boolean failed(){
return !ok();
}
}
// ==== java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/validation/failures/NoAvailableRunnerFailure.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.validation.failures;
import ai.konduit.serving.build.steps.StepId;
import ai.konduit.serving.build.validation.ValidationFailure;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class NoAvailableRunnerFailure implements ValidationFailure {
private final StepId step;
}
// ==== java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/CLIModuleInfo.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli;
import ai.konduit.serving.annotation.module.ModuleInfo;
@ModuleInfo("konduit-serving-cli")
public class CLIModuleInfo {
private CLIModuleInfo(){ }
}
// ==== java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/KonduitServingCliJsonMapping.java ====
package ai.konduit.serving.cli;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingCliJsonMapping implements JsonSubTypesMapping {
    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        return l;
    }
}
// ==== java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/KonduitServingLauncher.java ====
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher;
import ai.konduit.serving.build.cli.BuildCLI;
import ai.konduit.serving.cli.launcher.command.*;
import ai.konduit.serving.cli.launcher.command.build.extension.ProfileCommand;
import ai.konduit.serving.cli.launcher.command.build.extension.PythonPathsCommand;
import ai.konduit.serving.cli.launcher.command.build.extension.ServeBuildCommand;
import ai.konduit.serving.pipeline.settings.constants.Constants;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import io.micrometer.core.instrument.MeterRegistry;
import io.vertx.core.Launcher;
import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import io.vertx.core.cli.annotations.Name;
import lombok.extern.slf4j.Slf4j;
import java.text.SimpleDateFormat;
import java.util.concurrent.TimeUnit;
/**
 * Initializes the {@link VertxOptions} for deployment and use in a
 * {@link Vertx} instance.
 * The following other initialization also happens:
 * the Vert.x working directory (vertx.cwd) and cache directory base (vertx.cacheDirBase) get set,
 * vertx.disableFileCPResolving gets set to true,
 * vertx.logger-delegate-factory-class-name gets set to io.vertx.core.logging.SLF4JLogDelegateFactory,
 * the {@link MeterRegistry} field and associated Prometheus configuration get set up,
 * and the {@link VertxOptions} event bus options also get set.
 */
@Slf4j
public class KonduitServingLauncher extends Launcher {
@Override
protected String getDefaultCommand() {
return "--help";
}
@Override
public void beforeStartingVertx(VertxOptions options) {
LauncherUtils.setCommonVertxProperties();
options.setMaxEventLoopExecuteTime(600);
options.setMaxEventLoopExecuteTimeUnit(TimeUnit.SECONDS);
}
public static void main(String[] args) {
new KonduitServingLauncher().exec(args);
}
public void exec(String[] args) {
ObjectMappers.json().setDateFormat(new SimpleDateFormat(Constants.DATE_FORMAT));
this.setMainCommands();
if(args.length > 0 && KonduitRunCommand.class.getAnnotation(Name.class).value().equals(args[0]))
this.register(KonduitRunCommand.class, KonduitRunCommand::new);
this.dispatch(args);
}
public void setMainCommands() {
this.unregister("bare")
.unregister("start")
.unregister("run")
.unregister("test")
.unregister("version")
//.register(JoinCommand.class, JoinCommand::new) // TODO: Uncomment this after implementation and testing
.register(ServeBuildCommand.class, ServeBuildCommand::new)
.register(ListCommand.class, ListCommand::new)
.register(StopCommand.class, StopCommand::new)
.register(MetricsCommand.class, MetricsCommand::new)
.register(VersionCommand.class, VersionCommand::new)
.register(ConfigCommand.class, ConfigCommand::new)
.register(InspectCommand.class, InspectCommand::new)
.register(LogsCommand.class, LogsCommand::new)
.register(ProfileCommand.class, ProfileCommand::new)
.register(BuildCLI.class, BuildCLI::new)
.register(PythonPathsCommand.class, PythonPathsCommand::new);
}
public String commandLinePrefix() {
return getCommandLinePrefix();
}
}
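// Illustrative sketch (not part of the Konduit sources): the launcher is a plain
// main class, so a CLI invocation such as `konduit config -p logging -m` can be
// reproduced programmatically, e.g. for testing.
public class LauncherInvocationExample {
    public static void main(String[] args) {
        ai.konduit.serving.cli.launcher.KonduitServingLauncher.main(
                new String[]{"config", "-p", "logging", "-m"});
    }
}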
// ==== java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/LauncherUtils.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import io.vertx.core.impl.launcher.commands.ExecUtils;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOCase;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.RegexFileFilter;
import org.apache.commons.io.input.ReversedLinesFileReader;
import org.apache.commons.lang3.SystemUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.util.*;
import static io.vertx.core.file.impl.FileResolver.CACHE_DIR_BASE_PROP_NAME;
import static java.lang.System.setProperty;
/**
* Common utility class for {@link KonduitServingLauncher} and its corresponding commands.
*/
@Slf4j
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class LauncherUtils {
public static final int SECONDS_IN_DAY = 86400;
//standardize time zone
    public static final DateFormat dateFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy", Locale.ENGLISH);
static {
dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
}
/**
* This sets some of the common properties for vertx and logs. This will set the working directory
* for vertx and channels the vertx related logs to the logback configuration that konduit-serving
* utilizes.
*/
public static void setCommonVertxProperties() {
setProperty("vertx.cwd", DirectoryFetcher.getVertxDir().getAbsolutePath()); // For setting the vertx working directory for runtime files.
setProperty(CACHE_DIR_BASE_PROP_NAME, DirectoryFetcher.getVertxDir().getAbsolutePath()); // For setting caching directory for vertx related optimizations.
}
/**
* Gets the process id of the konduit server process given its application id.
* @param serverId application id of the konduit server application.
* @return process id of the konduit server process.
*/
public static int getPidFromServerId(String serverId) {
List<String> cmd = new ArrayList<>();
try {
if (ExecUtils.isWindows()) {
cmd.add("WMIC");
cmd.add("PROCESS");
cmd.add("WHERE");
cmd.add("\"CommandLine like '%serving.id=" + serverId + "' and name!='wmic.exe'\"");
cmd.add("GET");
cmd.add("CommandLine,ProcessId");
} else {
cmd.add("sh");
cmd.add("-c");
cmd.add("ps axww | grep \"serving.id=" + serverId + "$\"");
}
return Integer.parseInt(extractPidFromLine(IOUtils.toString(new InputStreamReader(
new ProcessBuilder(cmd).start().getInputStream())).replace(System.lineSeparator(), "")));
} catch (Exception exception) {
log.error("Failed to fetch pid from server id", exception);
System.exit(1);
return -1;
}
}
    /**
     * Parses a line of process-listing output (from {@code ps} or WMIC) for a
     * konduit server process and extracts its process id.
     * @param line process-listing output line for the konduit server process.
     * @return process id of the konduit server process.
     */
public static String extractPidFromLine(String line) {
String[] splits = line.trim().split(" ");
if(ExecUtils.isWindows()) {
return splits[splits.length -1].trim();
} else {
return splits[0].trim();
}
}
/**
* Checks if there is a konduit server running with the given application id.
* @param applicationId application id of the konduit server.
* @return true if the server process exists, false otherwise.
*/
public static boolean isProcessExists(String applicationId) {
List<String> args;
if(SystemUtils.IS_OS_WINDOWS) {
args = Arrays.asList("WMIC", "PROCESS", "WHERE", "\"CommandLine like '%serving.id=" + applicationId + "' and name!='wmic.exe'\"", "GET", "CommandLine", "/VALUE");
} else {
args = Arrays.asList("sh", "-c", "ps axww | grep \"Dserving.id=" + applicationId + "$\"");
}
String output = "";
try {
Process process = new ProcessBuilder(args).start();
output = IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8);
} catch (Exception exception) {
log.error("An error occurred while checking for existing processes:", exception);
System.exit(1);
}
return output.trim().endsWith("Dserving.id=" + applicationId);
}
/**
* Reads the last n lines from a file
* @param file file where the data to be read is.
* @param numOfLastLinesToRead the number of last lines to read
* @return read lines
*/
public static String readLastLines(File file, int numOfLastLinesToRead) throws IOException {
List<String> result = new ArrayList<>();
try (ReversedLinesFileReader reader = new ReversedLinesFileReader(file, StandardCharsets.UTF_8)) {
String line;
while ((line = reader.readLine()) != null && result.size() < numOfLastLinesToRead) {
result.add(line);
}
} catch (IOException e) {
log.error("Error while reading log file", e);
throw e;
}
Collections.reverse(result);
return String.join(System.lineSeparator(), result);
}
/**
* Cleans up the server data files daily.
*/
public static void cleanServerDataFilesOnceADay() {
Date timeNow = Date.from(Instant.now());
File lastCheckedFile = new File(DirectoryFetcher.getServersDataDir(), "lastChecked");
Date lastChecked = timeNow;
boolean firstTime = false;
if(lastCheckedFile.exists()) {
try {
lastChecked = dateFormat.parse(FileUtils.readFileToString(lastCheckedFile, StandardCharsets.UTF_8).trim());
} catch (IOException | ParseException exception) {
try {
FileUtils.forceDelete(lastCheckedFile);
} catch (IOException e) {
log.error("Ran in to issue with parsing log file.", exception);
log.error("Failed to delete file ",e);
}
log.error("Unable to identify last server data file cleanup check", exception);
return; // Stop cleaning up
}
} else {
firstTime = true;
}
if(timeNow.toInstant().getEpochSecond() - lastChecked.toInstant().getEpochSecond() > SECONDS_IN_DAY || firstTime) {
cleanServerDataFiles();
}
try {
FileUtils.writeStringToFile(lastCheckedFile, dateFormat.format(timeNow), StandardCharsets.UTF_8);
} catch (IOException exception) {
log.error("Unable to set last checked clean up time at: {}", lastCheckedFile.getAbsolutePath(), exception);
}
}
    /**
     * Cleans extra server files named {@code <pid>.data} at {@link DirectoryFetcher#getServersDataDir}
     * that no longer have a process associated with them.
     */
public static void cleanServerDataFiles() {
for(File file : FileUtils.listFiles(DirectoryFetcher.getServersDataDir(), new RegexFileFilter("\\d+.data",
IOCase.INSENSITIVE), null)) {
String pid = file.getName().split("\\.")[0];
boolean deleting = false;
try {
if (!LauncherUtils.isKonduitServer(pid)) {
deleting = true;
FileUtils.forceDelete(file);
}
} catch (IOException exception) {
if(deleting) {
log.error("Unable to delete server data file at: {}", file.getAbsolutePath(), exception);
} else {
log.error("Unable to identify a konduit serving process on the given id: {}", pid, exception);
}
}
}
}
/**
* Check if the process identified by the given pid is a konduit serving process
* @param pid Process pid
* @return true if it's a konduit serving process otherwise false
*/
private static boolean isKonduitServer(String pid) throws IOException {
List<String> args;
if(SystemUtils.IS_OS_WINDOWS) {
args = Arrays.asList("WMIC", "PROCESS", "WHERE", "ProcessId=" + pid, "GET", "CommandLine", "/VALUE");
} else {
args = Arrays.asList("sh", "-c", "ps axww | grep \"^\\s*" + pid + "\\s\"");
}
Process process = new ProcessBuilder(args).start();
String output = IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8);
return output.contains("Dserving.id=");
}
}
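// Illustrative sketch (not part of the Konduit sources): extractPidFromLine picks
// the PID out of one line of process-listing output - the first token on Unix
// `ps` output, the last token for the WMIC query used above. The sample line is
// hypothetical, and which branch runs depends on the current OS.
import ai.konduit.serving.cli.launcher.LauncherUtils;

public class ExtractPidExample {
    public static void main(String[] args) {
        String psLine = "12345 ?  Sl  0:42 java -Dserving.id=my-server";
        System.out.println(LauncherUtils.extractPidFromLine(psLine)); // 12345 on Unix
    }
}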
// ==== java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/ConfigCommand.java ====
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.pipeline.api.pipeline.Pipeline;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.impl.pipeline.GraphPipeline;
import ai.konduit.serving.pipeline.impl.pipeline.SequencePipeline;
import ai.konduit.serving.pipeline.impl.pipeline.graph.GraphBuilder;
import ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep;
import ai.konduit.serving.pipeline.impl.pipeline.graph.switchfn.DataIntSwitchFn;
import ai.konduit.serving.pipeline.impl.pipeline.graph.switchfn.DataStringSwitchFn;
import ai.konduit.serving.pipeline.impl.step.logging.LoggingStep;
import ai.konduit.serving.pipeline.impl.step.ml.classifier.ClassifierOutputStep;
import ai.konduit.serving.pipeline.impl.step.ml.ssd.SSDToBoundingBoxStep;
import ai.konduit.serving.vertx.config.InferenceConfiguration;
import ai.konduit.serving.vertx.config.ServerProtocol;
import io.vertx.core.cli.annotations.Description;
import io.vertx.core.cli.annotations.Name;
import io.vertx.core.cli.annotations.Option;
import io.vertx.core.cli.annotations.Summary;
import io.vertx.core.json.JsonObject;
import io.vertx.core.spi.launcher.DefaultCommand;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@Name("config")
@Summary("A helper command for creating boiler plate json/yaml for inference configuration")
@Description("This command is a utility to create boilerplate json/yaml configurations that can be conveniently modified to start konduit servers.\n\n" +
"Example usages:\n" +
"--------------\n" +
" -- FOR SEQUENCE PIPELINES--\n" +
"- Prints 'logging -> tensorflow -> logging' config in pretty format:\n" +
"$ konduit config -p logging,tensorflow,logging\n\n" +
"- Prints 'logging -> tensorflow -> logging' config with gRPC protocol\n" +
" in pretty format:\n" +
"$ konduit config -p logging,tensorflow,logging -pr grpc\n\n" +
"- Prints 'dl4j -> logging' config in minified format:\n" +
"$ konduit config -p dl4j,logging -m\n\n" +
"- Saves 'dl4j -> logging' config in a 'config.json' file:\n" +
"$ konduit config -p dl4j,logging -o config.json\n\n" +
"- Saves 'dl4j -> logging' config in a 'config.yaml' file:\n" +
"$ konduit config -p dl4j,logging -y -o config.json\n" +
"\n\n -- FOR GRAPH PIPELINES --\n" +
"- Generates a config that logs the input(1) then flow them through two \n" +
" tensorflow models(2,3) and merges the output(4):\n" +
"$ konduit config -p 1=logging(input),2=tensorflow(1),3=tensorflow(1),4=merge(2,3)\n\n" +
"- Generates a config that logs the input(1) then channels(2) them through one\n" +
" of the two tensorflow models(3,4) and then selects the output(5) based\n" +
" on the value of the selection integer field 'select'\n" +
"$ konduit config -p 1=logging(input),[2_1,2_2]=switch(int,select,1),3=tensorflow(2_1),4=tensorflow(2_2),5=any(3,4)\n\n" +
"- Generates a config that logs the input(1) then channels(2) them through one\n" +
" of the two tensorflow models(3,4) and then selects the output(5) based\n" +
" on the value of the selection string field 'select' in the selection map \n" +
" (x:0,y:1).\n" +
"$ konduit config -p 1=logging(input),[2_1,2_2]=switch(string,select,x:0,y:1,1),3=tensorflow(2_1),4=tensorflow(2_2),5=any(3,4)\n" +
"--------------")
public class ConfigCommand extends DefaultCommand {
protected static final Pattern STEP_PATTERN = Pattern.compile(",?(.+?)=([^,]+?)\\(([^)]+?)\\)");
protected static final Pattern NAME_PATTERN = Pattern.compile("([^,]+)");
protected static final Pattern SWITCH_WHOLE_OUTPUT_PATTERN = Pattern.compile("\\[(.+)]");
protected static final Pattern SWITCH_INPUTS_PATTERN = Pattern.compile("(int|string),([^,]+),(.+)");
protected static final Pattern SWITCH_MAP_PATTERN = Pattern.compile("([^,]+):([0-9]+)?");
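    // For a pipeline string such as 1=logging(input),2=tensorflow(1),3=merge(1,2),
    // STEP_PATTERN matches one "<outputs>=<type>(<inputs>)" step per find(), with
    // group(1)=outputs, group(2)=type and group(3)=inputs. Switch steps use
    // SWITCH_WHOLE_OUTPUT_PATTERN to strip the surrounding [...] from the output
    // list, SWITCH_INPUTS_PATTERN to split "int|string,<select field>,<rest>", and
    // SWITCH_MAP_PATTERN to parse the "key:channel" pairs of string switches.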
private enum PipelineStepType {
CROP_GRID,
CROP_FIXED_GRID,
DL4J,
KERAS,
DRAW_BOUNDING_BOX,
DRAW_FIXED_GRID,
DRAW_GRID,
DRAW_SEGMENTATION,
EXTRACT_BOUNDING_BOX,
CAMERA_FRAME_CAPTURE,
VIDEO_FRAME_CAPTURE,
IMAGE_TO_NDARRAY,
LOGGING,
SSD_TO_BOUNDING_BOX,
SAMEDIFF,
SHOW_IMAGE,
TENSORFLOW,
ND4JTENSORFLOW,
PYTHON,
ONNX,
CLASSIFIER_OUTPUT
}
private enum GraphStepType {
SWITCH,
MERGE,
ANY
}
private static final List<String> reservedKeywords;
static {
reservedKeywords = Arrays.stream(PipelineStepType.values()).map(Enum::name).collect(Collectors.toList());
reservedKeywords.addAll(Arrays.stream(GraphStepType.values()).map(Enum::name).collect(Collectors.toList()));
reservedKeywords.add("INPUT");
}
private enum SwitchType {
INT,
STRING
}
Map<String, GraphStep> graphStepsGlobalMap = new HashMap<>();
private ServerProtocol protocol = ServerProtocol.HTTP;
private String pipelineString;
private boolean minified;
private boolean yaml;
private File outputFile;
@Option(longName = "pipeline", shortName = "p", argName = "config", required = true)
@Description("A comma-separated list of sequence/graph pipeline steps to create boilerplate configuration from. " +
"For sequences, allowed values are: " +
"[crop_grid, crop_fixed_grid, dl4j, keras, draw_bounding_box, draw_fixed_grid, draw_grid, " +
"draw_segmentation, extract_bounding_box, camera_frame_capture, video_frame_capture, " +
"image_to_ndarray, logging, ssd_to_bounding_box, samediff, show_image, tensorflow, " +
"nd4jtensorflow, python, onnx, classifier_output]. " +
"For graphs, the list item should be in the format '<output>=<type>(<inputs>)' or " +
"'[outputs]=switch(<inputs>)' for switches. The pre-defined root input is named, 'input'. " +
"Examples are ==> " +
"Pipeline step: 'a=tensorflow(input),b=dl4j(input)' " +
"Merge Step: 'c=merge(a,b)' " +
"Switch Step (int): '[d1,d2,d3]=switch(int,select,input)' " +
"Switch Step (string): '[d1,d2,d3]=switch(string,select,x:1,y:2,z:3,input)'" +
"Any Step: 'e=any(d1,d2,d3)' " +
"See the examples above for more usage information.")
public void setPipeline(String pipelineString) {
this.pipelineString = pipelineString;
}
@Option(longName = "minified", shortName = "m", flag = true)
@Description("If set, the output json will be printed in a single line, without indentations. (Ignored for yaml configuration output)")
public void setMinified(boolean minified) {
this.minified = minified;
}
@Option(longName = "yaml", shortName = "y", flag = true)
@Description("Set if you want the output to be a yaml configuration.")
public void setYaml(boolean yaml) { this.yaml = yaml; }
@Option(longName = "protocol", shortName = "pr")
@Description("Protocol to use with the server. Allowed values are [http, grpc, mqtt]")
    public void setProtocol(String protocol) {
try {
this.protocol = ServerProtocol.valueOf(protocol.toUpperCase());
} catch (Exception exception) {
System.out.format("Protocol can only be one of %s. Given %s%n",
Arrays.toString(ServerProtocol.values()), protocol);
exception.printStackTrace();
System.exit(1);
}
}
@Option(longName = "output", shortName = "o", argName = "output-file")
@Description("Optional: If set, the generated json/yaml will be saved here. Otherwise, it's printed on the console.")
public void setOutputFile(String output) {
outputFile = new File(output);
if(outputFile.exists()) {
if(!outputFile.isFile()) {
System.out.format("'%s' is not a valid file location%n", outputFile);
System.exit(1);
}
} else {
try {
if(!outputFile.createNewFile()) {
System.out.format("'%s' is not a valid file location%n", outputFile);
System.exit(1);
}
} catch (Exception exception) {
System.out.format("Error while creating file: '%s'%n", outputFile);
exception.printStackTrace();
System.exit(1);
}
}
}
public Pipeline pipelineFromString(String pipelineString) {
Pipeline pipeline;
if(pipelineString.contains("=")) {
pipeline = getGraph(pipelineString);
} else {
pipeline = getSequence(pipelineString);
}
return pipeline;
}
@Override
public void run() {
        Pipeline pipeline = pipelineFromString(pipelineString);
InferenceConfiguration inferenceConfiguration =
new InferenceConfiguration()
.protocol(protocol)
.pipeline(pipeline);
if(yaml) {
printOrSave(inferenceConfiguration.toYaml());
} else {
JsonObject output = new JsonObject(inferenceConfiguration.toJson());
if (minified) {
printOrSave(output.encode());
} else {
printOrSave(output.encodePrettily());
}
}
}
private SequencePipeline getSequence(String pipelineString) {
SequencePipeline.Builder builder = SequencePipeline.builder();
for(String stepType : pipelineString.split(",")) {
builder.add(getPipelineStep(stepType));
}
return builder.build();
}
private GraphPipeline getGraph(String pipelineString) {
GraphBuilder builder = new GraphBuilder();
graphStepsGlobalMap.put("input", builder.input());
Matcher stepMatcher = STEP_PATTERN.matcher(pipelineString);
String lastOutputName = null;
int stepIndex = 0;
while (stepMatcher.find()) {
stepIndex++;
String outputs = stepMatcher.group(1);
String type = stepMatcher.group(2);
String inputs = stepMatcher.group(3);
if (type.equalsIgnoreCase(GraphStepType.SWITCH.name())) {
Matcher switchWholeOutputMatcher = SWITCH_WHOLE_OUTPUT_PATTERN.matcher(outputs);
if(switchWholeOutputMatcher.find()) {
Matcher switchOutputsMatcher = NAME_PATTERN.matcher(switchWholeOutputMatcher.group(1));
List<String> switchOutputs = new ArrayList<>();
while (switchOutputsMatcher.find()) {
String switchOutputName = switchOutputsMatcher.group(1);
if(reservedKeywords.contains(switchOutputName)) {
out.format("Output name '%s' should be other than one of the reserved keywords: %s%n", switchOutputName, reservedKeywords);
System.exit(1);
} else {
switchOutputs.add(switchOutputsMatcher.group(1));
}
}
if(switchOutputs.size() < 2) {
out.format("Switch outputs (%s) should be more than 1%n", switchOutputs.size());
System.exit(1);
}
Matcher switchInputsMatcher = SWITCH_INPUTS_PATTERN.matcher(inputs);
if(switchInputsMatcher.find()) {
String switchType = switchInputsMatcher.group(1);
String selectField = switchInputsMatcher.group(2);
String otherSwitchInputs = switchInputsMatcher.group(3);
if(switchType.equalsIgnoreCase(SwitchType.INT.name())) {
String switchName = String.format("%s_switch_%s", otherSwitchInputs, UUID.randomUUID().toString().substring(0, 8));
if(graphStepsGlobalMap.containsKey(otherSwitchInputs)) {
GraphStep[] switchOutputSteps = builder.switchOp(switchName, new DataIntSwitchFn(switchOutputs.size(), selectField), graphStepsGlobalMap.get(otherSwitchInputs));
for(int i = 0; i < switchOutputs.size(); i++) {
switchOutputSteps[i].name(switchOutputs.get(i));
if(graphStepsGlobalMap.containsKey(switchOutputSteps[i].name())) {
out.format("Output '%s' is already defined in a previous step from the current step %s%n",
switchOutputSteps[i].name(), stepIndex);
System.exit(1);
}
graphStepsGlobalMap.put(switchOutputSteps[i].name(), switchOutputSteps[i]);
lastOutputName = switchOutputs.get(i);
}
} else {
out.format("Undefined input name '%s' for switch step '%s' at step %s. Make sure that the input name '%s' is defined in a previous step%n",
otherSwitchInputs, stepMatcher.group(), stepIndex, otherSwitchInputs);
System.exit(1);
}
} else {
int lastIndexOfComma = otherSwitchInputs.lastIndexOf(',');
String inputName = otherSwitchInputs.substring(lastIndexOfComma + 1);
if(!graphStepsGlobalMap.containsKey(inputName)) {
out.format("Undefined input name '%s' for switch step '%s' at step %s. Make sure that the input name '%s' is defined in a previous step%n",
inputName, stepMatcher.group(), stepIndex, inputName);
System.exit(1);
}
String mapInput = otherSwitchInputs.substring(0, lastIndexOfComma);
Matcher switchMapMatcher = SWITCH_MAP_PATTERN.matcher(mapInput);
Map<String, Integer> switchMap = new HashMap<>();
while (switchMapMatcher.find()) {
String key = switchMapMatcher.group(1);
if(switchMap.containsKey(key)) {
out.format("Switch map key '%s' is already defined%n", key);
System.exit(1);
}
int channel = Integer.parseInt(switchMapMatcher.group(2));
if(channel > switchOutputs.size() - 1) {
out.format("The switch channel (%s) in the switch map should not be greater " +
"than the number of switch outputs minus one (%s)%n", channel, switchOutputs.size() - 1);
System.exit(1);
} else {
switchMap.put(key, channel);
}
}
if(switchMap.size() != switchOutputs.size()) {
out.format("Switch map size (%s) should be equal to switch outputs size (%s)%n",
switchMap.size(), switchOutputs.size());
System.exit(1);
}
String switchName = String.format("%s_switch_%s", inputName, UUID.randomUUID().toString().substring(0, 8));
GraphStep[] switchOutputSteps = builder.switchOp(switchName, new DataStringSwitchFn(switchOutputs.size(), selectField, switchMap), graphStepsGlobalMap.get(inputName));
for(int i = 0; i < switchOutputs.size(); i++) {
switchOutputSteps[i].name(switchOutputs.get(i));
if(graphStepsGlobalMap.containsKey(switchOutputSteps[i].name())) {
out.format("Output '%s' is already defined in a previous step from the current step %s%n",
switchOutputSteps[i].name(), stepIndex);
System.exit(1);
}
graphStepsGlobalMap.put(switchOutputSteps[i].name(), switchOutputSteps[i]);
lastOutputName = switchOutputSteps[i].name();
}
}
} else {
out.format("Invalid switch input pattern '%s' at step %s. The format should be int,<select_field>,<input_name> " +
"or string,<select_field>,<map_keys_and_values>,<input_name>. " +
"Where 'map_keys_and_values' should be in the form of '<key1>:<switch1_number>,<key2>:<switch2_number>,...'.%n" +
"Examples are:%n" +
"---------------------------------%n" +
"01. int,select,input%n" +
"02. string,select,x:0,y:1,input%n" +
"---------------------------------%n",
inputs, stepIndex);
System.exit(1);
}
} else {
out.format("Invalid switch output pattern '%s' at step %s. Should be a comma-separated list of output names. For example: [s1,s2,...]%n", outputs, stepIndex);
System.exit(1);
}
} else if(type.equalsIgnoreCase(GraphStepType.ANY.name()) || type.equalsIgnoreCase(GraphStepType.MERGE.name())) {
Matcher inputsMatcher = NAME_PATTERN.matcher(inputs);
List<GraphStep> inputGraphSteps = new ArrayList<>();
while (inputsMatcher.find()) {
String inputName = inputsMatcher.group(1);
if(graphStepsGlobalMap.containsKey(inputName)) {
inputGraphSteps.add(graphStepsGlobalMap.get(inputName));
} else {
out.format("Undefined input name '%s' for '%s' step '%s' at step %s. Make sure that the input name '%s' is defined in a previous step%n",
inputName, type.toLowerCase(), stepMatcher.group(), stepIndex, inputName);
System.exit(1);
}
}
if(inputGraphSteps.size() < 2) {
out.format("Number of inputs for '%s' step should be more than 1%n", type.toLowerCase());
System.exit(1);
}
if(type.equalsIgnoreCase(GraphStepType.ANY.name())) {
GraphStep anyOutput = builder.any(outputs, inputGraphSteps.toArray(new GraphStep[inputGraphSteps.size()]));
if(graphStepsGlobalMap.containsKey(anyOutput.name())) {
out.format("Output '%s' is already defined in a previous step from the current step %s%n",
anyOutput.name(), stepIndex);
System.exit(1);
}
graphStepsGlobalMap.put(anyOutput.name(), anyOutput);
lastOutputName = anyOutput.name();
} else {
GraphStep mergeOutput = inputGraphSteps.get(0).mergeWith(outputs, inputGraphSteps.subList(1, inputGraphSteps.size()).toArray(new GraphStep[inputGraphSteps.size() - 1]));
if(graphStepsGlobalMap.containsKey(mergeOutput.name())) {
out.format("Output '%s' is already defined in a previous step from the current step %s%n",
mergeOutput.name(), stepIndex);
System.exit(1);
}
graphStepsGlobalMap.put(mergeOutput.name(), mergeOutput);
lastOutputName = mergeOutput.name();
}
} else {
if(type.equalsIgnoreCase("input")) {
out.format("The step type cannot be 'input'. Should be either one of the pipeline step types %sor the graph step types %s%n",
Arrays.toString(PipelineStepType.values()), Arrays.toString(GraphStepType.values()));
System.exit(1);
}
if(outputs.contains(",")) {
out.format("Number of outputs (%s) in step %s can only be 1%n", outputs.split(",").length, stepIndex);
System.exit(1);
}
if(inputs.contains(",")) {
out.format("Number of inputs (%s) in step %s can only be 1%n", outputs.split(",").length, stepIndex);
System.exit(1);
}
if(reservedKeywords.contains(outputs)) {
out.format("Output name '%s' should be other than one of the reserved keywords: %s%n", outputs, reservedKeywords);
System.exit(1);
} else {
if(graphStepsGlobalMap.containsKey(inputs)) {
if(Arrays.stream(PipelineStepType.values()).map(Enum::name).collect(Collectors.toList()).contains(type.toUpperCase())) {
if(graphStepsGlobalMap.containsKey(outputs)) {
out.format("Output '%s' is already defined in a previous step from the current step %s%n",
outputs, stepIndex);
System.exit(1);
}
graphStepsGlobalMap.put(outputs, graphStepsGlobalMap.get(inputs).then(outputs, getPipelineStep(type)));
lastOutputName = outputs;
} else {
out.format("Invalid step type '%s'. Should be either one of the pipeline step types %sor the graph step types %s%n",
type, Arrays.toString(PipelineStepType.values()), Arrays.toString(GraphStepType.values()));
System.exit(1);
}
} else {
out.format("Undefined input name '%s' for %s step '%s' at step %s. Make sure that the input name '%s' is defined in a previous step%n",
inputs, type.toLowerCase(), stepMatcher.group(), stepIndex, inputs);
System.exit(1);
}
}
}
}
if (lastOutputName == null) {
out.format("Invalid graph pipeline format %s. Should be a comma-separated list of the format: " +
"'<output>=<type>(<inputs>)' or '[outputs]=switch(<inputs>)' for switches%n", pipelineString);
System.exit(1);
}
return builder.build(graphStepsGlobalMap.get(lastOutputName));
}
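// Instantiates a pipeline step of the given type with illustrative placeholder arguments. Steps
// from optional modules are created reflectively via Class.forName so this class compiles without
// them; 'moduleName' tracks which module to suggest when the class is missing from the classpath.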
private PipelineStep getPipelineStep(String type) {
String moduleName = null;
Class<?> clazz;
try {
switch (PipelineStepType.valueOf(type.toUpperCase())) {
case CROP_GRID:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.crop.CropGridStep");
return (PipelineStep) clazz
.getConstructor(String.class, String.class, int.class, int.class,
boolean.class, String.class, boolean.class, boolean.class, Double.class, String.class)
.newInstance("image1", "topLeft,topRight,bottomLeft,bottomRight", 10, 10, true, "box", false, false, 1.33,
(String) clazz.getField("DEFAULT_OUTPUT_NAME").get(null));
case CROP_FIXED_GRID:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.crop.CropFixedGridStep");
Class<?> pointClazz1 = Class.forName("ai.konduit.serving.pipeline.impl.data.point");
Constructor<?> pointConstructor1 = pointClazz1.getConstructor(double[].class, String.class, Double.class);
List<?> pointList1 = Arrays.asList(
pointConstructor1.newInstance(new double[] {0, 1}, "label", 60.0),
pointConstructor1.newInstance(new double[] {1, 1}, "label", 60.0),
pointConstructor1.newInstance(new double[] {0, 0}, "label", 60.0),
pointConstructor1.newInstance(new double[] {1, 0}, "label", 60.0)
);
return (PipelineStep) clazz
.getConstructor(String.class, List.class, int.class, int.class,
boolean.class, String.class, boolean.class, boolean.class, Double.class, String.class)
.newInstance("image2", pointList1, 100, 100, true, "box",
false, false, 1.33, "crop");
case DL4J:
moduleName = "konduit-serving-deeplearning4j";
clazz = Class.forName("ai.konduit.serving.models.deeplearning4j.step.DL4JStep");
return (PipelineStep) clazz
.getConstructor(String.class, List.class, List.class)
.newInstance("<path_to_model>", Arrays.asList("1", "2"), Arrays.asList("11", "22"));
case KERAS:
moduleName = "konduit-serving-deeplearning4j";
clazz = Class.forName("ai.konduit.serving.models.deeplearning4j.step.keras.KerasStep");
return (PipelineStep) clazz
.getConstructor(String.class, List.class, List.class)
.newInstance("<path_to_model>", Arrays.asList("1", "2"), Arrays.asList("11", "22"));
case DRAW_BOUNDING_BOX:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.bb.draw.DrawBoundingBoxStep");
Class<?> scaleClass = Class.forName("ai.konduit.serving.data.image.step.bb.draw.DrawBoundingBoxStep$Scale");
Class<?> imageToNDArrayConfigClass = Class.forName("ai.konduit.serving.data.image.convert.ImageToNDArrayConfig");
Object imageToNDArrayConfigObject = imageToNDArrayConfigClass.getConstructor().newInstance();
imageToNDArrayConfigObject.getClass().getMethod("height", Integer.class).invoke(imageToNDArrayConfigObject, 100);
imageToNDArrayConfigObject.getClass().getMethod("width", Integer.class).invoke(imageToNDArrayConfigObject, 100);
return (PipelineStep) clazz
.getConstructor(String.class, String.class, boolean.class, boolean.class, Map.class,
String.class, int.class, scaleClass, int.class, int.class,
imageToNDArrayConfigClass, boolean.class, String.class)
.newInstance("image3", "box", false, false, new HashMap<>(), "blue", 1,
scaleClass.getField("NONE").get(null), 10, 10,
imageToNDArrayConfigObject, false, "red");
case DRAW_FIXED_GRID:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.draw.DrawFixedGridStep");
Class<?> pointClazz2 = Class.forName("ai.konduit.serving.pipeline.impl.data.point");
Constructor<?> pointConstructor2 = pointClazz2.getConstructor(double[].class, String.class, Double.class);
List<?> pointList2 = Arrays.asList(
pointConstructor2.newInstance(new double[] {0, 1}, "label", 60.0),
pointConstructor2.newInstance(new double[] {1, 1}, "label", 60.0),
pointConstructor2.newInstance(new double[] {0, 0}, "label", 60.0),
pointConstructor2.newInstance(new double[] {1, 0}, "label", 60.0)
);
return (PipelineStep) clazz
.getConstructor(String.class, List.class, int.class, int.class,
boolean.class, String.class, String.class, int.class, Integer.class)
.newInstance("image4", pointList2, 10, 10, true, "blue", "red", 1, 1);
case DRAW_GRID:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.draw.DrawGridStep");
return (PipelineStep) clazz
.getConstructor(String.class, String.class, int.class, int.class,
boolean.class, String.class, String.class, int.class, Integer.class)
.newInstance("image1", "topLeft,topRight,bottomLeft,bottomRight", 10, 10, true, "blue", "red", 1, 1);
case DRAW_SEGMENTATION:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.segmentation.index.DrawSegmentationStep");
Class<?> imageToNDArrayConfigClass1 = Class.forName("ai.konduit.serving.data.image.convert.ImageToNDArrayConfig");
Object imageToNDArrayConfigObject1 = imageToNDArrayConfigClass1.getConstructor().newInstance();
imageToNDArrayConfigObject1.getClass().getMethod("height", Integer.class).invoke(imageToNDArrayConfigObject1, 100);
imageToNDArrayConfigObject1.getClass().getMethod("width", Integer.class).invoke(imageToNDArrayConfigObject1, 100);
return (PipelineStep) clazz
.getConstructor(List.class, String.class, String.class, String.class, Double.class,
Integer.class, imageToNDArrayConfigClass1)
.newInstance(Arrays.asList("red", "blue"), "[]", "image5", "image6", 0.5, 1,
imageToNDArrayConfigObject1);
case EXTRACT_BOUNDING_BOX:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.bb.extract.ExtractBoundingBoxStep");
Class<?> imageToNDArrayConfigClass2 = Class.forName("ai.konduit.serving.data.image.convert.ImageToNDArrayConfig");
Object imageToNDArrayConfigObject2 = imageToNDArrayConfigClass2.getConstructor().newInstance();
imageToNDArrayConfigObject2.getClass().getMethod("height", Integer.class).invoke(imageToNDArrayConfigObject2, 100);
imageToNDArrayConfigObject2.getClass().getMethod("width", Integer.class).invoke(imageToNDArrayConfigObject2, 100);
return (PipelineStep) clazz
.getConstructor(String.class, String.class, String.class, boolean.class, Double.class,
Integer.class, Integer.class, imageToNDArrayConfigClass2)
.newInstance("image7", "box2", "image8", true, 1.33, 10, 10, imageToNDArrayConfigObject2);
case CAMERA_FRAME_CAPTURE:
moduleName = "konduit-serving-camera";
clazz = Class.forName("ai.konduit.serving.camera.step.capture.CameraFrameCaptureStep");
return (PipelineStep) clazz
.getConstructor(int.class, int.class, int.class, String.class)
.newInstance(0, 640, 480, "image");
case VIDEO_FRAME_CAPTURE:
moduleName = "konduit-serving-camera";
clazz = Class.forName("ai.konduit.serving.camera.step.capture.VideoFrameCaptureStep");
return (PipelineStep) clazz
.getConstructor(String.class, String.class)
.newInstance("<video_file_path>", "image");
case IMAGE_TO_NDARRAY:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.ndarray.ImageToNDArrayStep");
Class<?> imageToNDArrayConfigClass3 = Class.forName("ai.konduit.serving.data.image.convert.ImageToNDArrayConfig");
Object imageToNDArrayConfigObject3 = imageToNDArrayConfigClass3.getConstructor().newInstance();
imageToNDArrayConfigObject3.getClass().getMethod("height", Integer.class).invoke(imageToNDArrayConfigObject3, 100);
imageToNDArrayConfigObject3.getClass().getMethod("width", Integer.class).invoke(imageToNDArrayConfigObject3, 100);
return (PipelineStep) clazz
.getConstructor(imageToNDArrayConfigClass3, List.class, List.class, boolean.class, boolean.class,
String.class)
.newInstance(imageToNDArrayConfigObject3, Arrays.asList("key1", "key2"),
Arrays.asList("output1", "output2"), true, false, "@ImageToNDArrayStepMetadata");
case LOGGING:
return new LoggingStep()
.log(LoggingStep.Log.KEYS_AND_VALUES);
case SSD_TO_BOUNDING_BOX:
return new SSDToBoundingBoxStep();
case SAMEDIFF:
moduleName = "konduit-serving-samediff";
clazz = Class.forName("ai.konduit.serving.models.samediff.step.SameDiffStep");
return (PipelineStep) clazz
.getConstructor(String.class, List.class)
.newInstance("<path_to_model>", Arrays.asList("11", "22"));
case SHOW_IMAGE:
moduleName = "konduit-serving-image";
clazz = Class.forName("ai.konduit.serving.data.image.step.show.ShowImageStep");
return (PipelineStep) clazz
.getConstructor(String.class, String.class, Integer.class, Integer.class, boolean.class)
.newInstance("image", "image", 1280, 720, false);
case TENSORFLOW:
moduleName = "konduit-serving-tensorflow";
clazz = Class.forName("ai.konduit.serving.models.tensorflow.step.TensorFlowStep");
return (PipelineStep) clazz
.getConstructor(List.class, List.class, String.class)
.newInstance(Arrays.asList("1", "2"), Arrays.asList("11", "22"), "<path_to_model>");
case ND4JTENSORFLOW:
moduleName = "konduit-serving-nd4j-tensorflow";
clazz = Class.forName("ai.konduit.serving.models.nd4j.tensorflow.step.Nd4jTensorFlowStep");
return (PipelineStep) clazz
.getConstructor(List.class, List.class, Map.class, String.class)
.newInstance(Arrays.asList("1", "2"), Arrays.asList("11", "22"), null, "<path_to_model>");
case ONNX:
moduleName = "konduit-serving-onnx";
clazz = Class.forName("ai.konduit.serving.models.onnx.step.ONNXStep");
return (PipelineStep) clazz
.getConstructor(String.class, List.class, List.class)
.newInstance("<path_to_model>", Arrays.asList("1", "2"), Arrays.asList("11", "22"));
case PYTHON:
moduleName = "konduit-serving-python";
return (PipelineStep) Class.forName("ai.konduit.serving.python.PythonStep")
.getConstructor().newInstance();
case CLASSIFIER_OUTPUT:
return new ClassifierOutputStep()
.inputName("inputName (optional)")
.labels(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
default:
out.format("Invalid step type '%s'. Allowed values are %s%n", type, Arrays.asList(PipelineStepType.values()));
System.exit(1);
}
} catch (Exception exception) {
if(exception instanceof ClassNotFoundException) {
if(moduleName == null) {
exception.printStackTrace(out);
} else {
out.format("Please add '%s' module to the binaries to use " +
"'%s' step type%n", moduleName, type);
}
} else if (exception instanceof IllegalArgumentException) {
out.format("Invalid step type '%s'. Allowed values are %s%n", type, Arrays.asList(PipelineStepType.values()));
} else {
out.format("No pipeline step found for %s%n", type);
}
System.exit(1);
}
return null;
}
private void printOrSave(String output) {
if(outputFile == null) {
out.println(output);
} else {
try {
FileUtils.writeStringToFile(outputFile, output, StandardCharsets.UTF_8);
out.format("Config file created successfully at %s%n", outputFile.getAbsolutePath());
} catch (IOException exception) {
out.format("Unable to save configuration file to %s%n", outputFile.getAbsolutePath());
exception.printStackTrace(out);
}
}
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/InspectCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.cli.launcher.LauncherUtils;
import ai.konduit.serving.vertx.config.InferenceConfiguration;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import com.jayway.jsonpath.JsonPath;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.core.spi.launcher.DefaultCommand;
import org.apache.commons.io.FileUtils;
import org.nd4j.shade.guava.base.Strings;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Name("inspect")
@Summary("Inspect the details of a particular konduit server.")
@Description("Inspect the details of a particular konduit server given an id. To find a list of running servers and their details, use the 'list' command.\n\n" +
"Example usages:\n" +
"--------------\n" +
"- Prints the whole inference configuration of server with an id of 'inf_server':\n" +
"$ konduit inspect inf_server\n\n" +
"- Queries the inference configuration of server with an id of 'inf_server'\n" +
" based on the given pattern and gives output similar to 'localhost:45223'\n" +
"$ konduit inspect inf_server -q {host}:{port}\n\n" +
"- Queries the inference configuration of server with an id of 'inf_server'\n" +
" based on the given pattern and gives output similar to \n" +
" 'localhost:45223-{<pipeline_details>}'. The curly brackets can be escaped.\n" +
"$ konduit inspect inf_server -q {host}:{port}-\\{{pipeline}\\}\n" +
"--------------")
public class InspectCommand extends DefaultCommand {
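// Matches an unescaped '{key}' placeholder in the query pattern; group(1) captures the key path,
// while '\{' and '\}' are skipped so literal braces can be escaped (see the examples above).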
protected static final Pattern QUERY_PATTERN = Pattern.compile("(?<!\\\\)\\{([^}]+.?)(?<!\\\\)}");
private String id;
private String query;
@Argument(index = 0, argName = "server-id")
@Description("Konduit server id")
public void setId(String id) {
this.id = id;
}
@Option(longName = "query", shortName = "q", argName = "pattern")
@Description("Query pattern to inspect. Pattern: {key1}... or {key1.key2[0].key3}... " +
"See above examples for usage.")
public void setQuery(String query) {
this.query = query;
}
@Override
public void run() {
if(LauncherUtils.isProcessExists(id)) {
try {
int pid = LauncherUtils.getPidFromServerId(id);
InferenceConfiguration inferenceConfiguration = InferenceConfiguration.fromJson(
FileUtils.readFileToString(
new File(DirectoryFetcher.getServersDataDir(),
pid + ".data"),
StandardCharsets.UTF_8));
String json = inferenceConfiguration.toJson();
if(Strings.isNullOrEmpty(query)) {
out.println(json);
} else {
Matcher matcher = QUERY_PATTERN.matcher(query);
String output = query;
while (matcher.find()) {
    String key = matcher.group(1);
    String result;
    if ("pid".equalsIgnoreCase(key)) {
        result = String.valueOf(pid);
    } else if ("size".equalsIgnoreCase(key)) {
        result = String.valueOf(inferenceConfiguration.pipeline().size());
    } else {
        Object outputObject = JsonPath.read(json, "$." + key);
        if (outputObject instanceof LinkedHashMap) {
            result = JsonObject.mapFrom(outputObject).encode();
        } else if (outputObject instanceof List) {
            result = new JsonArray((List) outputObject).encode();
        } else {
            result = outputObject.toString();
        }
    }
    output = output.replace(matcher.group(), result);
}
out.println(output
.replace("\\{", "{")
.replace("\\}", "}"));
}
} catch (Exception exception) {
exception.printStackTrace(out);
}
} else {
out.println("No konduit server exists with an id: " + id);
}
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/JoinCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import io.vertx.core.cli.annotations.Description;
import io.vertx.core.cli.annotations.Name;
import io.vertx.core.cli.annotations.Summary;
import io.vertx.core.impl.launcher.commands.BareCommand;
@Name("join")
@Summary("Join a konduit-serving cluster.")
@Description("Join a konduit-serving cluster.")
public class JoinCommand extends BareCommand {
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/KonduitRunCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.pipeline.api.protocol.URIResolver;
import ai.konduit.serving.pipeline.settings.KonduitSettings;
import ai.konduit.serving.vertx.api.DeployKonduitServing;
import ai.konduit.serving.vertx.config.InferenceConfiguration;
import io.vertx.core.cli.CLIException;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.impl.launcher.CommandLineUtils;
import io.vertx.core.impl.launcher.commands.RunCommand;
import io.vertx.core.json.JsonObject;
import lombok.extern.slf4j.Slf4j;
import uk.org.lidalia.sysoutslf4j.context.SysOutOverSLF4J;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Scanner;
import static ai.konduit.serving.vertx.api.DeployKonduitServing.INFERENCE_SERVICE_IDENTIFIER;
@Slf4j
@Name(value = "run", priority = 1)
@Summary("Runs a konduit server in the foreground.")
@Description("Runs a konduit server in the foreground.")
public class KonduitRunCommand extends RunCommand {
public static final String INFERENCE_SERVICE_TYPE_NAME = "inference";
public static final String DEFAULT_SERVICE = INFERENCE_SERVICE_TYPE_NAME;
public static final List<String> VALID_SERVICE_TYPES = Collections.singletonList(INFERENCE_SERVICE_TYPE_NAME);
private String serviceType;
private InferenceConfiguration inferenceConfiguration;
@Override
@Option(longName = "service", shortName = "s", argName = "service-type")
@DefaultValue(DEFAULT_SERVICE)
@Description("Service type that needs to be deployed. Defaults to '" + DEFAULT_SERVICE + "'")
public void setMainVerticle(String serviceType) {
if(VALID_SERVICE_TYPES.contains(serviceType)) {
this.serviceType = serviceType;
} else {
throw new CLIException(
String.format("Invalid service type %s. Allowed values are: %s",
serviceType,
VALID_SERVICE_TYPES)
);
}
}
/**
* The main verticle configuration, it can be a json file or a json string.
*
* @param configuration the configuration
*/
@Override
@Option(shortName = "c", longName = "config", argName = "config", required = true)
@Description("Specifies a configuration that should be provided to the verticle. <config> should reference either a " +
"text file containing a valid JSON object which represents the configuration OR be a JSON string.")
public void setConfig(String configuration) {
File file = new File(configuration);
if(file.exists()) {
configuration = file.getAbsolutePath();
}
log.info("Processing configuration: {}", configuration);
super.setConfig(configuration);
}
@Override
public void run() {
SysOutOverSLF4J.sendSystemOutAndErrToSLF4J();
String serverId = getServerId();
if(serverId == null) {
serverId = KonduitSettings.getServingId();
}
log.info("Starting konduit server with an id of '{}'", serverId);
log.info("Using classpath: '{}'", System.getProperty("java.class.path"));
super.run();
}
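// Extracts the server id from the '-Dserving.id=<id>' system property that the 'serve' command
// appends as the last argument; returns null when the property is absent (e.g. a direct 'run').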
private String getServerId() {
String[] commandSplits = CommandLineUtils.getCommand().split(" ");
String lastSegment = commandSplits[commandSplits.length - 1];
if(lastSegment.contains("serving.id")) {
return lastSegment.replace("-Dserving.id=", "").trim();
} else {
return null;
}
}
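// Resolves the given value to a local file (downloading it first when it is a URL), reads it as
// JSON/YAML, and falls back to treating the raw string itself as the configuration when no such
// file exists.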
@Override
protected JsonObject getJsonFromFileOrString(String jsonOrYamlFileOrString, String argName) {
if (jsonOrYamlFileOrString != null) {
File scanFile = null;
try {
scanFile = URIResolver.isUrl(jsonOrYamlFileOrString) ?
URIResolver.getFile(jsonOrYamlFileOrString) :
new File(jsonOrYamlFileOrString);
} catch (IOException e) {
log.error("Failed to load model " + jsonOrYamlFileOrString, e);
return null;
}
try (Scanner scanner = new Scanner(scanFile, "UTF-8").useDelimiter("\\A")) {
return readConfiguration(scanner.next());
} catch (FileNotFoundException e) {
return readConfiguration(jsonOrYamlFileOrString);
}
} else {
return null;
}
}
/**
* Parse the given configuration yaml/json string to {@link JsonObject}. The
* configuration should be parsable to {@link InferenceConfiguration}.
*
* @param configurationString given configuration string
* @return Read configuration to JsonObject. Returns null on failure.
*/
private JsonObject readConfiguration(String configurationString) {
JsonObject jsonObject = null;
try {
jsonObject = new JsonObject(configurationString);
inferenceConfiguration = InferenceConfiguration.fromJson(configurationString);
return jsonObject;
} catch (Exception jsonProcessingErrors) {
try {
inferenceConfiguration = InferenceConfiguration.fromYaml(configurationString);
return new JsonObject(inferenceConfiguration.toJson());
} catch (Exception yamlProcessingErrors) {
if(jsonObject != null) {
log.error("Given configuration: {} does not contain a valid JSON/YAML object", jsonObject.encodePrettily());
} else {
log.error("Given configuration was malformatted JSON or invalid YAML configuration:", configurationString);
}
log.error("\n\nErrors while processing as a json string:", jsonProcessingErrors);
log.error("\n\nErrors while processing as a yaml string:", yamlProcessingErrors);
return null;
}
}
}
@Override
protected void deploy() {
if (INFERENCE_SERVICE_TYPE_NAME.equalsIgnoreCase(serviceType)) {
DeployKonduitServing.registerInferenceVerticleFactory(vertx);
super.setMainVerticle(INFERENCE_SERVICE_IDENTIFIER + ":" + inferenceConfiguration.protocol().name().toLowerCase());
} else {
throw new CLIException(String.format("Unsupported service type %s", serviceType));
}
deploy(mainVerticle, vertx, deploymentOptions, handler -> {
if (handler.failed()) {
out.format("Unable to deploy server for configuration %n%s%n", inferenceConfiguration.toJson());
vertx.close();
}
});
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/ListCommand.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.cli.launcher.LauncherUtils;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import io.vertx.core.cli.annotations.Description;
import io.vertx.core.cli.annotations.Name;
import io.vertx.core.cli.annotations.Summary;
import io.vertx.core.impl.launcher.commands.ExecUtils;
import io.vertx.core.spi.launcher.DefaultCommand;
import org.apache.commons.io.FileUtils;
import org.nd4j.shade.jackson.databind.JsonNode;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Name(value = "list", priority = 1)
@Summary("Lists the running konduit servers.")
@Description("List all konduit servers launched through the `serve` command.")
public class ListCommand extends DefaultCommand {
private static final Pattern PS = Pattern.compile("-Dserving.id=(.*)\\s*");
private static final Pattern ST = Pattern.compile("\\s+-s\\s+(.*)\\s*|\\s+--service\\s+(.*)\\s*");
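// PS extracts the value of the '-Dserving.id=' property from a process command line; ST extracts
// the service type passed via '-s' or '--service'.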
// Note about stack traces - they are printed on the stream passed to the command.
/**
* Executes the {@code list} command.
*/
@Override
public void run() {
out.println("\nListing konduit servers...\n");
List<String> cmd = new ArrayList<>();
try {
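// Build a platform-specific process listing command that filters for the 'serving.id'
// system property set on every server spawned by the 'serve' command.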
if (ExecUtils.isWindows()) {
cmd.add("WMIC");
cmd.add("PROCESS");
cmd.add("WHERE");
cmd.add("\"CommandLine like '%serving.id%' and name!='wmic.exe'\"");
cmd.add("GET");
cmd.add("CommandLine,ProcessId");
} else {
cmd.add("sh");
cmd.add("-c");
cmd.add("ps axww | grep \"serving.id=\"");
}
dumpFoundVertxApplications(cmd);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
e.printStackTrace(out);
} catch (Exception e) {
e.printStackTrace(out);
}
}
private void dumpFoundVertxApplications(List<String> cmd) throws IOException, InterruptedException {
String printFormat = " %1$-3s | %2$-30s | %3$-10s | %4$-20s | %5$-7s | %6$-10s \n";
boolean none = true;
final Process process = new ProcessBuilder(cmd).start();
BufferedReader reader =
new BufferedReader(new InputStreamReader(process.getInputStream()));
String line;
int index = 0;
while ((line = reader.readLine()) != null) {
final Matcher matcher = PS.matcher(line);
if (matcher.find()) {
index++;
if(none) {
out.format(printFormat, "#", "ID", "TYPE", "URL", "PID", "STATUS");
}
String id = matcher.group(1).trim().split(" ")[0];
printServerDetails(index, printFormat, id, line);
none = false;
}
}
if(index > 0) {
out.println();
}
process.waitFor();
reader.close();
if (none) {
out.println("No konduit servers found.");
}
}
private void printServerDetails(int index, String printFormat, String id, String line) {
String pid = LauncherUtils.extractPidFromLine(line);
String configuration;
String hostAndPort = "waiting...";
String status = "starting";
try {
configuration = FileUtils.readFileToString(new File(DirectoryFetcher.getServersDataDir(), pid + ".data"), StandardCharsets.UTF_8);
JsonNode jsonNode = ObjectMappers.json().readTree(configuration);
hostAndPort = String.format("%s:%s", jsonNode.get("host"), jsonNode.get("port")).replaceAll("\"", "");
status = "started";
} catch (IOException exception) {
if (exception instanceof FileNotFoundException) {
status = String.format("Starting: Execute 'konduit logs %s' for more details...", id);
} else {
out.println("Error occurred listing servers:");
exception.printStackTrace(out);
}
}
out.format(printFormat, index, id, getServiceType(line), hostAndPort, pid, status);
}
private String getServiceType(String line) {
Matcher matcher = ST.matcher(line);
if(matcher.find()) {
String output = matcher.group(1) != null ? matcher.group(1) : matcher.group(2);
return output.trim().split(" ")[0];
} else {
return "inference"; // Default service assumed.
}
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/LogsCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.cli.launcher.LauncherUtils;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.spi.launcher.DefaultCommand;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.input.Tailer;
import org.apache.commons.io.input.TailerListenerAdapter;
import java.io.*;
import java.nio.charset.StandardCharsets;
@Slf4j
@Name("logs")
@Summary("View the logs of a particular konduit server")
@Description("View the logs of a particular konduit server given an id.\n\n" +
"Example usages:\n" +
"--------------\n" +
"- Outputs the log file contents of server with an id of 'inf_server':\n" +
"$ konduit logs inf_server\n\n" +
"- Outputs and tail the log file contents of server with an id of 'inf_server':\n" +
"$ konduit logs inf_server -f\n\n" +
"- Outputs and tail the log file contents of server with an id of 'inf_server' \n" +
" from the last 10 lines:\n" +
"$ konduit logs inf_server -l 10 -f \n" +
"--------------")
public class LogsCommand extends DefaultCommand {
private String id;
private boolean follow;
private int lines = 10;
@Argument(index = 0, argName = "server-id")
@Description("Konduit server id")
public void setId(String id) {
this.id = id;
}
@Option(longName = "follow", shortName = "f", flag = true)
@Description("Follow the logs output.")
public void setFollow(boolean follow) {
this.follow = follow;
}
@Option(longName = "lines", shortName = "l")
@Description("Sets the number of lines to be printed. Default is '10'. Use -1 for outputting everything.")
public void setLines(String lines) {
try {
this.lines = Integer.parseInt(lines);
if(this.lines != -1 && this.lines < 1) {
System.out.format("Number of lines to be printed should be greater than 0. " +
"Current it is %s%n", lines);
System.exit(1);
}
} catch (Exception e) {
System.out.format("Unable to parse number of lines (%s) to a number%n", lines);
e.printStackTrace();
System.exit(1);
}
}
@Override
public void run() {
try {
File logsFile = new File(DirectoryFetcher.getCommandLogsDir(), id + ".log");
if (follow) {
readAndTail(logsFile, lines);
} else {
if(lines == -1) {
out.println(FileUtils.readFileToString(logsFile, StandardCharsets.UTF_8));
} else {
out.println(LauncherUtils.readLastLines(logsFile, lines));
}
}
} catch (Exception exception) {
out.println("Failed to read logs:");
exception.printStackTrace(out);
}
}
private void readAndTail(File logsFile, int fromNumberOfLines) throws IOException {
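// Tail the log file with a 100 ms poll interval and a 4 KB buffer, without re-opening the file
// between reads. When a finite line count is given, tailing starts from the end of the file,
// since init() below has already printed the requested number of trailing lines; with -1 the
// whole file is read from the beginning.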
new Tailer(logsFile, StandardCharsets.UTF_8, new TailerListenerAdapter() {
@SneakyThrows
@Override
public void init(Tailer tailer) {
super.init(tailer);
if(fromNumberOfLines != -1) {
out.println(LauncherUtils.readLastLines(logsFile, fromNumberOfLines));
}
}
@Override
public void handle(String line) {
out.println(line);
}
@Override
public void handle(Exception ex) {
ex.printStackTrace();
System.exit(1);
}
}, 100, fromNumberOfLines != -1, false, 4096).run();
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/MetricsCommand.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.cli.launcher.KonduitServingLauncher;
import ai.konduit.serving.cli.launcher.LauncherUtils;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import ai.konduit.serving.vertx.config.InferenceConfiguration;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.spi.launcher.DefaultCommand;
import io.vertx.ext.web.client.HttpRequest;
import io.vertx.ext.web.client.HttpResponse;
import io.vertx.ext.web.client.WebClient;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.nio.charset.StandardCharsets;
import static ai.konduit.serving.cli.launcher.LauncherUtils.getPidFromServerId;
@Name("metrics")
@Summary("Shows the running metrics for a particular server")
@Description("Prints the calculate metrics for a particular server. Useful for getting a quick insight into the running server.\n\n" +
"Example usages:\n" +
"--------------\n" +
"- Shows metrics of a server, named 'my_server': \n" +
"$ konduit metrics my_server \n" +
"--------------")
public class MetricsCommand extends DefaultCommand {
private String id;
@Argument(index = 0, argName = "server-id")
@Description("Konduit server id")
public void setId(String id) {
this.id = id;
}
@Override
public void run() {
if(LauncherUtils.isProcessExists(id)) {
Vertx vertx = Vertx.vertx();
try {
InferenceConfiguration inferenceConfiguration = InferenceConfiguration.fromJson(
FileUtils.readFileToString(new File(DirectoryFetcher.getServersDataDir(),
getPidFromServerId(id) + ".data"), StandardCharsets.UTF_8));
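// Issue a GET against the running server's '/metrics' endpoint and print the response body as-is.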
HttpRequest<Buffer> request = WebClient.create(vertx)
.head(inferenceConfiguration.port(),
inferenceConfiguration.host(),
"/metrics")
.method(HttpMethod.GET);
Handler<AsyncResult<HttpResponse<Buffer>>> responseHandler = handler -> {
if(handler.succeeded()) {
HttpResponse<Buffer> httpResponse = handler.result();
int statusCode = httpResponse.statusCode();
if(statusCode == 200) {
out.print(handler.result().body());
} else {
out.format("Request failed with status code: %s%nDetails: %s%n", statusCode,
handler.result().bodyAsString());
}
} else {
out.format("Failed request.%nExecute '%s logs %s' to find the cause.%n",
((KonduitServingLauncher) executionContext.launcher()).commandLinePrefix(), id);
}
vertx.close();
};
request.send(responseHandler);
} catch (Exception exception) {
exception.printStackTrace(out);
vertx.close();
}
} else {
out.println("No konduit server exists with an id: " + id);
}
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/ServeCommand.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.cli.launcher.KonduitServingLauncher;
import ai.konduit.serving.cli.launcher.LauncherUtils;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.impl.launcher.CommandLineUtils;
import io.vertx.core.impl.launcher.commands.ExecUtils;
import io.vertx.core.spi.launcher.DefaultCommand;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.nd4j.common.io.ClassPathResource;
import org.nd4j.common.io.StringUtils;
import org.nd4j.shade.guava.base.Strings;
import java.io.*;
import java.util.*;
import static ai.konduit.serving.cli.launcher.command.KonduitRunCommand.DEFAULT_SERVICE;
@Name("serve")
@Summary("Start a konduit server application")
@Description("Start a konduit server application. " +
"The application is identified with an id that can be set using the `--serving-id` or `-id` option. " +
"The application can be stopped with the `stop` command. " +
"This command takes the `run` command parameters. To see the " +
"run command parameters, execute `run --help`\n\n" +
"Example usages:\n" +
"--------------\n" +
"- Starts a server in the foreground with an id of 'inf_server' using 'config.json' as configuration file:\n" +
"$ konduit serve -id inf_server -c config.json\n\n" +
"- Starts a server in the background with an id of 'inf_server' using 'config.yaml' as configuration file:\n" +
"$ konduit serve -id inf_server -c config.yaml -b\n" +
"--------------")
@Slf4j
public class ServeCommand extends DefaultCommand {
protected String host;
protected int port;
protected String id;
protected String launcher;
protected int instances = 1;
protected String classpath;
protected String service;
protected String configuration;
protected boolean redirect;
protected String jvmOptions;
/**
* Sets the host name of the konduit server.
*
* @param host host name
*/
@Option(shortName = "h", longName = "host", argName = "host")
@DefaultValue("localhost")
@Description("Specifies the host name of the konduit server when the configuration provided is " +
"just a pipeline configuration instead of a whole inference configuration. Defaults to 'localhost'.")
public void setHost(String host) {
this.host = host;
}
/**
* Sets the port of the konduit server.
*
* @param port port number
*/
@Option(shortName = "p", longName = "port", argName = "port")
@DefaultValue("0")
@Description("Specifies the port number of the konduit server when the configuration provided is " +
"just a pipeline configuration instead of a whole inference configuration. Defaults to '0'.")
public void setPort(int port) {
this.port = port;
}
/**
* Sets the number of instance of the verticle to create.
*
* @param instances the number of instances
*/
@Option(shortName = "i", longName = "instances", argName = "instances")
@DefaultValue("1")
@Description("Specifies how many instances of the server will be deployed. Defaults to 1.")
public void setInstances(int instances) {
this.instances = instances;
}
/**
* Sets the classpath.
*
* @param classpath the classpath
*/
@Option(shortName = "cp", longName = "classpath", argName = "classpath")
@Description("Provides an extra classpath to be used for the verticle deployment.")
public void setClasspath(String classpath) {
this.classpath = classpath;
}
@Option(longName = "service", shortName = "s", argName = "type")
@DefaultValue(DEFAULT_SERVICE)
@Description("Service type that needs to be deployed. Defaults to \"inference\"")
public void setMainVerticle(String konduitServiceType) {
this.service = konduitServiceType;
}
/**
* The main verticle configuration, it can be a json file or a json string.
*
* @param configuration the configuration
*/
@Option(shortName = "c", longName = "config", argName = "server-config", required = true)
@Description("Specifies configuration that should be provided to the verticle. <config> should reference either a " +
"text file containing a valid JSON object which represents the configuration OR be a JSON string.")
public void setConfig(String configuration) {
this.configuration = configuration;
}
/**
* Sets the "application id" that would be to stop the application and be listed in the {@link ListCommand} command.
*
* @param id the application ID.
*/
@Option(longName = "serving-id", shortName = "id")
@Description("Id of the serving process. This will be visible in the 'list' command. This id can be used to call 'predict' and 'stop' commands on the running servers. " +
"If not given then an 8 character UUID is created automatically.")
public void setApplicationId(String id) {
this.id = id;
}
/**
* Sets the Java Virtual Machine options to pass to the spawned process. If not set, the JAVA_OPTS environment
* variable is used.
*
* @param options the jvm options
*/
@Option(shortName = "jo", longName = "java-opts")
@Description("Java Virtual Machine options to pass to the spawned process such as \"-Xmx1G -Xms256m " +
"-XX:MaxPermSize=256m\". If not set the `JAVA_OPTS` environment variable is used.")
public void setJavaOptions(String options) {
this.jvmOptions = options;
}
/**
* A hidden option to set the launcher class.
*
* @param clazz the class
*/
@Option(longName = "launcher-class")
@Hidden
public void setLauncherClass(String clazz) {
this.launcher = clazz;
}
@Option(shortName = "b", longName = "background", flag = true)
@Description("Runs the process in the background, if set.")
public void setRedirect(boolean background) {
this.redirect = !background;
}
private void addCustomServeOptions(List<String> cliArguments) {
if(classpath != null) {
cliArguments.add("--classpath");
cliArguments.add(classpath);
}
if(service != null) {
cliArguments.add("-s");
cliArguments.add(service);
}
if(configuration != null) {
cliArguments.add("-c");
cliArguments.add(configuration);
}
}
/**
* Starts the application in background.
*/
@Override
public void run() {
out.println("Starting konduit server...");
List<String> cmd = new ArrayList<>();
ProcessBuilder builder = new ProcessBuilder();
addJavaCommand(cmd);
// Must be called only once!
List<String> cliArguments = new ArrayList<>();
cliArguments.add("--instances");
cliArguments.add(String.valueOf(instances));
addCustomServeOptions(cliArguments);
cliArguments.addAll(getArguments());
String finalClassPath = Strings.isNullOrEmpty(classpath) ? System.getProperty("java.class.path") : classpath + File.pathSeparator + System.getProperty("java.class.path");
// Add the classpath to env.
builder.environment().putAll(System.getenv());
builder.environment().put("CLASSPATH", finalClassPath);
out.format("Expected classpath: %s%n", finalClassPath);
if (launcher != null) {
ExecUtils.addArgument(cmd, launcher);
// Do we have a valid command?
Optional<String> maybeCommand = cliArguments.stream()
.filter(arg -> executionContext.launcher().getCommandNames().contains(arg))
.findFirst();
if (!maybeCommand.isPresent()) {
// No command, add `run`
ExecUtils.addArgument(cmd, "run");
}
} else if (isLaunchedAsFatJar()) {
if(classpath != null && classpath.contains(id) && StringUtils.endsWithIgnoreCase(classpath, "manifest.jar")) {
ExecUtils.addArgument(cmd, "-jar");
cmd.add(classpath);
} else {
cmd.add("-cp");
cmd.add(finalClassPath);
cmd.add(KonduitServingLauncher.class.getCanonicalName());
}
ExecUtils.addArgument(cmd, "run");
} else {
// probably a `vertx` command line usage, or in IDE.
ExecUtils.addArgument(cmd, CommandLineUtils.getFirstSegmentOfCommand());
ExecUtils.addArgument(cmd, "run");
}
cliArguments.forEach(arg -> ExecUtils.addArgument(cmd, arg));
try {
out.format("INFO: Running command %s%n", String.join(" ", cmd));
builder.command(cmd); // Setting the builder command
if (redirect) {
runAndTailOutput(builder);
} else {
String commandLinePrefix = ((KonduitServingLauncher) executionContext.launcher()).commandLinePrefix();
builder.start();
out.format("For server status, execute: '%s list'%nFor logs, execute: '%s logs %s'%n",
commandLinePrefix,
commandLinePrefix,
id);
}
} catch (Exception e) {
out.println("Cannot create konduit server process");
e.printStackTrace(out);
ExecUtils.exitBecauseOfProcessIssue();
}
LauncherUtils.cleanServerDataFilesOnceADay();
}
private void runAndTailOutput(ProcessBuilder builder) throws IOException {
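// Polls the child process' stdout/stderr every 100 ms and mirrors them to this command's output
// until the server process disappears from the process list; a final drain catches any remaining
// output (e.g. startup failures).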
Process process = builder.start();
try(BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
Thread.sleep(2000);
while (LauncherUtils.isProcessExists(id)) {
while(reader.ready()){
out.println(reader.readLine());
}
while(errReader.ready()) {
out.println(errReader.readLine());
}
Thread.sleep(100);
}
//Print any additional errors
while(reader.ready()) {
out.println(reader.readLine());
}
while(errReader.ready()) {
out.println(errReader.readLine());
}
} catch (InterruptedException interruptedException) {
out.format("Killing server (%s) logs%n", id);
}
if (!process.isAlive()) {
out.format("Server with id (%s) terminated with exit code %s...%n", id, process.exitValue());
}
}
private void addJavaCommand(List<String> cmd) {
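// On Windows, launch through 'cmd.exe /C start <title> /B' so the server runs detached without
// opening a new console window; the serving id is used as the window title.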
if (ExecUtils.isWindows()) {
ExecUtils.addArgument(cmd, "cmd.exe");
ExecUtils.addArgument(cmd, "/C");
ExecUtils.addArgument(cmd, "start");
ExecUtils.addArgument(cmd, "serving-id - " + id);
ExecUtils.addArgument(cmd, "/B");
}
ExecUtils.addArgument(cmd, getJava().getAbsolutePath());
// Compute JVM Options
if (jvmOptions == null) {
String opts = System.getenv("JAVA_OPTS");
if (opts != null) {
Arrays.stream(opts.split(" ")).forEach(s -> ExecUtils.addArgument(cmd, s));
}
} else {
Arrays.stream(jvmOptions.split(" ")).forEach(s -> ExecUtils.addArgument(cmd, s));
}
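// An explicit logback configuration passed via JVM options wins; otherwise point the konduit
// log file at the per-server location and extract the bundled run-command logback config.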
String konduitLogsFileProperty = "konduit.logs.file.path";
String konduitRuntimeLogbackFileProperty = "logback.configurationFile.runCommand";
String logbackFileProperty = "logback.configurationFile";
String defaultLogbackFile = "logback-run_command.xml";
if (!String.join(" ", cmd).contains(logbackFileProperty)) {
String konduitLogsFileSystemProperty = System.getProperty(konduitLogsFileProperty);
String konduitRuntimeLogbackFileSystemProperty = System.getProperty(konduitRuntimeLogbackFileProperty);
ExecUtils.addArgument(cmd, String.format("-D%s=%s", konduitLogsFileProperty,
(konduitLogsFileSystemProperty != null ?
new File(konduitLogsFileSystemProperty) :
new File(DirectoryFetcher.getCommandLogsDir(), id + ".log"))
.getAbsolutePath()
)
);
File logbackFile = extractLogbackFile(konduitRuntimeLogbackFileSystemProperty != null ?
konduitRuntimeLogbackFileSystemProperty :
defaultLogbackFile);
ExecUtils.addArgument(cmd, String.format("-D%s=%s", logbackFileProperty, logbackFile.getAbsolutePath()));
}
}
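// Returns the given logback file directly if it exists on disk; otherwise extracts the classpath
// resource of that name into a uniquely-suffixed temp file so concurrent launches do not clash.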
private File extractLogbackFile(String file) {
String s = UUID.randomUUID().toString().replace("-","").substring(0, 16);
int idx = file.lastIndexOf('.');
String name = file.substring(0, idx) + "_" + s + file.substring(idx);
File out = new File(FileUtils.getTempDirectory(), name);
File inputFile = new File(file);
if(inputFile.exists() && inputFile.isFile()) {
return inputFile;
}
else {
try(InputStream is = new ClassPathResource(file).getInputStream();
OutputStream os = new BufferedOutputStream(new FileOutputStream(out))) {
IOUtils.copy(is, os);
} catch (IOException e){
log.error("Error extracting logback file: file does not exist or temp directory cannot be written to?", e);
}
return out;
}
}
private File getJava() {
File java;
File home = new File(System.getProperty("java.home"));
if (ExecUtils.isWindows()) {
java = new File(home, "bin/java.exe");
} else {
java = new File(home, "bin/java");
}
if (!java.isFile()) {
out.println("Cannot find java executable - " + java.getAbsolutePath() + " does not exist");
ExecUtils.exitBecauseOfSystemConfigurationIssue();
}
return java;
}
private boolean isLaunchedAsFatJar() {
return CommandLineUtils.getJar() != null;
}
private List<String> getArguments() {
List<String> args = executionContext.commandLine().allArguments();
// Add system properties passed as parameter
if (systemProperties != null) {
systemProperties.stream().map(entry -> "-D" + entry).forEach(args::add);
}
// Add id - it's important as it's the application mark.
args.add("-Dserving.id=" + getId());
return args;
}
protected String getId() {
if (id == null) {
id = UUID.randomUUID().toString().substring(0, 8);
}
if (LauncherUtils.isProcessExists(id)) {
out.println(String.format("A konduit server with an id: '%s' already exists.", id));
System.exit(1);
}
return id;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/StopCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import ai.konduit.serving.cli.launcher.LauncherUtils;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.impl.launcher.commands.ExecUtils;
import io.vertx.core.spi.launcher.DefaultCommand;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static ai.konduit.serving.cli.launcher.LauncherUtils.getPidFromServerId;
import static ai.konduit.serving.cli.launcher.LauncherUtils.isProcessExists;
@Name(value = "stop", priority = 1)
@Summary("Stop a running konduit server")
@Description("This command stops a konduit server started with the `serve` command. The command requires the " +
"serving id as argument. Use the `list` command to get the list of running konduit servers.\n\n" +
"Example usages:\n" +
"--------------\n" +
"- Stops the server with an id of 'inf_server':\n" +
"$ konduit stop inf_server\n" +
"--------------")
@Slf4j
public class StopCommand extends DefaultCommand {
private String id;
/**
* Whether or not we are in redeploy mode. In redeploy mode, do not exit the VM.
*/
private boolean redeploy;
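// Captures the leading PID from 'ps' output lines that contain the '-Dserving.id=' property.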
private static final Pattern PS = Pattern.compile("([0-9]+)\\s.*-Dserving.id=.*");
/**
* As the {@code stop} command takes only a single argument, it's the application id.
*
* @param id the id.
*/
@Argument(index = 0, argName = "serving.id", required = false)
@Description("The konduit server id")
public void setApplicationId(String id) {
this.id = id;
}
@Option(longName = "redeploy", flag = true)
@Hidden
public void setRedeploy(boolean redeploy) {
this.redeploy = redeploy;
}
/**
* Stops a running konduit server launched with the `serve` command.
*/
@Override
public void run() {
if (id == null) {
out.println("Application id not specified. See `stop --help` for more info.");
executionContext.execute("list");
LauncherUtils.cleanServerDataFilesOnceADay();
return;
}
if(!isProcessExists(id)) {
out.println(String.format("No konduit server exists with an id: '%s'.", id));
LauncherUtils.cleanServerDataFilesOnceADay();
return;
} else {
// Cleaning up current server data file
File serverDataFile = new File(DirectoryFetcher.getServersDataDir(), getPidFromServerId(id) + ".data");
try {
FileUtils.forceDelete(serverDataFile);
} catch (FileNotFoundException exception) {
// Ignore FileNotFoundException: the file is already gone, so there is nothing to delete.
} catch (IOException exception) {
log.error("Unable to delete server data file at: {}", serverDataFile.getAbsolutePath(), exception);
}
}
out.println("Stopping konduit server '" + id + "'");
if (ExecUtils.isWindows()) {
terminateWindowsApplication();
} else {
terminateLinuxApplication();
}
LauncherUtils.cleanServerDataFilesOnceADay();
}
private void terminateLinuxApplication() {
String pid = pid();
if (pid == null) {
out.println("Cannot find process for application using the id '" + id + "'.");
if (!redeploy) {
ExecUtils.exitBecauseOfProcessIssue();
}
return;
}
List<String> cmd = new ArrayList<>();
cmd.add("kill");
cmd.add(pid);
try {
int result = new ProcessBuilder(cmd).start().waitFor();
out.println("Application '" + id + "' terminated with status " + result);
if (!redeploy && result != 0) {
// We leave the application using the same exit code.
ExecUtils.exit(result);
}
} catch (Exception e) {
out.println("Failed to stop application '" + id + "'");
e.printStackTrace(out);
if (!redeploy) {
ExecUtils.exitBecauseOfProcessIssue();
}
}
}
private void terminateWindowsApplication() {
// Use wmic.
List<String> cmd = Arrays.asList(
"WMIC",
"PROCESS",
"WHERE",
"\"CommandLine like '%serving.id=" + id + "' and name!='wmic.exe'\"",
"CALL",
"TERMINATE"
);
try {
final Process process = new ProcessBuilder(cmd).start();
int result = process.waitFor();
out.println("Application '" + id + "' terminated with status " + result);
if (!redeploy && result != 0) {
// We leave the application using the same exit code.
ExecUtils.exit(result);
}
} catch (Exception e) {
out.println("Failed to stop application '" + id + "'");
e.printStackTrace(out);
if (!redeploy) {
ExecUtils.exitBecauseOfProcessIssue();
}
}
}
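    // For reference, the WMIC invocation assembled above is equivalent to the following command line
    // (illustrative serving id shown):
    //   WMIC PROCESS WHERE "CommandLine like '%serving.id=inf_server' and name!='wmic.exe'" CALL TERMINATE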
    private String pid() {
        try {
            final Process process = new ProcessBuilder(Arrays.asList("sh", "-c", "ps axww | grep \"Dserving.id=" + id + "$\"")).start();
            // Try-with-resources ensures the reader is closed on every path, including the early return.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    final Matcher matcher = PS.matcher(line);
                    if (matcher.find()) {
                        return matcher.group(1);
                    }
                }
                process.waitFor();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            e.printStackTrace(out);
        } catch (Exception e) {
            e.printStackTrace(out);
            out.println("Failed to get process ID.");
        }
        return null;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/VersionCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command;
import io.vertx.core.cli.annotations.Description;
import io.vertx.core.cli.annotations.Name;
import io.vertx.core.cli.annotations.Summary;
import io.vertx.core.spi.launcher.DefaultCommand;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
@Name(value = "version", priority = 1)
@Summary("Displays konduit-serving version.")
@Description("Prints the konduit-serving version used by the application along with other build details.")
public class VersionCommand extends DefaultCommand {
private static String version;
@Override
public void run() {
out.println(getVersion());
}
    /**
     * Reads the version details from the {@code META-INF/konduit-serving-cli-git.properties} classpath resource.
     *
     * @return the version details
     */
public static String getVersion() {
if (version != null) {
return version;
}
try (InputStream is = VersionCommand.class.getClassLoader().getResourceAsStream("META-INF/konduit-serving-cli-git.properties")) {
if (is == null) {
throw new IllegalStateException("Cannot find konduit-serving-cli-git.properties on classpath");
}
Properties gitProperties = new Properties();
gitProperties.load(is);
version = String.format("Version: %s%nCommit hash: %s%nCommit time: %s%nBuild time: %s%n",
gitProperties.getProperty("git.build.version"),
gitProperties.getProperty("git.commit.id").substring(0, 8),
gitProperties.getProperty("git.commit.time"),
gitProperties.getProperty("git.build.time"));
return version;
        } catch (IOException e) {
            // Preserve the original exception as the cause instead of only keeping its message.
            throw new IllegalStateException(e);
        }
}
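    /*
     * Illustrative (not actual) contents of META-INF/konduit-serving-cli-git.properties, showing the
     * four keys read above:
     *
     *   git.build.version=0.3.0
     *   git.commit.id=0123456789abcdef0123456789abcdef01234567
     *   git.commit.time=2022-01-01T00:00:00+0000
     *   git.build.time=2022-01-02T00:00:00+0000
     */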
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build/extension/ProfileCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command.build.extension;
import ai.konduit.serving.cli.launcher.command.build.extension.model.Profile;
import ai.konduit.serving.pipeline.api.python.models.AppendType;
import ai.konduit.serving.pipeline.api.python.models.PythonConfigType;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.spi.launcher.DefaultCommand;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.SystemUtils;
import org.nd4j.common.primitives.Pair;
import oshi.SystemInfo;
import oshi.hardware.CentralProcessor;
import oshi.hardware.GraphicsCard;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Name("profile")
@Summary("Command to List, view, edit, create and delete konduit serving run profiles.")
@Description("A utility command to create, view, edit, list and delete konduit serving run profiles. Run profiles " +
"configures the background run architecture such as CPU, GPU (CUDA) along with additional dependencies, server " +
"types, operating system etc. Konduit serving tries to identify the best profiles during the first server " +
"launch but you can manage more of your own profile configurations with this command. \n\n" +
"Example usages:\n" +
"--------------\n" +
"- Creates a CUDA 10.2 profile with the name 'CUDA-10.2':\n" +
"$ konduit profile create CUDA-10.2 -d CUDA_10.2 \n\n" +
"- Creates a simple profile for x86_avx2 architecture with name 'CPU-1':\n" +
"$ konduit profile create CPU-1 -a x86_avx2\n\n" +
"- Listing all the profiles:\n" +
"$ konduit profile list\n\n" +
"- Viewing a profile:\n" +
"$ konduit profile view CPU-1\n\n" +
"- Edit a profile with name 'CPU-1' from old architecture to 'x86':\n" +
"$ konduit profile edit CPU-1 -a x86 \n" +
"--------------")
@Slf4j
public class ProfileCommand extends DefaultCommand {
public static final String DEFAULT_CPU_PROFILE_NAME = "CPU";
public static final String DEFAULT_CUDA_PROFILE_NAME = "CUDA";
public static final String DEFAULT_CUDA_DEVICE = "CUDA_10.1";
public static final String CUDA_10_1_REDIST_DEPENDENCY = "org.bytedeco:cuda-platform-redist:10.1-7.6-1.5.2";
public static final String CUDA_10_2_REDIST_DEPENDENCY = "org.bytedeco:cuda-platform-redist:10.2-7.6-1.5.3";
private static final File PROFILES_SAVE_PATH = new File(DirectoryFetcher.getProfilesDir(), "profiles.yaml");
private static final File DEFAULT_PROFILE_NAME_PATH = new File(DirectoryFetcher.getProfilesDir(), "default");
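    /*
     * Illustrative (not actual) profiles.yaml content -- a map of profile name to profile settings,
     * abridged to the fields most relevant here:
     *
     *   CPU:
     *     computeDevice: "CPU"
     *     cpuArchitecture: "x86_avx2"
     *     operatingSystem: "LINUX"
     *     serverTypes: ["HTTP", "GRPC"]
     *   CUDA:
     *     computeDevice: "CUDA_10.1"
     *     cpuArchitecture: "x86"
     *     operatingSystem: "LINUX"
     *     additionalDependencies: ["org.bytedeco:cuda-platform-redist:10.1-7.6-1.5.2"]
     */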
private SubCommand subCommand;
private String profileName;
private String cpuArchitecture;
private String operatingSystem;
private String computeDevice;
private List<String> serverTypes;
private List<String> additionalDependencies;
private PythonConfigType pythonConfigType;
private String pythonPath;
private String environmentName;
private AppendType appendType;
@Argument(index = 0, argName = "sub_command", required = false)
@DefaultValue("LIST")
@Description("Sub command to be used with the profile command. Sub commands are: [default, create, list, view, edit, delete]. " +
"Defaults to 'LIST'")
public void setSubCommand(String subCommand) {
try {
this.subCommand = SubCommand.valueOf(subCommand.toUpperCase());
} catch (Exception e) {
System.out.format("Invalid sub command name: '%s'. Allowed values are: %s -> (case insensitive).",
subCommand, Arrays.toString(SubCommand.values()));
System.exit(1);
}
}
@Argument(index = 1, argName = "profile_name", required = false)
@Description("Name of the profile to create, view, edit or delete.")
public void setProfileName(String profileName) {
this.profileName = profileName;
}
@Option(shortName = "a", longName = "arch", argName = "cpu_architecture")
@DefaultValue("x86_avx2")
@Description("Name of the cpu architecture. Accepted values are: [x86, x86_64, x86_avx2, x86_64-avx2, x86_64_avx2, x86_avx512, x86_64-avx512, " +
"x86_64_avx512, armhf, arm64, ppc64le].")
public void setCpuArchitecture(String cpuArchitecture) {
this.cpuArchitecture = cpuArchitecture;
}
@Option(shortName = "o", longName = "os", argName = "operating_system")
@Description("Operating system the server needs to run at. Accepted values are: [windows, linux, mac]. Defaults to the current OS value.")
public void setOperatingSystem(String operatingSystem) {
this.operatingSystem = operatingSystem;
}
@Option(shortName = "d", longName = "device", argName = "device")
@DefaultValue(DEFAULT_CPU_PROFILE_NAME)
@Description("Compute device to use with the server. Accepted values are: [CPU, CUDA_10.0, CUDA_10.1, CUDA_10.2].")
public void setComputeDevice(String computeDevice) {
this.computeDevice = computeDevice;
}
@Option(shortName = "st", longName = "server_types", argName = "server_types", acceptMultipleValues = true)
@DefaultValue("HTTP GRPC")
@Description("One or more space separated values, indicating the backend server type. Accepted values are: [HTTP, GRPC, MQTT].")
public void setServerTypes(List<String> serverTypes) {
this.serverTypes = serverTypes;
}
@Option(shortName = "ad", longName = "addDep", argName = "additional_dependencies", acceptMultipleValues = true)
@Description("One or more space separated values (maven coordinates) indicating additional dependencies to be included with " +
"the server launch. The pattern of additional dependencies should be either <group_id>:<artifact_id>:<version> or " +
"<group_id>:<artifact_id>:<version>:<classifier>.")
public void setAdditionalDependencies(List<String> additionalDependencies) {
this.additionalDependencies = additionalDependencies;
}
@Option(shortName = "pt", longName = "python_type", argName = "python_type")
@Description("Override property for python config for selecting python install type. Available values are: [JAVACPP, CUSTOM, PYTHON, CONDA, VENV].")
public void setPythonType(String pythonType) {
try {
this.pythonConfigType = PythonConfigType.valueOf(pythonType.toUpperCase());
} catch (Exception e) {
System.out.format("Invalid python type: '%s'. Allowed values are: %s -> (case insensitive).",
pythonType, Arrays.toString(PythonConfigType.values()));
System.exit(1);
}
}
@Option(shortName = "pp", longName = "python_path", argName = "python_path")
@Description("Override property for python config for selecting specifying python path id for python type [PYTHON, CONDA, VENV] and absolute path " +
"for python type CUSTOM. Ignored for python type JAVACPP.")
public void setPythonPath(String pythonPath) {
this.pythonPath = pythonPath;
}
@Option(shortName = "en", longName = "env_name", argName = "env_name")
@Description("Override property for python config for selecting environment name for python type CONDA. Ignored for python type [CUSTOM, JAVACPP, VENV, PYTHON].")
public void setEnvironmentName(String environmentName) {
this.environmentName = environmentName;
}
@Option(shortName = "at", longName = "append_type", argName = "append_type")
@Description("Override property for python config for specifying append type with javacpp cpython library paths. Available values are: [BEFORE, NONE, AFTER].")
public void setAppendType(String appendType) {
try {
this.appendType = AppendType.valueOf(appendType.toUpperCase());
} catch (Exception e) {
System.out.format("Invalid append type: '%s'. Allowed values are: %s -> (case insensitive).",
appendType, Arrays.toString(AppendType.values()));
System.exit(1);
}
}
private enum SubCommand {
DEFAULT, CREATE, LIST, VIEW, EDIT, DELETE
}
@Override
public void run() {
if(profileName == null && !this.subCommand.equals(SubCommand.LIST)) {
out.println("A profile name must be specified for command \"" + subCommand + "\" - for example, \"konduit profile create my_profile\"");
System.exit(1);
}
switch (this.subCommand) {
case DEFAULT:
setDefaultProfile(profileName);
break;
case LIST:
listProfiles();
break;
case EDIT:
editProfile();
break;
case DELETE:
deleteProfile(profileName);
break;
case VIEW:
viewProfile(profileName);
break;
case CREATE:
createProfile();
break;
}
}
public static void setDefaultProfile(String profileName) {
if(!isProfileExists(profileName)) {
log.info("No profile with name {} exists.", profileName);
} else {
try {
FileUtils.writeStringToFile(DEFAULT_PROFILE_NAME_PATH, profileName, StandardCharsets.UTF_8);
log.info("Successfully set '{}' profile as default.", profileName);
} catch (IOException e) {
log.error("Unable to set default profile", e);
}
}
}
public static Profile getDefaultProfile() {
try {
if(DEFAULT_PROFILE_NAME_PATH.exists()) {
return getProfile(FileUtils.readFileToString(DEFAULT_PROFILE_NAME_PATH, StandardCharsets.UTF_8));
} else {
Map<String, Profile> profiles = getAllProfiles();
if(profiles.containsKey(DEFAULT_CUDA_PROFILE_NAME)) {
setDefaultProfile(DEFAULT_CUDA_PROFILE_NAME);
return profiles.get(DEFAULT_CUDA_PROFILE_NAME);
} else {
setDefaultProfile(DEFAULT_CPU_PROFILE_NAME);
return profiles.get(DEFAULT_CPU_PROFILE_NAME);
}
}
} catch (IOException e) {
log.error("Unable to get default profile", e);
return null;
}
}
private void createProfile() {
if(isProfileExists(profileName)) {
out.format("Profile with name %s already exists.%n", profileName);
} else {
saveProfile(profileName, fillProfileValues(new Profile()));
}
}
private void editProfile() {
if(isProfileExists(profileName)) {
saveProfile(profileName, fillProfileValues(getProfile(profileName)));
} else {
out.format("No profile found with the name of %s%n", profileName);
}
}
private Profile fillProfileValues(@NonNull Profile profile) {
if(cpuArchitecture != null) {
profile.cpuArchitecture(cpuArchitecture);
}
if(operatingSystem != null) {
profile.operatingSystem(operatingSystem);
}
if(computeDevice != null) {
profile.computeDevice(computeDevice);
}
if(StringUtils.containsIgnoreCase(profile.computeDevice(), "cuda")) {
profile.cpuArchitecture(getCpuArchitectureForCudaDevice(profile.cpuArchitecture()));
}
if(serverTypes != null && !serverTypes.isEmpty()) {
profile.serverTypes(serverTypes);
}
if(additionalDependencies != null && !additionalDependencies.isEmpty()) {
profile.additionalDependencies(additionalDependencies);
}
if(pythonConfigType != null) {
profile.pythonConfigType(pythonConfigType.name());
}
if(pythonPath != null) {
profile.pythonPath(pythonPath);
}
if(environmentName != null) {
profile.environmentName(environmentName);
}
if(appendType != null) {
profile.appendType(appendType.name());
}
if(StringUtils.containsIgnoreCase(profile.computeDevice(), "cuda")) {
String cudaVersion = profile.computeDevice().split("_")[1].trim();
String cudaRedistPackage = null;
Pair<String, String> cudaInstall = findCudaInstall();
if(cudaInstall == null) {
switch (cudaVersion) {
case "10.0":
out.format("No CUDA install found and no available redist package for cuda version: '%s' found. " +
"You can set device to CUDA 10.1 or 10.2 as an alternative. Otherwise, make sure to install CUDA " +
"from: %s before starting a konduit server.%n", cudaVersion,
"https://developer.nvidia.com/cuda-10.0-download-archive");
break;
case "10.1":
cudaRedistPackage = CUDA_10_1_REDIST_DEPENDENCY;
break;
case "10.2":
cudaRedistPackage = CUDA_10_2_REDIST_DEPENDENCY;
break;
default:
throw new IllegalStateException("Unsupported cuda version: " + cudaVersion);
}
if(cudaRedistPackage != null && !profile.additionalDependencies().contains(cudaRedistPackage)) {
out.format("No cuda install found. Adding cuda redist package: %s as an additional dependency. " +
"This will be downloaded and setup automatically on runtime konduit server build.%n",
cudaRedistPackage);
addDependencies(profile, cudaRedistPackage);
}
} else {
if(!cudaVersion.equals(cudaInstall.getKey())) {
out.format("Installed cuda version %s is not the same as the profile cuda version %s.%n", cudaInstall.getKey(), cudaVersion);
switch (cudaVersion) {
case "10.0":
out.format("No available redist package for cuda version: '%s' found. " +
"Make sure to install CUDA from: %s before starting a konduit server.%n", cudaVersion,
"https://developer.nvidia.com/cuda-10.0-download-archive");
break;
case "10.1":
cudaRedistPackage = CUDA_10_1_REDIST_DEPENDENCY;
break;
case "10.2":
cudaRedistPackage = CUDA_10_2_REDIST_DEPENDENCY;
break;
default:
throw new IllegalStateException("Unsupported cuda version: " + cudaVersion);
}
if(cudaRedistPackage != null && !profile.additionalDependencies().contains(cudaRedistPackage)) {
out.format("Adding cuda redist package: %s as an additional dependency. This will be " +
"downloaded and setup automatically on runtime konduit server start build.%n",
cudaRedistPackage);
addDependencies(profile, cudaRedistPackage);
}
}
}
}
return profile;
}
private static void addDependencies(@NonNull Profile profile, String dependency) {
List<String> dependencies = profile.additionalDependencies() == null ?
new ArrayList<>() :
new ArrayList<>(profile.additionalDependencies());
dependencies.add(dependency);
profile.additionalDependencies(dependencies);
}
public static boolean isProfileExists(String profileName) {
if(!PROFILES_SAVE_PATH.exists()){
return false;
}
return getAllProfiles().containsKey(profileName);
}
public static Map<String, Profile> firstTimeProfilesSetup() {
log.info("Performing first time profiles setup.");
Map<String, Profile> profiles = new HashMap<>();
Profile cpuProfile = new Profile().cpuArchitecture(getCpuArchitecture());
profiles.put(DEFAULT_CPU_PROFILE_NAME, cpuProfile);
log.info("Looking for CUDA compatible devices in the current system...");
List<GraphicsCard> nvidiaGraphicsCard = new ArrayList<>();
for(GraphicsCard graphicsCard : new SystemInfo().getHardware().getGraphicsCards()) {
String vendor = graphicsCard.getVendor();
if (vendor != null && StringUtils.containsIgnoreCase(vendor, "nvidia")) {
nvidiaGraphicsCard.add(graphicsCard);
}
}
if(!nvidiaGraphicsCard.isEmpty() && !cpuProfile.operatingSystem().equalsIgnoreCase(Profile.OperatingSystem.MAC.name())) {
log.info("Found the following CUDA compatible devices in the local system: {}", nvidiaGraphicsCard);
Profile cudaProfile = new Profile()
.computeDevice(DEFAULT_CUDA_DEVICE)
.operatingSystem(cpuProfile.operatingSystem())
.cpuArchitecture(getCpuArchitectureForCudaDevice(cpuProfile.cpuArchitecture()));
Pair<String, String> cudaInstall = findCudaInstall();
if(cudaInstall != null) {
log.info("Found CUDA install -- Version: {} | Path: {}", cudaInstall.getKey(), cudaInstall.getValue() != null ? cudaInstall.getValue() : "(Unable to identify)");
cudaProfile.computeDevice(String.format("CUDA_%s", cudaInstall.getKey().trim()));
} else {
log.info("Unable to find a valid cuda install in the local system. The server will try to " +
"automatically download the CUDA redist 10.1 package on runtime build");
addDependencies(cudaProfile, CUDA_10_1_REDIST_DEPENDENCY);
}
profiles.put(DEFAULT_CUDA_PROFILE_NAME, cudaProfile);
saveProfiles(profiles);
setDefaultProfile(DEFAULT_CUDA_PROFILE_NAME);
} else {
log.info("No CUDA compatible devices found in the current system.");
saveProfiles(profiles);
setDefaultProfile(DEFAULT_CPU_PROFILE_NAME);
}
log.info("Created profiles: \n{}", ObjectMappers.toYaml(profiles));
return profiles;
}
private static String getCpuArchitecture() {
SystemInfo systemInfo = new SystemInfo();
CentralProcessor.ProcessorIdentifier processorIdentifier = systemInfo.getHardware().getProcessor().getProcessorIdentifier();
String cpuArch = processorIdentifier.getMicroarchitecture();
if(cpuArch != null) {
if (StringUtils.containsIgnoreCase(cpuArch, "arm")) {
if(processorIdentifier.isCpu64bit()) {
return Profile.CpuArchitecture.arm64.name();
} else {
return Profile.CpuArchitecture.armhf.name();
}
} else if(StringUtils.containsIgnoreCase(cpuArch, "ppc")) {
return Profile.CpuArchitecture.ppc64le.name();
} else {
return Profile.CpuArchitecture.x86_avx2.name();
}
} else {
return Profile.CpuArchitecture.x86_avx2.name();
}
}
private static String getCpuArchitectureForCudaDevice(String cpuArchitecture) {
if(cpuArchitecture != null) {
if (StringUtils.containsIgnoreCase(cpuArchitecture, "x86")) {
return Profile.CpuArchitecture.x86.name();
} else if (StringUtils.containsIgnoreCase(cpuArchitecture, "arm")) {
return Profile.CpuArchitecture.arm64.name();
} else {
return Profile.CpuArchitecture.ppc64le.name();
}
} else {
return null;
}
}
public static Map<String, Profile> getAllProfiles() {
if(!PROFILES_SAVE_PATH.exists()) {
return firstTimeProfilesSetup();
} else {
try {
Map<String, Profile> profiles = new HashMap<>();
Map profilesMap = ObjectMappers.fromYaml(FileUtils.readFileToString(PROFILES_SAVE_PATH, StandardCharsets.UTF_8), Map.class);
for(Object key : profilesMap.keySet()) {
Profile profile = ObjectMappers.json().convertValue(profilesMap.get(key), Profile.class);
profiles.put((String) key, profile);
}
return profiles;
} catch (IOException e) {
log.error("Unable to read profiles data from {}.", PROFILES_SAVE_PATH.getAbsolutePath(), e);
System.exit(1);
return null;
}
}
}
public void deleteProfile(String profileName) {
Map<String, Profile> profiles = getAllProfiles();
if(profiles.containsKey(profileName)) {
if(profileName.equals(DEFAULT_CPU_PROFILE_NAME) || profileName.equals(DEFAULT_CUDA_PROFILE_NAME)) {
out.format("Cannot delete pre-set profiles with name '%s' or '%s'.%n", DEFAULT_CPU_PROFILE_NAME, DEFAULT_CUDA_PROFILE_NAME);
System.exit(1);
} else {
profiles.remove(profileName);
saveProfiles(profiles);
out.format("Profile \"%s\" deleted successfully.%n", profileName);
}
} else {
out.format("Profile with name \"%s\" does not exist.%n", profileName);
}
}
public static Profile getProfile(String profileName) {
if(isProfileExists(profileName)) {
return getAllProfiles().get(profileName);
} else {
log.info("Profile with name: {} doesn't exist.", profileName);
return null;
}
}
private void viewProfile(String profileName) {
if(isProfileExists(profileName)) {
out.println(ObjectMappers.toYaml(getAllProfiles().get(profileName)));
} else {
out.format("Profile with name: %s doesn't exist.%n", profileName);
}
}
private void listProfiles() {
if(PROFILES_SAVE_PATH.exists()) {
out.println(ObjectMappers.toYaml(getAllProfiles()));
} else {
getAllProfiles();
}
}
private void saveProfile(String profileName, Profile profile) {
Map<String, Profile> profiles = getAllProfiles();
profiles.put(profileName, profile);
saveProfiles(profiles);
out.format("Profile %s saved with details:%n%s%n", profileName, ObjectMappers.toYaml(profile));
}
private static void saveProfiles(Map<String, Profile> profiles) {
try {
FileUtils.writeStringToFile(PROFILES_SAVE_PATH,
ObjectMappers.toYaml(profiles),
StandardCharsets.UTF_8);
} catch (IOException e) {
log.error("Unable to save profiles to {}", PROFILES_SAVE_PATH.getAbsolutePath(), e);
System.exit(1);
}
}
private static Pair<String, String> findCudaInstall() {
Pair<String, String> cudaInstallFromNvcc = findCudaInstallFromNvcc();
if(cudaInstallFromNvcc != null) {
return cudaInstallFromNvcc;
} else {
return findCudaInstallFromNvidiaSmi();
}
}
private static Pair<String, String> findCudaInstallFromNvcc() {
String mainCommand = "nvcc";
try {
String cudaVersion = findCudaVersion(Arrays.asList(mainCommand, "--version"), Pattern.compile("release (.*),"));
if(cudaVersion == null) {
return null;
} else {
return new Pair<>(cudaVersion, findCudaInstallPath(mainCommand, cudaVersion));
}
} catch (Exception exception) {
log.error("Couldn't find cuda version from {} command", mainCommand, exception);
System.exit(1);
return null;
}
}
private static Pair<String, String> findCudaInstallFromNvidiaSmi() {
String mainCommand = "nvidia-smi";
try {
            // Note: the original pattern used "\b", which in a Java string literal is a backspace character
            // and can never match nvidia-smi output; match the version number after "CUDA Version:" instead.
            String cudaVersion = findCudaVersion(Collections.singletonList(mainCommand), Pattern.compile("CUDA Version:\\s+([0-9.]+)"));
if(cudaVersion == null) {
return null;
} else {
return new Pair<>(cudaVersion, findCudaInstallPath(mainCommand, cudaVersion));
}
} catch (Exception exception) {
log.error("Couldn't find cuda version from {} command", mainCommand, exception);
System.exit(1);
return null;
}
}
private static String findCudaVersion(List<String> command, Pattern pattern) throws IOException {
try (BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(
new ProcessBuilder(command).start().getInputStream()
))) {
String line = bufferedReader.readLine();
while (line != null) {
Matcher matcher = pattern.matcher(line);
if(matcher.find()) {
return matcher.group(1).trim();
}
line = bufferedReader.readLine();
}
}
return null;
}
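    // Example (assumed nvcc output): given the line
    //   "Cuda compilation tools, release 10.2, V10.2.89"
    // the nvcc pattern "release (.*)," captures "10.2".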
private static String findCudaInstallPath(String mainCommandName, String cudaVersion) throws IOException {
try (BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(
new ProcessBuilder(Arrays.asList(SystemUtils.IS_OS_WINDOWS ? "where" : "which", mainCommandName))
.start().getInputStream()
))) {
String line = bufferedReader.readLine();
while (line != null) {
if(line.contains(cudaVersion)) {
// to go back from <cuda_install_path>/bin/nvcc to <cuda_install_path>
File parentFile = new File(line.trim()).getParentFile().getParentFile();
if(parentFile.exists()) {
return parentFile.getAbsolutePath();
}
break;
}
line = bufferedReader.readLine();
}
}
return null;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build/extension/PythonPathsCommand.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command.build.extension;
import ai.konduit.serving.pipeline.api.process.ProcessUtils;
import ai.konduit.serving.pipeline.api.python.models.CondaDetails;
import ai.konduit.serving.pipeline.api.python.models.JavaCppDetails;
import ai.konduit.serving.pipeline.api.python.models.PythonConfigType;
import ai.konduit.serving.pipeline.api.python.models.PythonDetails;
import io.vertx.core.cli.annotations.*;
import io.vertx.core.spi.launcher.DefaultCommand;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.cpython.PyObject;
import org.bytedeco.javacpp.Pointer;
import org.nd4j.python4j.PythonObject;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static ai.konduit.serving.pipeline.api.python.PythonPathUtils.*;
import static org.bytedeco.cpython.global.python.*;
import static org.bytedeco.cpython.helper.python.Py_AddPath;
import static org.bytedeco.cpython.presets.python.cachePackages;
@Name("pythonpaths")
@Summary("A utility command to manage system installed and manually registered python binaries.")
@Description("A utility command to manage system installed and manually registered python binaries. Python binaries " +
"could be from either a regular python install, through a conda environment, javacpp cpython or a virtual " +
"environment through 'venv' package. Each python installation has been assigned a particular id and a " +
"specific type which ultimately identifies which python installation is going to be used with a particular " +
"PythonStep configuration. You can also register a python binary if it's not listed through 'pythonpaths add' " +
"subcommand. \n\n" +
"Example usages:\n" +
"--------------\n" +
"- Lists all the installed and registered python binaries:\n" +
"$ konduit pythonpaths list \n\n" +
"- Lists python installs with their included packages:\n" +
"$ konduit pythonpaths -wip \n\n" +
"- Register a custom python installation:\n" +
"$ konduit pythonpaths add -t=python -p=E:\\python37\\python.exe \n" +
"--------------")
@Slf4j
public class PythonPathsCommand extends DefaultCommand {
private SubCommand subCommand;
private Object type;
private String path;
private boolean withInstalledPackages;
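    // Note: 'type' is intentionally declared as Object; run() parses it into a different enum
    // (PythonType, PythonConfigType or ListInstallationType) depending on the chosen subcommand.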
@Argument(index = 0, argName = "sub_command", required = false)
@DefaultValue("LIST")
@Description("Sub command to be used with the pythonpaths command. Sub commands are: [add, list, config]. " +
"Defaults to 'LIST'")
public void setSubCommand(String subCommand) {
try {
this.subCommand = PythonPathsCommand.SubCommand.valueOf(subCommand.toUpperCase());
} catch (Exception e) {
System.out.format("Invalid sub command name: '%s'. Allowed values are: %s -> (case insensitive).",
subCommand, Arrays.toString(PythonPathsCommand.SubCommand.values()));
System.exit(1);
}
}
@Option(shortName = "t", longName = "type", argName = "type", required = true)
@Description("Name of the python type. For the 'add' subcommand, accepted values are: [python, conda, venv]. " +
"For the 'list' subcommand, accepted values are: [all, javacpp, python, conda, venv]. " +
"For 'config' subcommand the accepted values are: [custom, javacpp, python, conda, venv]")
public void setType(String type) {
this.type = type;
}
@Option(shortName = "p", longName = "path", argName = "install_path")
@Description("Absolute path of the python installation. For conda and venv types this refers to the absolute path " +
"of the root installation folder.")
public void setPath(String path) {
this.path = path;
}
@Option(shortName = "wip", longName = "with-installed-packages", flag = true)
@Description("Absolute path of the python installation. For conda and venv types this refers to the absolute path " +
"of the root installation folder.")
public void setPath(boolean withInstalledPackages) {
this.withInstalledPackages = withInstalledPackages;
}
private enum SubCommand {
ADD, LIST, CONFIG
}
public enum ListInstallationType {
ALL, JAVACPP, PYTHON, CONDA, VENV
}
@Override
public void run() {
switch (this.subCommand) {
case ADD:
try {
this.type = PythonType.valueOf(((String) type).toUpperCase());
} catch (Exception e) {
out.format("Invalid type name: '%s'. Allowed values are: %s -> (case insensitive).",
type, Arrays.toString(PythonType.values()));
System.exit(1);
}
break;
case CONFIG:
try {
this.type = PythonConfigType.valueOf(((String) type).toUpperCase());
} catch (Exception e) {
out.format("Invalid type name: '%s'. Allowed values are: %s -> (case insensitive).",
type, Arrays.toString(PythonConfigType.values()));
System.exit(1);
}
break;
case LIST:
try {
this.type = PythonPathsCommand.ListInstallationType.valueOf(((String) type).toUpperCase());
} catch (Exception e) {
out.format("Invalid type name: '%s'. Allowed values are: %s -> (case insensitive).",
type, Arrays.toString(PythonPathsCommand.ListInstallationType.values()));
System.exit(1);
}
break;
default:
out.format("Invalid sub command name: '%s'. Allowed values are: %s -> (case insensitive).",
subCommand, Arrays.toString(PythonPathsCommand.SubCommand.values()));
}
switch (this.subCommand) {
case ADD:
registerInstallation((PythonType) type, path);
break;
case LIST:
listInstallations((ListInstallationType) type, withInstalledPackages);
break;
case CONFIG:
createConfig((PythonConfigType) type);
break;
default:
log.error("Invalid sub command name: {}. Allowed values are: {} -> (case insensitive).",
subCommand, Arrays.toString(PythonPathsCommand.SubCommand.values()));
}
}
private void createConfig(PythonConfigType pythonConfigType) {
throw new UnsupportedOperationException("This will be implemented in a continuation PR");
// add logic here
}
public static void listInstallations(ListInstallationType type, boolean withInstalledPackages) {
switch (type) {
case ALL:
listJavacppInstallations(withInstalledPackages);
listPythonInstallations(withInstalledPackages);
listCondaInstallations(withInstalledPackages);
listVenvInstallations(withInstalledPackages);
break;
case JAVACPP:
listJavacppInstallations(withInstalledPackages);
break;
case PYTHON:
listPythonInstallations(withInstalledPackages);
break;
case CONDA:
listCondaInstallations(withInstalledPackages);
break;
case VENV:
listVenvInstallations(withInstalledPackages);
break;
default:
System.out.format("Invalid installation type name: '%s'. Allowed values are: %s -> (case insensitive).",
type.name(), Arrays.toString(PythonPathsCommand.ListInstallationType.values()));
}
}
private static void listJavacppInstallations(boolean withInstalledPackages) {
JavaCppDetails javaCppDetails = getJavaCppDetails();
System.out.println("\n----------------------------JAVACPP INSTALLS---------------------------");
System.out.print(
formatPythonInstallation(new PythonDetails(javaCppDetails.id(), javaCppDetails.path(), javaCppDetails.version()),
false)
);
if(!withInstalledPackages) {
System.out.println("\n-----------------------------------------------------------------------");
} else {
System.out.println("\t--------Installed Modules--------");
printJavaCppInstalledModules();
System.out.println("\t---------------------------------");
System.out.println("-----------------------------------------------------------------------");
}
}
public static JavaCppDetails getJavaCppDetails() {
try {
Py_AddPath(cachePackages());
Pointer program = Py_DecodeLocale(PythonPathsCommand.class.getSimpleName(), null);
if (program == null) {
System.out.println("Fatal error: cannot get class name");
System.exit(1);
}
Py_SetProgramName(program); /* optional but recommended */
Py_Initialize();
PyObject globals = PyModule_GetDict(PyImport_AddModule("__main__"));
PyRun_StringFlags(
"import os, sys; " +
"executable = os.path.abspath(os.path.join(os.__file__, '..', '..')) + ' (embedded python)'; " +
"version = sys.version.split(' ')[0]",
Py_single_input,
globals,
null,
null);
JavaCppDetails javaCppDetails = new JavaCppDetails("0",
getStringFromPythonObject(PyDict_GetItemString(globals, "executable")),
getStringFromPythonObject(PyDict_GetItemString(globals, "version")) + System.lineSeparator());
PyMem_RawFree(program);
if (Py_FinalizeEx() < 0) {
System.exit(120);
}
return javaCppDetails;
} catch (IOException e) {
System.out.println(e.getMessage());
System.exit(1);
return null;
}
}
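    // Note: the embedded script above derives the interpreter root from os.__file__ (two levels up from
    // the os module) and reads sys.version; the embedded interpreter is fully initialized and finalized on each call.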
private static void listPythonInstallations(boolean withInstalledPackages) {
System.out.println("\n----------------------------PYTHON INSTALLS----------------------------");
System.out.print(
findPythonInstallations().stream()
.map(pythonDetails -> formatPythonInstallation(pythonDetails, withInstalledPackages))
.collect(Collectors.joining(System.lineSeparator()))
);
System.out.println("-----------------------------------------------------------------------");
}
private static void listCondaInstallations(boolean withInstalledPackages) {
System.out.println("\n----------------------------CONDA INSTALLS-----------------------------");
System.out.print(
findCondaInstallations().stream()
.map(condaDetails -> formatCondaInstallation(condaDetails, withInstalledPackages))
.collect(Collectors.joining(System.lineSeparator()))
);
System.out.println("-----------------------------------------------------------------------");
}
private static String formatPythonInstallation(PythonDetails pythonDetails, boolean withInstalledPackages) {
return formatPythonInstallation(pythonDetails, 1, withInstalledPackages);
}
private static String formatPythonInstallation(PythonDetails pythonDetails, int numberOfTabs, boolean withInstalledPackages) {
String tabs = IntStream.range(0, numberOfTabs).mapToObj(index -> "\t").collect(Collectors.joining(""));
return String.format(" -%s%s: %s%n%spath: %s%n%sversion: %s%s",
"\t",
numberOfTabs > 1 ? "name" : "id",
pythonDetails.id(),
tabs,
pythonDetails.path(),
tabs,
pythonDetails.version(),
withInstalledPackages ?
String.format("%s--------Installed Modules--------%n%s%n%s---------------------------------%n",
tabs,
Arrays.stream(ProcessUtils.runAndGetOutput(pythonDetails.path(), "-c", "from pip import _internal; _internal.main(['list'])")
.split(System.lineSeparator()))
.map(line -> String.format("%s- %s", tabs, line))
.collect(Collectors.joining(System.lineSeparator())),
tabs) :
"");
}
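    /*
     * Illustrative output of formatPythonInstallation for a top-level install
     * (numberOfTabs = 1, withInstalledPackages = false):
     *
     *  -	id: 1
     *  	path: /usr/bin/python3
     *  	version: 3.8.10
     */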
private static void printJavaCppInstalledModules() {
try {
Py_AddPath(cachePackages());
Pointer program = Py_DecodeLocale(PythonPathsCommand.class.getSimpleName(), null);
if (program == null) {
System.out.println("Fatal error: cannot get class name");
System.exit(1);
}
Py_SetProgramName(program); /* optional but recommended */
Py_Initialize();
PyRun_SimpleStringFlags(
"from pip import _internal\n" +
"import warnings\n" +
"warnings.filterwarnings(action='ignore')\n" +
"class writer :\n" +
" def __init__(self, *writers) :\n" +
" self.writers = writers\n" +
"\n" +
" def write(self, text) :\n" +
" for w in self.writers :\n" +
" w.write('\t- ' + text)\n" +
"\n" +
" def flush(self):\n" +
" pass\n" +
"import sys\n" +
"sys.stdout = writer(sys.stdout)\n" +
"installed_modules = _internal.main(['list'])",
null);
if (Py_FinalizeEx() < 0) {
System.exit(120);
}
PyMem_RawFree(program);
} catch (IOException e) {
System.out.println(e.getMessage());
System.exit(1);
}
}
private static String formatCondaInstallation(CondaDetails condaDetails, boolean withInstalledPackages) {
List<String> formattedCondaEnvironments = new ArrayList<>();
condaDetails.environments().forEach(pythonDetails ->
formattedCondaEnvironments.add(formatPythonInstallation(pythonDetails, 2, withInstalledPackages)));
return String.format(" -\tid: %s%n\tpath: %s%n\tversion: %s%s",
condaDetails.id(),
condaDetails.path(),
condaDetails.version(),
String.format(
"\t--------------------------ENVIRONMENTS-------------------------%n" +
"\t%s" +
"\t---------------------------------------------------------------%n",
String.join(System.lineSeparator() + "\t", formattedCondaEnvironments)
)
);
}
private static void listVenvInstallations(boolean withInstalledPackages) {
System.out.println("\n-----------------------------VENV INSTALLS-----------------------------");
System.out.print(
findVenvInstallations().stream()
.map(venvDetails -> formatPythonInstallation(new PythonDetails(venvDetails.id(),
venvDetails.path(),
venvDetails.version()),
withInstalledPackages))
.collect(Collectors.joining(System.lineSeparator()))
);
System.out.println("-----------------------------------------------------------------------");
}
private static String getStringFromPythonObject(PyObject pythonObject) {
PyObject pythonEncodedString = PyUnicode_AsEncodedString(pythonObject, "utf-8", "~E~");
String javaString = PyBytes_AsString(pythonEncodedString).getString();
Py_DecRef(pythonEncodedString);
return javaString;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build/extension/ServeBuildCommand.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command.build.extension;
import ai.konduit.serving.build.cli.BuildCLI;
import ai.konduit.serving.cli.launcher.command.ServeCommand;
import ai.konduit.serving.cli.launcher.command.build.extension.model.Profile;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import io.vertx.core.cli.annotations.Description;
import io.vertx.core.cli.annotations.Name;
import io.vertx.core.cli.annotations.Option;
import io.vertx.core.cli.annotations.Summary;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.nd4j.shade.guava.base.Strings;
import org.nd4j.shade.jackson.databind.JsonNode;
import org.nd4j.shade.jackson.databind.node.JsonNodeFactory;
import org.nd4j.shade.jackson.databind.node.ObjectNode;
import org.nd4j.shade.jackson.databind.node.TextNode;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
@Name(value = "serve", priority = 1)
@Summary("Start a konduit server application")
@Description("Start a konduit server application. " +
"The application is identified with an id that can be set using the `--serving-id` or `-id` option. " +
"The application can be stopped with the `stop` command. " +
"This command takes the `run` command parameters. To see the " +
"run command parameters, execute `run --help`\n\n" +
"Example usages:\n" +
"--------------\n" +
"- Starts a server in the foreground with an id of 'inf_server' using 'config.json' as configuration file:\n" +
"$ konduit serve -id inf_server -c config.json\n\n" +
"- Starts a server in the foreground with an id of 'inf_server' using 'config.json' as configuration file and CPU profile:\n" +
"$ konduit serve -id inf_server -c config.json -p CPU\n\n" +
"- Starts a server in the background with an id of 'inf_server' using 'config.yaml' as configuration file:\n" +
"$ konduit serve -id inf_server -c config.yaml -b\n" +
"--------------")
@Slf4j
public class ServeBuildCommand extends ServeCommand {
private String profileName;
private List<String> additionalDependencies;
private boolean runWithoutManifestJar;
@Option(shortName = "p", longName = "profileName", argName = "profile_name")
@Description("Name of the profile to be used with the server launch.")
public void setProfileName(String profileName) {
this.profileName = profileName;
}
@Option(shortName = "ad", longName = "addDep", argName = "additional_dependencies")
@Description("Additional dependencies to include with the launch")
public void setAdditionalDependencies(List<String> additionalDependencies) {
this.additionalDependencies = additionalDependencies;
}
@Option(shortName = "rwm", longName = "runWithoutManifest", argName = "run_without_manifest", flag = true)
@Description("Do not create the manifest jar file before launching the server.")
public void setRunWithoutManifestJar(boolean runWithoutManifestJar) {
this.runWithoutManifestJar = runWithoutManifestJar;
}
@Override
public void run() {
File profileRootDir = new File(DirectoryFetcher.getBuildDir(), getId());
        if ((!profileRootDir.exists() || !profileRootDir.isDirectory()) && !profileRootDir.mkdirs()) { // mkdirs: the parent build dir may not exist yet
log.error("Unable to create build directory for path: {}.", profileRootDir.getAbsolutePath());
System.exit(1);
}
File savePath = new File(profileRootDir, "pipeline.json");
File mfJar = new File(profileRootDir, "manifest.jar");
try {
JsonNode jsonConfiguration = getConfigurationFromFileOrString(configuration);
if (jsonConfiguration == null) {
out.format("Invalid JSON/YAML configuration or invalid configuration file path defined by: %n%s", configuration);
System.exit(1);
} else {
if (false) { // todo: set this to !runWithoutManifestJar after the build command is working successfully with profiles
if (!(jsonConfiguration.has("host") || jsonConfiguration.has("port") ||
jsonConfiguration.has("pipeline"))) {
// Assume that it's a json for a konduit serving pipeline and not a complete inference configuration
jsonConfiguration = new ObjectNode(JsonNodeFactory.instance)
.put("host", this.host)
.put("port", this.port)
.set("pipeline", jsonConfiguration.deepCopy());
}
Object pipeline = jsonConfiguration.get("pipeline");
if (pipeline == null) {
out.format("Invalid JSON/YAML configuration or invalid configuration file path defined by: %n%s", configuration);
System.exit(1);
}
FileUtils.writeStringToFile(savePath, pipeline.toString(), StandardCharsets.UTF_8);
Profile profile = profileName != null ? ProfileCommand.getProfile(profileName) : ProfileCommand.getDefaultProfile();
if (profile == null) {
if (profileName == null) {
out.println("Couldn't find a default profile.");
} else {
out.format("Couldn't find a profile with the specified name: '%s'.%n", profileName);
}
System.exit(1);
}
// todo: add logic here for overriding python paths variable through profiles (kept for a separate PR).
List<String> args = new ArrayList<>();
args.add("-p");
args.add(savePath.getAbsolutePath());
args.add("-c");
args.add(String.format("classpath.outputFile=%s", mfJar.getAbsolutePath()));
args.add("classpath.type=JAR_MANIFEST");
args.add("-dt");
args.add("CLASSPATH");
args.add("-d");
args.add(profile.computeDevice());
args.add("-a");
args.add(profile.cpuArchitecture());
args.add("-o");
args.add(profile.operatingSystem());
if (profile.serverTypes() != null) {
for (String serverType : profile.serverTypes()) {
args.add("-s");
args.add(serverType);
}
}
List<String> additionalDeps = null;
if (profile.additionalDependencies() != null && !profile.additionalDependencies().isEmpty()) {
additionalDeps = new ArrayList<>(profile.additionalDependencies());
}
if (this.additionalDependencies != null && !this.additionalDependencies.isEmpty()) {
additionalDeps = new ArrayList<>();
additionalDeps.addAll(this.additionalDependencies);
}
if (additionalDeps != null) {
for (String ad : additionalDeps) {
args.add("-ad");
args.add(ad);
}
}
// Issue: https://github.com/KonduitAI/konduit-serving/issues/437
BuildCLI.main(args.toArray(new String[0])); //TODO we could just call build tool directly instead of via CLI (more robust to refactoring, compile time args checking etc)
if (Strings.isNullOrEmpty(this.classpath)) {
this.classpath = mfJar.getAbsolutePath();
} else {
this.classpath += File.pathSeparator + mfJar.getAbsolutePath();
}
}
}
} catch (IOException e) {
log.error("Unable to write build pipeline data to {}.", savePath.getAbsolutePath(), e);
System.exit(1);
} catch (Exception e) {
log.error("Unable to build classpath manifest jar for the given pipeline and profile.", e);
System.exit(1);
}
super.run();
}
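    /*
     * For reference: the args assembled above correspond to a build CLI invocation along these lines
     * (illustrative values):
     *
     *   -p /path/to/pipeline.json -c classpath.outputFile=/path/to/manifest.jar classpath.type=JAR_MANIFEST
     *   -dt CLASSPATH -d CPU -a x86_avx2 -o LINUX -s HTTP -s GRPC
     */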
    /**
     * Takes a file path to a valid JSON/YAML file, or a raw JSON/YAML String, and parses it into a {@link JsonNode}
     *
     * @param jsonOrYamlFileOrString JSON/YAML file path or a valid JSON/YAML String
     * @return {@link JsonNode} that was parsed
     */
protected JsonNode getConfigurationFromFileOrString(String jsonOrYamlFileOrString) {
if (jsonOrYamlFileOrString != null) {
try (Scanner scanner = new Scanner(new File(jsonOrYamlFileOrString), "UTF-8").useDelimiter("\\A")) {
return readConfiguration(scanner.next());
} catch (FileNotFoundException e) {
return readConfiguration(jsonOrYamlFileOrString);
}
} else {
return null;
}
}
    /**
     * Parses the given configuration yaml/json string to a {@link JsonNode}.
     *
     * @param configurationString given configuration string. Can be a JSON/YAML string
     * @return the parsed configuration as a {@link JsonNode}. Returns null on failure.
     */
private JsonNode readConfiguration(String configurationString) {
try {
return ObjectMappers.json().readTree(configurationString);
} catch (Exception jsonProcessingErrors) {
try {
JsonNode jsonNode = ObjectMappers.yaml().readTree(configurationString);
if (jsonNode instanceof TextNode) {
throw new FileNotFoundException("File does not exist at path: " + configurationString);
} else {
return jsonNode;
}
} catch (Exception yamlProcessingErrors) {
log.error("Given configuration: '{}' does not contain a valid JSON/YAML object", configurationString);
log.error("\n\nErrors while processing as a json string:", jsonProcessingErrors);
log.error("\n\nErrors while processing as a yaml string:", yamlProcessingErrors);
return null;
}
}
}
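    // Note on the TextNode check above: a bare string such as "missing-config.json" parses as a valid
    // YAML scalar (a TextNode), so it is treated as a nonexistent file path rather than as configuration.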
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build/extension
|
java-sources/ai/konduit/serving/konduit-serving-cli/0.3.0/ai/konduit/serving/cli/launcher/command/build/extension/model/Profile.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.cli.launcher.command.build.extension.model;
import ai.konduit.serving.pipeline.api.python.models.AppendType;
import ai.konduit.serving.pipeline.api.python.models.PythonConfigType;
import ai.konduit.serving.vertx.config.ServerProtocol;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import org.apache.commons.lang3.SystemUtils;
import org.nd4j.shade.jackson.annotation.JsonGetter;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.JsonSetter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
@EqualsAndHashCode
@ToString
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Profile {
private static final List<String> validComputeDevices = Arrays.asList("CPU", "CUDA_10.0", "CUDA_10.1", "CUDA_10.2");
public enum CpuArchitecture {
x86, x86_64, x86_avx2, x86_64_avx2, x86_avx512, x86_64_avx512, armhf, arm64, ppc64le;
public static CpuArchitecture forName(String s) {
switch (s.toLowerCase()) {
case "x86":
return CpuArchitecture.x86;
case "x86_64":
return CpuArchitecture.x86_64;
case "x86_avx2":
return CpuArchitecture.x86_avx2;
case "x86_64-avx2":
case "x86_64_avx2":
return CpuArchitecture.x86_64_avx2;
case "x86_avx512":
return CpuArchitecture.x86_avx512;
case "x86_64-avx512":
case "x86_64_avx512":
return CpuArchitecture.x86_64_avx512;
case "arm64":
return CpuArchitecture.arm64;
case "armhf":
return CpuArchitecture.armhf;
case "ppc64le":
return CpuArchitecture.ppc64le;
default:
return null;
}
}
}
public enum OperatingSystem {
LINUX, WINDOWS, MAC;
public static OperatingSystem forName(String s) {
if ("MAC".equalsIgnoreCase(s) || "OSX".equalsIgnoreCase(s)) {
return MAC;
}
return valueOf(s.toUpperCase());
}
}
private String computeDevice;
private CpuArchitecture cpuArchitecture;
private OperatingSystem operatingSystem;
private List<ServerProtocol> serverTypes;
private List<String> additionalDependencies;
private PythonConfigType pythonConfigType;
private String pythonPath;
private String environmentName;
private AppendType appendType;
public Profile() {
this.computeDevice = "CPU";
this.cpuArchitecture = CpuArchitecture.x86_avx2;
this.operatingSystem = getCurrentOS();
this.serverTypes = Arrays.asList(ServerProtocol.HTTP, ServerProtocol.GRPC);
this.additionalDependencies = new ArrayList<>();
this.pythonConfigType = PythonConfigType.CONDA;
this.pythonPath = "1";
this.environmentName = "base";
this.appendType = AppendType.BEFORE;
}
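    /*
     * Illustrative YAML form of the default profile above, as produced by the @JsonGetter accessors
     * (exact quoting and ordering depend on the YAML mapper):
     *
     *   computeDevice: "CPU"
     *   cpuArchitecture: "x86_avx2"
     *   operatingSystem: "LINUX"    # or WINDOWS / MAC, per the current OS
     *   serverTypes: ["HTTP", "GRPC"]
     *   additionalDependencies: []
     *   pythonConfigType: "CONDA"
     *   pythonPath: "1"
     *   environmentName: "base"
     *   appendType: "BEFORE"
     */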
public Profile(String computeDevice, String cpuArchitecture, String operatingSystem, List<String> serverTypes,
List<String> additionalDependencies, String pythonConfigType, String pythonPath,
String environmentName, String appendType) {
computeDevice(computeDevice);
cpuArchitecture(cpuArchitecture);
operatingSystem(operatingSystem);
serverTypes(serverTypes);
additionalDependencies(additionalDependencies);
pythonConfigType(pythonConfigType);
pythonPath(pythonPath);
environmentName(environmentName);
appendType(appendType);
}
@JsonSetter("computeDevice")
public Profile computeDevice(String computeDevice) {
if (validComputeDevices.contains(computeDevice)) {
this.computeDevice = computeDevice;
} else {
throw new UnsupportedOperationException("Invalid, unknown, not supported or not yet implemented device type: " + computeDevice +
". Valid values are: " + validComputeDevices);
}
return this;
}
@JsonSetter("operatingSystem")
public Profile operatingSystem(String operatingSystem) {
this.operatingSystem = OperatingSystem.forName(operatingSystem);
return this;
}
@JsonSetter("cpuArchitecture")
public Profile cpuArchitecture(String cpuArchitecture) {
this.cpuArchitecture = CpuArchitecture.forName(cpuArchitecture);
return this;
}
@JsonSetter("serverTypes")
public Profile serverTypes(List<String> serverTypes) {
this.serverTypes = serverTypes != null ?
serverTypes.stream().map(serverType -> {
try {
                        return ServerProtocol.valueOf(serverType.toUpperCase()); // match the advertised case-insensitive behaviour
} catch (Exception e) {
System.out.format("Invalid server type: '%s'. Allowed values are: %s -> (case insensitive).",
serverType, Arrays.toString(ServerProtocol.values()));
System.exit(1);
return null;
}
}).collect(Collectors.toList()) :
Arrays.asList(ServerProtocol.HTTP, ServerProtocol.GRPC);
return this;
}
@JsonSetter("additionalDependencies")
public Profile additionalDependencies(List<String> additionalDependencies) {
if(additionalDependencies != null) {
for (String additionalDependency : additionalDependencies) {
String[] split = additionalDependency.split(":");
if (split.length != 3 && split.length != 4) {
throw new IllegalStateException("Invalid additionalDependency setting: Dependencies must " +
"be specified in \"group_id:artifact_id:version\" or \"group_id:artifact_id:version:classifier\" format. Got " + additionalDependency);
}
}
}
this.additionalDependencies = additionalDependencies;
return this;
}
@JsonSetter("pythonConfigType")
public Profile pythonConfigType(String pythonConfigType) {
try {
this.pythonConfigType = PythonConfigType.valueOf(pythonConfigType.toUpperCase());
} catch (Exception e) {
System.out.format("Invalid python config type: '%s'. Allowed values are: %s -> (case insensitive).",
pythonConfigType, Arrays.toString(PythonConfigType.values()));
System.exit(1);
return null;
}
return this;
}
@JsonSetter("pythonPath")
public Profile pythonPath(String pythonPath) {
this.pythonPath = pythonPath;
return this;
}
@JsonSetter("environmentName")
public Profile environmentName(String environmentName) {
this.environmentName = environmentName;
return this;
}
@JsonSetter("appendType")
public Profile appendType(String appendType) {
try {
this.appendType = AppendType.valueOf(appendType.toUpperCase());
} catch (Exception e) {
System.out.format("Invalid python append type: '%s'. Allowed values are: %s -> (case insensitive).",
appendType, Arrays.toString(AppendType.values()));
System.exit(1);
return null;
}
return this;
}
@JsonGetter("computeDevice")
public String computeDevice() {
return this.computeDevice;
}
@JsonGetter("cpuArchitecture")
public String cpuArchitecture() {
return this.cpuArchitecture.name();
}
@JsonGetter("operatingSystem")
public String operatingSystem() {
return this.operatingSystem.name();
}
@JsonGetter("serverTypes")
public List<String> serverTypes() {
return this.serverTypes.stream().map(ServerProtocol::name).collect(Collectors.toList());
}
@JsonGetter("additionalDependencies")
public List<String> additionalDependencies() {
return this.additionalDependencies;
}
@JsonGetter("pythonConfigType")
public String pythonConfigType() {
return this.pythonConfigType.name();
}
@JsonGetter("pythonPath")
public String pythonPath() {
return this.pythonPath;
}
@JsonGetter("environmentName")
public String environmentName() {
return this.environmentName;
}
@JsonGetter("appendType")
public String appendType() {
return this.appendType.name();
}
public static OperatingSystem getCurrentOS() {
if (SystemUtils.IS_OS_WINDOWS) {
return OperatingSystem.WINDOWS;
} else if (SystemUtils.IS_OS_LINUX) {
return OperatingSystem.LINUX;
} else if (SystemUtils.IS_OS_MAC) {
return OperatingSystem.MAC;
} else { // todo: add other operating systems if ever supported.
throw new IllegalStateException("Unsupported operating system. Supported values are: " + Arrays.asList(OperatingSystem.values()));
}
}
}
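// Illustrative usage sketch (added for clarity; not part of the original source).
// Shows the fluent setters above; assumes Profile has a public no-arg constructor.
// Only setters visible in this class are used, and the server type strings are the
// defaults listed above (HTTP, GRPC). Path and environment name are hypothetical.
class ProfileUsageSketch {
    static Profile exampleProfile() {
        return new Profile()
                .serverTypes(Arrays.asList("HTTP", "GRPC"))
                .pythonPath("/usr/bin/python3") // hypothetical path, for illustration only
                .environmentName("base");       // hypothetical environment name
    }
}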
|
0
|
java-sources/ai/konduit/serving/konduit-serving-clients/0.3.0/ai/konduit/serving/clients
|
java-sources/ai/konduit/serving/konduit-serving-clients/0.3.0/ai/konduit/serving/clients/generators/GenerateRestClients.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.clients.generators;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import io.swagger.codegen.v3.ClientOptInput;
import io.swagger.codegen.v3.ClientOpts;
import io.swagger.codegen.v3.CodegenConstants;
import io.swagger.codegen.v3.DefaultGenerator;
import io.swagger.codegen.v3.generators.java.JavaClientCodegen;
import io.swagger.codegen.v3.generators.python.PythonClientCodegen;
import io.swagger.v3.core.converter.ModelConverters;
import io.swagger.v3.core.util.Json;
import io.swagger.v3.core.util.Yaml;
import io.swagger.v3.oas.models.ExternalDocumentation;
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.Operation;
import io.swagger.v3.oas.models.PathItem;
import io.swagger.v3.oas.models.info.Contact;
import io.swagger.v3.oas.models.info.Info;
import io.swagger.v3.oas.models.info.License;
import io.swagger.v3.oas.models.media.*;
import io.swagger.v3.oas.models.parameters.RequestBody;
import io.swagger.v3.oas.models.responses.ApiResponse;
import io.swagger.v3.oas.models.responses.ApiResponses;
import io.swagger.v3.oas.models.tags.Tag;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.NotFoundException;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.nd4j.common.io.ClassPathResource;
import org.nd4j.common.primitives.Pair;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
import static io.netty.handler.codec.http.HttpHeaderValues.APPLICATION_JSON;
import static io.netty.handler.codec.http.HttpHeaderValues.APPLICATION_OCTET_STREAM;
@Slf4j
public class GenerateRestClients {
public static void main(String[] args) throws NotFoundException, IOException {
System.out.println("Classpath: " + System.getProperty("java.class.path"));
// Setting this so that the JSON serializer is able to see private fields without standard getter methods.
Json.mapper()
.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY);
Map<String, List<Pair<String, String>>> mappings = getJsonNameMappings();
mappings.put("ai.konduit.serving.endpoint.Endpoint",
Collections.singletonList(new Pair<>(null, "ai.konduit.serving.endpoint.AssetServingEndpoint"))
);
mappings.put("ai.konduit.serving.pipeline.api.data.BoundingBox",
Arrays.asList(new Pair<>(null, "ai.konduit.serving.pipeline.impl.data.box.BBoxCHW"),
new Pair<>(null, "ai.konduit.serving.pipeline.impl.data.box.BBoxXY"))
);
mappings.put("ai.konduit.serving.pipeline.api.pipeline.Pipeline",
Arrays.asList(new Pair<>(null, "ai.konduit.serving.pipeline.impl.pipeline.SequencePipeline"),
new Pair<>(null, "ai.konduit.serving.pipeline.impl.pipeline.GraphPipeline"))
);
ClassPool classPool = ClassPool.getDefault();
OpenAPI openAPI = new OpenAPI();
createApiInfo(openAPI);
List<CtClass> annotatedClasses = createAnnotatedClasses(classPool, mappings);
annotatedClasses.add(classPool.get("ai.konduit.serving.vertx.config.InferenceConfiguration"));
annotatedClasses.add(classPool.get("ai.konduit.serving.vertx.protocols.http.api.ErrorResponse"));
// These have to be added in this order, otherwise we'll get a duplicated-classes compilation error.
// This makes sure that the classes referenced in later iterations are already defined by the earlier ones.
try {
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.endpoint.Endpoint")).toClass());
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.pipeline.api.data.BoundingBox")).toClass());
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.pipeline.api.step.PipelineStep")).toClass());
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchFn")).toClass());
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep")).toClass());
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.pipeline.api.pipeline.Pipeline")).toClass());
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.vertx.config.InferenceConfiguration")).toClass());
addSchemas(openAPI, annotatedClasses.get(findIndex(annotatedClasses, "ai.konduit.serving.vertx.protocols.http.api.ErrorResponse")).toClass());
} catch (CannotCompileException e) {
log.error("Error while adding schema classes to OpenApi specs", e);
System.exit(1);
}
log.info("Generated open api spec is: \n{}\n", Yaml.pretty(openAPI));
generateClients(openAPI);
}
private static void generateClients(OpenAPI openAPI) throws IOException {
String clientsSavePath = System.getProperty("konduit.generator.clients.directory");
File clientsDirectory = new File(clientsSavePath == null ? "clients" : clientsSavePath);
log.info("Generating clients at: {}", clientsDirectory.getAbsolutePath());
try {
if (clientsDirectory.exists() && clientsDirectory.isDirectory())
FileUtils.deleteDirectory(clientsDirectory);
} catch (IOException exception) {
log.error("Unable to clean 'clients' directory at {}", clientsDirectory.getAbsolutePath(), exception);
System.exit(1);
}
DefaultGenerator defaultGenerator = new DefaultGenerator();
JavaClientCodegen javaClientCodegen = new JavaClientCodegen();
javaClientCodegen.setOutputDir(new File(clientsDirectory, "java").getAbsolutePath());
javaClientCodegen.setModelPackage("ai.konduit.serving.client.java.models");
javaClientCodegen.setInvokerPackage("ai.konduit.serving.client.java.invoker");
javaClientCodegen.setApiPackage("ai.konduit.serving.client.java");
javaClientCodegen.setGroupId("ai.konduit.serving");
javaClientCodegen.setArtifactId("konduit-serving-client");
javaClientCodegen.setArtifactVersion("0.1.0-SNAPSHOT");
javaClientCodegen.setTemplateDir("konduit-client-templates/Java");
List<File> generatedJavaClientFiles = defaultGenerator
.opts(new ClientOptInput()
.openAPI(openAPI)
.config(javaClientCodegen)
.opts(new ClientOpts()))
.generate();
PythonClientCodegen pythonClientCodegen = new PythonClientCodegen();
pythonClientCodegen.setOutputDir(new File(clientsDirectory, "python").getAbsolutePath());
pythonClientCodegen.setTemplateDir("konduit-client-templates/python");
ClientOpts pythonClientOpts = new ClientOpts();
pythonClientOpts.getProperties().put(CodegenConstants.PACKAGE_NAME, "konduit");
pythonClientOpts.getProperties().put(CodegenConstants.PACKAGE_VERSION, "0.2.0"); // new version after already available "konduit" version on PyPi (which is 0.1.10) - https://pypi.org/project/konduit/0.1.10/
List<File> generatedPythonClientFiles = defaultGenerator
.opts(new ClientOptInput()
.openAPI(openAPI)
.config(pythonClientCodegen)
.opts(pythonClientOpts))
.generate();
findAndReplaceCharacters(generatedJavaClientFiles);
findAndReplaceCharacters(generatedPythonClientFiles);
}
private static void findAndReplaceCharacters(List<File> generatedFiles) throws IOException {
log.info("\n\nReplacing HTML-escaped characters and line breaks in the generated files: ");
for(File file : generatedFiles) {
if(file.getAbsolutePath().endsWith(".md") || file.getAbsolutePath().endsWith(".java")) {
replace(file, "<br>", "<br>");
}
if(file.getAbsolutePath().endsWith(".md")) {
replace(file, """, "\"");
replace(file, "<", "<");
replace(file, ">", ">");
}
if(file.getAbsolutePath().endsWith(".py")) {
replace(file, "<br>", "\n\t\t");
}
}
}
private static String escape(String input) {
return input.replace("\"", "\\\"")
.replace("\n", "\\n")
.replace("\t", "\\t");
}
private static void replace(File file, String target, String replacement) throws IOException {
replace(file, target, replacement, true);
}
private static void replace(File file, String target, String replacement, boolean showMessage) throws IOException {
FileUtils.writeStringToFile(file,
FileUtils.readFileToString(file, StandardCharsets.UTF_8).replace(target, replacement),
StandardCharsets.UTF_8);
if(showMessage) {
log.info("Replaced {} to {} in {}", escape(target), escape(replacement), file.getAbsolutePath());
}
}
private static int findIndex(List<CtClass> array, String className) {
for(int i = 0; i < array.size(); i++) {
if(array.get(i).getName().equals(className)) {
return i;
}
}
return -1;
}
private static Map<String, List<Pair<String, String>>> getJsonNameMappings() throws IOException {
String resourcePath = "META-INF/konduit-serving/JsonNameMapping";
try(BufferedReader bufferedReader = new BufferedReader(new FileReader(new ClassPathResource(resourcePath).getFile()))) {
Map<String, List<Pair<String, String>>> mappings = new LinkedHashMap<>();
while (true) {
String line = bufferedReader.readLine();
if(line == null) {
break;
} else {
line = line.trim();
}
String[] splits = line.split(",");
if(splits.length > 2) {
String key = splits[2]; // Super class
Pair<String, String> value = new Pair<>(splits[0], splits[1]); // (Type, sub type class)
if(mappings.containsKey(key)) {
mappings.get(key).add(value);
} else {
mappings.put(key, new ArrayList<>(Collections.singleton(value)));
}
}
}
return mappings;
} catch (FileNotFoundException exception) {
log.error("Couldn't find file: {}. Installing 'konduit-serving-meta' module might fix this.", resourcePath);
System.exit(1);
}
return null;
}
private static List<CtClass> createAnnotatedClasses(ClassPool classPool, Map<String, List<Pair<String, String>>> mappings) {
return mappings.entrySet().stream().map(
entry -> {
String superClass = entry.getKey();
List<Pair<String, String>> jsonNamesAndClasses = entry.getValue();
CtClass ctClass;
try {
ctClass = classPool.get(superClass);
} catch (NotFoundException e) {
log.error("Couldn't create annotated classes from the given inputs", e);
System.exit(1);
return null;
}
ClassFile classFile = ctClass.getClassFile();
ConstPool constPool = classFile.getConstPool();
AnnotationsAttribute annotationsAttribute = new AnnotationsAttribute(constPool, AnnotationsAttribute.visibleTag);
Annotation annotation = new Annotation("io.swagger.v3.oas.annotations.media.Schema", constPool);
ArrayMemberValue arrayMemberValue = new ArrayMemberValue(constPool);
arrayMemberValue.setValue(jsonNamesAndClasses.stream()
.map(jsonNameAndClass -> new ClassMemberValue(jsonNameAndClass.getValue(), constPool)).toArray(ClassMemberValue[]::new)
);
// Add discriminator and their mappings for polymorphism if their json type names aren't null
if(jsonNamesAndClasses.get(0).getKey() != null) {
annotation.addMemberValue("discriminatorProperty", new StringMemberValue("@type", constPool));
ArrayMemberValue discriminatorMappingArray = new ArrayMemberValue(constPool);
discriminatorMappingArray.setValue(jsonNamesAndClasses.stream()
.map(jsonNameAndClass -> {
Annotation discriminatorMappingAnnotation = new Annotation("io.swagger.v3.oas.annotations.media.DiscriminatorMapping", constPool);
discriminatorMappingAnnotation.addMemberValue("value", new StringMemberValue(jsonNameAndClass.getKey(), constPool));
discriminatorMappingAnnotation.addMemberValue("schema", new ClassMemberValue(jsonNameAndClass.getValue(), constPool));
return new AnnotationMemberValue(discriminatorMappingAnnotation, constPool);
}).toArray(AnnotationMemberValue[]::new)
);
annotation.addMemberValue("discriminatorMapping", discriminatorMappingArray);
}
// Ignore the graph builder for GraphStep
if(superClass.equals("ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep")) {
Annotation jsonIgnorePropertyAnnotation = new Annotation("com.fasterxml.jackson.annotation.JsonIgnoreProperties", constPool);
ArrayMemberValue ignoredPropertiesValue = new ArrayMemberValue(constPool);
ignoredPropertiesValue.setValue(new StringMemberValue[] { new StringMemberValue("builder", constPool) });
jsonIgnorePropertyAnnotation.addMemberValue("value", ignoredPropertiesValue);
annotationsAttribute.addAnnotation(jsonIgnorePropertyAnnotation);
}
annotation.addMemberValue("subTypes", arrayMemberValue);
annotationsAttribute.addAnnotation(annotation);
ctClass.getClassFile().addAttribute(annotationsAttribute);
return ctClass;
}
).collect(Collectors.toList());
}
private static void addSchemas(OpenAPI openAPI, Class<?> clazz) {
ModelConverters.getInstance().readAll(clazz).forEach(openAPI::schema);
}
private static void createApiInfo(OpenAPI openAPI) {
try (InputStream is = GenerateRestClients.class.getClassLoader().getResourceAsStream("META-INF/konduit-serving-clients-git.properties")) {
if (is == null) {
throw new IllegalStateException("Cannot find konduit-serving-clients-git.properties on classpath");
}
Properties gitProperties = new Properties();
gitProperties.load(is);
String projectVersion = gitProperties.getProperty("git.build.version");
String commitId = gitProperties.getProperty("git.commit.id").substring(0, 8);
openAPI.info(new Info()
.title("Konduit Serving REST API")
.version(String.format("%s | Commit: %s", projectVersion, commitId))
.description("RESTful API for various operations inside konduit-serving")
.license(new License()
.name("Apache 2.0")
.url("https://github.com/KonduitAI/konduit-serving/blob/master/LICENSE"))
.contact(new Contact()
.url("https://konduit.ai/contact")
.name("Konduit K.K.")
.email("hello@konduit.ai")))
.tags(Collections.singletonList(
new Tag()
.name("inference")
.description("Inference server operations")))
.externalDocs(new ExternalDocumentation()
.description("Online documentation")
.url("https://serving.konduit.ai"))
.path("/predict", new PathItem()
.summary("Predicts an output based on the given JSON (key/value) or binary string")
.description("Takes a JSON string of key value pairs or a binary data string (protobuf) as input " +
"and processes it in the pipeline. The output could be json or a binary string based on " +
"the accept header value (application/json or application/octet-stream respectively).")
.post(new Operation()
.operationId("predict")
.addTagsItem("inference")
.requestBody(new RequestBody()
.required(true)
.content(new Content()
.addMediaType(APPLICATION_JSON.toString(),
new MediaType().schema(new MapSchema()))
.addMediaType(APPLICATION_OCTET_STREAM.toString(),
new MediaType().schema(new BinarySchema()))
)
).responses(new ApiResponses()
.addApiResponse("200", new ApiResponse()
.description("Successful operation")
.content(new Content()
.addMediaType(APPLICATION_JSON.toString(),
new MediaType().schema(new MapSchema()))
.addMediaType(APPLICATION_OCTET_STREAM.toString(),
new MediaType().schema(new BinarySchema()))
)
).addApiResponse("500", new ApiResponse()
.description("Internal server error")
.content(new Content()
.addMediaType(APPLICATION_JSON.toString(), new MediaType()
.schema(new ObjectSchema().$ref("#/components/schemas/ErrorResponse"))
)
)
)
)
)
);
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
}
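// Illustrative usage sketch (not part of the original source): generating the
// clients into a custom directory. The system property name matches the one read
// in generateClients(); "build/clients" is an arbitrary example path.
class GenerateRestClientsUsageSketch {
    public static void main(String[] args) throws Exception {
        System.setProperty("konduit.generator.clients.directory", "build/clients");
        GenerateRestClients.main(new String[0]);
    }
}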
|
0
|
java-sources/ai/konduit/serving/konduit-serving-common-tests/0.3.0/ai/konduit/serving/common
|
java-sources/ai/konduit/serving/konduit-serving-common-tests/0.3.0/ai/konduit/serving/common/test/BaseJsonCoverageTest.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.common.test;
import lombok.extern.slf4j.Slf4j;
import org.junit.AfterClass;
import org.junit.Before;
import org.reflections.Reflections;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
@Slf4j
public abstract class BaseJsonCoverageTest {
protected static Set<Class<?>> allClasses;
protected static Set<Class<?>> seen;
public abstract String getPackageName();
public abstract Object fromJson(Class<?> c, String json);
public abstract Object fromYaml(Class<?> c, String yaml);
@Before
public void before() throws Exception {
if(allClasses == null) {
//Perform initialization only once
//Collect all classes implementing TextConfig interface (i.e., has JSON and YAML conversion support)
allClasses = new LinkedHashSet<>();
seen = new LinkedHashSet<>();
Reflections reflections = new Reflections(getPackageName());
Class<Object> tcClass = (Class<Object>) Class.forName("ai.konduit.serving.pipeline.api.TextConfig");
Set<Class<?>> subTypes = reflections.getSubTypesOf(tcClass);
System.out.println(String.format("All subtypes of %s:", tcClass.getCanonicalName()));
for (Class<?> c : subTypes) {
if (!ignores().contains(c)) {
int mod = c.getModifiers();
if (Modifier.isAbstract(mod) || Modifier.isInterface(mod))
continue;
allClasses.add(c);
System.out.println(c);
}
}
}
}
@AfterClass
public static void afterClass() {
if(!seen.containsAll(allClasses)) {
List<String> notTested = new ArrayList<>();
for(Class<?> c : allClasses){
if(!seen.contains(c)) {
notTested.add(c.getName());
}
}
Collections.sort(notTested);
for(String s : notTested){
log.warn("Class was not tested for JSON/YAML serialization/deserialization: {}", s);
}
fail(notTested.size() + " of " + allClasses.size() + " classes implementing TextConfig were not tested for JSON/YAML serialization and deserialization");
}
}
public void testConfigSerDe(Object o) {
try{
testConfigSerDeHelper(o);
} catch (Exception e){
throw new RuntimeException(e);
}
}
protected void testConfigSerDeHelper(Object o) throws Exception {
seen.add(o.getClass()); //Record class for coverage tracking
Class<Object> tcClass = (Class<Object>) Class.forName("ai.konduit.serving.pipeline.api.TextConfig");
Method toJsonMethod = tcClass.getDeclaredMethod("toJson");
Method toYamlMethod = tcClass.getDeclaredMethod("toYaml");
String json = (String) toJsonMethod.invoke(o);
String yaml = (String) toYamlMethod.invoke(o);
Object fromJson = fromJson(o.getClass(), json);
Object fromYaml = fromYaml(o.getClass(), yaml);
assertEquals("to/from JSON object is not equal", o, fromJson);
assertEquals("to/from YAML object is not equal ", o, fromYaml);
}
public Set<Class<?>> ignores(){
return Collections.emptySet();
}
}
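// Illustrative sketch (not part of the original source): a minimal concrete
// subclass. Assumes konduit-serving-pipeline's ObjectMappers is on the classpath;
// the package name is an example.
class ExampleJsonCoverageTest extends BaseJsonCoverageTest {
    @Override
    public String getPackageName() {
        return "ai.konduit.serving.pipeline";
    }

    @Override
    public Object fromJson(Class<?> c, String json) {
        try {
            return ai.konduit.serving.pipeline.util.ObjectMappers.json().readValue(json, c);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public Object fromYaml(Class<?> c, String yaml) {
        try {
            return ai.konduit.serving.pipeline.util.ObjectMappers.yaml().readValue(yaml, c);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}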
|
0
|
java-sources/ai/konduit/serving/konduit-serving-common-tests/0.3.0/ai/konduit/serving/common
|
java-sources/ai/konduit/serving/konduit-serving-common-tests/0.3.0/ai/konduit/serving/common/test/BaseSwaggerAnnotationCheck.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.common.test;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.extern.slf4j.Slf4j;
import org.reflections.Reflections;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import static org.junit.Assert.fail;
@Slf4j
public abstract class BaseSwaggerAnnotationCheck {
public abstract String getPackageName();
public void runTest() throws ClassNotFoundException {
Set<Class<?>> failedClasses = new HashSet<>();
Reflections reflections = new Reflections(getPackageName());
Class<Object> tcClass = (Class<Object>) Class.forName("ai.konduit.serving.pipeline.api.step.PipelineStep");
Set<Class<?>> subTypes = reflections.getSubTypesOf(tcClass);
Class<?> schemaClass = Schema.class;
Set<Class<?>> ignores = ignores();
for (Class<?> c : subTypes) {
if (ignores.contains(c))
continue; //Skip
Field[] fields = c.getDeclaredFields();
for (Field f : fields) {
if (Modifier.isStatic(f.getModifiers())) //Skip static fields
continue;
boolean foundSchemaAnnotation = false;
Annotation[] annotations = f.getDeclaredAnnotations();
for (Annotation a : annotations) {
if (a.annotationType() == schemaClass) {
foundSchemaAnnotation = true;
break;
}
}
if (!foundSchemaAnnotation) {
log.warn("MISSING ANNOTATION: " + c + " - field " + f.getName());
failedClasses.add(c);
}
}
}
if (!failedClasses.isEmpty()){
fail("There are still " + failedClasses.size() + " classes with missing annotation:\n" + failedClasses.stream()
.map(n -> n.getCanonicalName())
.collect(Collectors.joining("\n")));
}
}
public Set<Class<?>> ignores() {
return Collections.emptySet();
}
}
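// Illustrative sketch (not part of the original source): wiring the check into a
// JUnit 4 test. The package name below is an example.
class ExampleSwaggerAnnotationCheck extends BaseSwaggerAnnotationCheck {
    @Override
    public String getPackageName() {
        return "ai.konduit.serving.pipeline";
    }

    @org.junit.Test
    public void allStepFieldsAnnotated() throws ClassNotFoundException {
        runTest();
    }
}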
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/ExecParameterConsumer.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator;
import picocli.CommandLine;
import java.util.List;
import java.util.Stack;
public class ExecParameterConsumer implements CommandLine.IParameterConsumer {
public ExecParameterConsumer() {
}
@Override
public void consumeParameters(Stack<String> args, CommandLine.Model.ArgSpec argSpec, CommandLine.Model.CommandSpec commandSpec) {
List<String> list = argSpec.getValue();
while (!args.isEmpty()) {
String arg = args.pop();
list.add(arg);
}
//remove the help prompt and replace it with our underlying help function.
if(list.size() == 1 && (list.contains("--help") || list.contains("-h"))) {
list.clear();
}
//add this as a default for the user, to show a proper help command
if(list.isEmpty()) {
list.add("-exec");
}
//also always ensure that if the user omits -exec, it is inserted as the first parameter,
//allowing seamless bridging
if(!list.isEmpty() && !list.get(0).equals("-exec")) {
list.add(0,"-exec");
}
}
}
}
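// Illustrative sketch (not part of the original source): attaching the consumer
// to a picocli option so every remaining argument is captured verbatim and
// bridged to an -exec invocation. The command field and option name are hypothetical.
class ExecCommandSketch {
    @CommandLine.Option(names = "--args",
            parameterConsumer = ExecParameterConsumer.class,
            description = "Remaining parameters, bridged to -exec")
    List<String> execArgs = new java.util.ArrayList<>();
}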
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/InferenceServerCreate.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator;
import ai.konduit.serving.pipeline.api.pipeline.Pipeline;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import ai.konduit.serving.vertx.config.InferenceConfiguration;
import ai.konduit.serving.vertx.config.ServerProtocol;
import org.nd4j.common.base.Preconditions;
import org.nd4j.shade.jackson.databind.ObjectMapper;
import picocli.CommandLine;
import java.io.File;
import java.util.concurrent.Callable;
@CommandLine.Command(name = "inference-server-create",mixinStandardHelpOptions = true,description = "Create an inference server configuration for starting a rest api based on the pipeline specified.")
public class InferenceServerCreate implements Callable<Void> {
@CommandLine.Option(names = {"--pipeline"},description = "Pipeline file path, must end in json, yml, or yaml",required = true)
private File pipelineFile;
@CommandLine.Option(names = {"--port"},description = "The port to use for the inference server, defaults to 9999. 0 means that the server will pick a random port on startup.")
private int port = 9999;
@CommandLine.Option(names = {"--protocol"},description = "The protocol to use. One of kafka,mqtt,http,grpc are supported. Defaults to http")
private String protocol = "http";
@CommandLine.Spec
private CommandLine.Model.CommandSpec spec; // injected by picocli
private ObjectMapper jsonMapper = ObjectMappers.json();
private ObjectMapper yamlMapper = ObjectMappers.yaml();
@Override
public Void call() throws Exception {
Preconditions.checkNotNull(pipelineFile,"No pipeline file specified!");
InferenceConfiguration inferenceConfiguration = new InferenceConfiguration()
.protocol(ServerProtocol.valueOf(protocol.toUpperCase()))
.port(port);
if(pipelineFile.getName().endsWith(".json")) {
Pipeline p = jsonMapper.readValue(pipelineFile,Pipeline.class);
inferenceConfiguration.pipeline(p);
spec.commandLine().getOut().println(inferenceConfiguration.toJson());
} else if(pipelineFile.getName().endsWith(".yml") || pipelineFile.getName().endsWith(".yaml")) {
Pipeline p = yamlMapper.readValue(pipelineFile,Pipeline.class);
inferenceConfiguration.pipeline(p);
spec.commandLine().getOut().println(inferenceConfiguration.toYaml());
} else {
spec.commandLine().getErr().println("Invalid pipeline file: name must end in .json, .yml or .yaml");
}
return null;
}
}
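// Illustrative usage (not part of the original source): invoking this subcommand
// through the "konduit" main command defined in MainCommand. The file name is an
// example; because it ends in .yaml, the configuration is printed as YAML.
//
//   konduit inference-server-create --pipeline my-pipeline.yaml --port 8080 --protocol grpc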
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/MainCommand.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator;
import picocli.CommandLine;
import java.io.PrintWriter;
import java.io.Writer;
@CommandLine.Command(name = "konduit",subcommands = {
InferenceServerCreate.class,
SequencePipelineCombiner.class,
StepCreator.class
},mixinStandardHelpOptions = true)
public class MainCommand {
/**
* Create a {@link CommandLine} initializing the dynamic
* {@link StepCreator} {@link picocli.CommandLine.Model.CommandSpec},
* with System.out as the default output writer
* @return the associated {@link CommandLine}
* @throws Exception if the command line could not be created
*/
public static CommandLine createCommandLine() throws Exception {
return createCommandLine(null);
}
/**
* Create a {@link CommandLine}
* with a dynamic {@link StepCreator}
* {@link picocli.CommandLine.Model.CommandSpec}
* and an optional (can be null) {@link Writer}
* for collecting output
* @param out the writer to collect output with, may be null to default to System.out
* @return the associated {@link CommandLine}
* @throws Exception if the command line could not be created
*/
public static CommandLine createCommandLine(Writer out) throws Exception {
CommandLine commandLine = new CommandLine(new MainCommand());
if(out != null) {
commandLine.setOut(new PrintWriter(out));
}
return commandLine;
}
public static void main(String...args) throws Exception {
CommandLine commandLine = MainCommand.createCommandLine();
int exit = commandLine.execute(args);
System.exit(exit);
}
}
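// Illustrative sketch (not part of the original source): collecting command
// output in memory via the Writer overload instead of printing to System.out.
class MainCommandUsageSketch {
    public static void main(String[] args) throws Exception {
        java.io.StringWriter out = new java.io.StringWriter();
        CommandLine commandLine = MainCommand.createCommandLine(out);
        commandLine.execute("--help");
        System.out.println(out); // the collected help text
    }
}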
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/PipelineStepType.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator;
import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.impl.step.logging.LoggingStep;
import ai.konduit.serving.pipeline.impl.step.ml.classifier.ClassifierOutputStep;
import ai.konduit.serving.pipeline.impl.step.ml.ssd.SSDToBoundingBoxStep;
public enum PipelineStepType {
CROP_GRID,
CROP_FIXED_GRID,
DL4J,
KERAS,
DRAW_BOUNDING_BOX,
DRAW_FIXED_GRID,
DRAW_GRID,
DRAW_SEGMENTATION,
EXTRACT_BOUNDING_BOX,
CAMERA_FRAME_CAPTURE,
VIDEO_FRAME_CAPTURE,
IMAGE_TO_NDARRAY,
LOGGING,
SSD_TO_BOUNDING_BOX,
SAMEDIFF,
SHOW_IMAGE,
TENSORFLOW,
ND4JTENSORFLOW,
PYTHON,
ONNX,
CLASSIFIER_OUTPUT,
IMAGE_RESIZE,
RELATIVE_TO_ABSOLUTE,
DRAW_POINTS,
DRAW_HEATMAP,
PERSPECTIVE_TRANSFORM,
IMAGE_CROP,
GRAY_SCALE,
TVM,
TENSORRT,
SAMEDIFF_TRAINING;
/**
* Returns the {@link PipelineStepType}
* of a class that extends {@link PipelineStep}.
* Note that all {@link PipelineStep}s that interact
* with this class must be annotated with {@link JsonName} -
* that annotation's value is mapped to
* the proper type.
* @param clazz the class
* @return the {@link PipelineStepType} for the given class
*/
public static PipelineStepType typeForClazz(Class<? extends PipelineStep> clazz) {
if(clazz == null) {
throw new IllegalArgumentException("Class must not be null!");
}
if(clazz.getDeclaredAnnotation(JsonName.class) == null) {
throw new IllegalArgumentException("Class " + clazz.getName() + " does not have annotation JsonName!");
}
JsonName annotation = clazz.getDeclaredAnnotation(JsonName.class);
return PipelineStepType.valueOf(annotation.value().toUpperCase());
}
public static Class<? extends PipelineStep> clazzForType(PipelineStepType pipelineStepType) throws ClassNotFoundException {
Class<?> clazz;
switch (pipelineStepType) {
case IMAGE_CROP:
clazz = Class.forName("ai.konduit.serving.data.image.step.crop.ImageCropStep");
return (Class<? extends PipelineStep>) clazz;
case DRAW_HEATMAP:
clazz = Class.forName("ai.konduit.serving.data.image.step.point.heatmap.DrawHeatmapStep");
return (Class<? extends PipelineStep>) clazz;
case GRAY_SCALE:
clazz = Class.forName("ai.konduit.serving.data.image.step.grayscale.GrayScaleStep");
return (Class<? extends PipelineStep>) clazz;
case DRAW_POINTS:
clazz = Class.forName("ai.konduit.serving.data.image.step.point.draw.DrawPointsStep");
return (Class<? extends PipelineStep>) clazz;
case RELATIVE_TO_ABSOLUTE:
clazz = Class.forName("ai.konduit.serving.data.image.step.point.convert.RelativeToAbsoluteStep");
return (Class<? extends PipelineStep>) clazz;
case IMAGE_RESIZE:
clazz = Class.forName("ai.konduit.serving.data.image.step.resize.ImageResizeStep");
return (Class<? extends PipelineStep>) clazz;
case PERSPECTIVE_TRANSFORM:
clazz = Class.forName("ai.konduit.serving.data.image.step.point.perspective.convert.PerspectiveTransformStep");
return (Class<? extends PipelineStep>) clazz;
case CROP_GRID:
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.crop.CropGridStep");
return (Class<? extends PipelineStep>) clazz;
case CROP_FIXED_GRID:
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.crop.CropFixedGridStep");
return (Class<? extends PipelineStep>) clazz;
case DL4J:
clazz = Class.forName("ai.konduit.serving.models.deeplearning4j.step.DL4JStep");
return (Class<? extends PipelineStep>) clazz;
case KERAS:
clazz = Class.forName("ai.konduit.serving.models.deeplearning4j.step.keras.KerasStep");
return (Class<? extends PipelineStep>) clazz;
case DRAW_BOUNDING_BOX:
clazz = Class.forName("ai.konduit.serving.data.image.step.bb.draw.DrawBoundingBoxStep");
return (Class<? extends PipelineStep>) clazz;
case DRAW_FIXED_GRID:
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.draw.DrawFixedGridStep");
return (Class<? extends PipelineStep>) clazz;
case DRAW_GRID:
clazz = Class.forName("ai.konduit.serving.data.image.step.grid.draw.DrawGridStep");
return (Class<? extends PipelineStep>) clazz;
case DRAW_SEGMENTATION:
clazz = Class.forName("ai.konduit.serving.data.image.step.segmentation.index.DrawSegmentationStep");
return (Class<? extends PipelineStep>) clazz;
case EXTRACT_BOUNDING_BOX:
clazz = Class.forName("ai.konduit.serving.data.image.step.bb.extract.ExtractBoundingBoxStep");
return (Class<? extends PipelineStep>) clazz;
case CAMERA_FRAME_CAPTURE:
clazz = Class.forName("ai.konduit.serving.data.image.step.capture.CameraFrameCaptureStep");
return (Class<? extends PipelineStep>) clazz;
case VIDEO_FRAME_CAPTURE:
clazz = Class.forName("ai.konduit.serving.data.image.step.capture.VideoFrameCaptureStep");
return (Class<? extends PipelineStep>) clazz;
case IMAGE_TO_NDARRAY:
clazz = Class.forName("ai.konduit.serving.data.image.step.ndarray.ImageToNDArrayStep");
return (Class<? extends PipelineStep>) clazz;
case LOGGING:
return LoggingStep.class;
case SSD_TO_BOUNDING_BOX:
return SSDToBoundingBoxStep.class;
case SAMEDIFF:
clazz = Class.forName("ai.konduit.serving.models.samediff.step.SameDiffStep");
return (Class<? extends PipelineStep>) clazz;
case SAMEDIFF_TRAINING:
clazz = Class.forName("ai.konduit.serving.models.samediff.step.trainer.SameDiffTrainerStep");
return (Class<? extends PipelineStep>) clazz;
case SHOW_IMAGE:
clazz = Class.forName("ai.konduit.serving.data.image.step.show.ShowImageStep");
return (Class<? extends PipelineStep>) clazz;
case TENSORFLOW:
clazz = Class.forName("ai.konduit.serving.models.tensorflow.step.TensorFlowStep");
return (Class<? extends PipelineStep>) clazz;
case ND4JTENSORFLOW:
clazz = Class.forName("ai.konduit.serving.models.nd4j.tensorflow.step.Nd4jTensorFlowStep");
return (Class<? extends PipelineStep>) clazz;
case ONNX:
clazz = Class.forName("ai.konduit.serving.models.onnx.step.ONNXStep");
return (Class<? extends PipelineStep>) clazz;
case PYTHON:
return (Class<? extends PipelineStep>) Class.forName("ai.konduit.serving.python.PythonStep");
case CLASSIFIER_OUTPUT:
return ClassifierOutputStep.class;
case TVM:
return (Class<? extends PipelineStep>) Class.forName("ai.konduit.serving.models.tvm.step.TVMStep");
case TENSORRT:
return (Class<? extends PipelineStep>) Class.forName("ai.konduit.serving.tensorrt.TensorRTStep");
}
return null;
}
}
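// Illustrative sketch (not part of the original source): round-tripping between
// enum values and step classes. LOGGING resolves without reflection, so this runs
// with only konduit-serving-pipeline on the classpath; the typeForClazz call
// assumes LoggingStep carries a @JsonName("logging") annotation.
class PipelineStepTypeUsageSketch {
    public static void main(String[] args) throws ClassNotFoundException {
        Class<? extends PipelineStep> clazz = PipelineStepType.clazzForType(PipelineStepType.LOGGING);
        System.out.println(clazz);                                             // LoggingStep
        System.out.println(PipelineStepType.typeForClazz(LoggingStep.class));  // LOGGING
    }
}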
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/SequencePipelineCombiner.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.impl.pipeline.SequencePipeline;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import org.apache.commons.io.FileUtils;
import org.nd4j.shade.jackson.databind.ObjectMapper;
import picocli.CommandLine;
import java.io.File;
import java.nio.charset.Charset;
import java.util.List;
import java.util.concurrent.Callable;
@CommandLine.Command(name = "sequence-pipeline-creator",mixinStandardHelpOptions = true,description = "Combine a list of pipeline json or yaml files (specified by file format) together to form a pipeline.")
public class SequencePipelineCombiner implements Callable<Void> {
@CommandLine.Option(names = {"--pipeline"},description = "Pipeline String",required = true)
private List<File> pipelineStep;
@CommandLine.Option(names = {"--file-format"},description = "Pipeline String")
private String format = "json";
@CommandLine.Spec
private CommandLine.Model.CommandSpec spec; // injected by picocli
private ObjectMapper jsonMapper = ObjectMappers.json();
private ObjectMapper yamlMapper = ObjectMappers.yaml();
@Override
public Void call() throws Exception {
SequencePipeline.Builder pipelineBuilder = SequencePipeline.builder();
for(File f : pipelineStep) {
if(format.equals("json")) {
PipelineStep step = jsonMapper.readValue(f, PipelineStep.class);
pipelineBuilder.add(step);
} else if(format.equals("yml") || format.equals("yaml")) {
PipelineStep step = yamlMapper.readValue(f,PipelineStep.class);
pipelineBuilder.add(step);
}
}
if(format.equals("json")) {
spec.commandLine().getOut().println(pipelineBuilder.build().toJson());
} else if(format.equals("yml") || format.equals("yaml")) {
spec.commandLine().getOut().println(pipelineBuilder.build().toYaml());
} else {
System.err.println("Invalid format: please specify json,yml,yaml");
}
return null;
}
}
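// Illustrative usage (not part of the original source): combining two step files
// into a single SequencePipeline printed as YAML. The file names are examples.
//
//   konduit sequence-pipeline-creator --pipeline step1.yaml --pipeline step2.yaml --file-format yaml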
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/StepCreator.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator;
import ai.konduit.serving.configcreator.converter.*;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
import ai.konduit.serving.model.PythonConfig;
import ai.konduit.serving.pipeline.api.data.Point;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.tensorrt.NamedDimensionList;
import io.swagger.v3.oas.annotations.media.Schema;
import org.nd4j.linalg.learning.config.IUpdater;
import org.nd4j.linalg.schedule.ISchedule;
import picocli.CommandLine;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.concurrent.Callable;
import static ai.konduit.serving.configcreator.PipelineStepType.*;
@CommandLine.Command(name = "step-create",description = "Create a pipeline step for use in a pipeline.",
modelTransformer = StepCreator.class,mixinStandardHelpOptions = true)
public class StepCreator implements CommandLine.IModelTransformer, Callable<Integer> {
private static Map<String, CommandLine.ITypeConverter> converters = new HashMap<>();
@CommandLine.Spec
private CommandLine.Model.CommandSpec spec; // injected by picocli
private static Set<String> commandsAdding = new HashSet<>();
private static CommandLine.Model.CommandSpec root;
@Override
public CommandLine.Model.CommandSpec transform(CommandLine.Model.CommandSpec commandSpec) {
if(root == null) {
root = commandSpec;
registerConverters();
}
try {
addSubCommandForSteps(commandSpec);
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
return commandSpec;
}
@Override
public Integer call() throws Exception {
return run(spec.commandLine().getParseResult());
}
public static int run(CommandLine.ParseResult parseResult) throws Exception {
PipelineStep stepFromResult = createStepFromResult(parseResult);
//if a user passes the help signal, createStepFromResult returns null
if(stepFromResult == null) {
if(parseResult.subcommand() != null)
parseResult.subcommand().commandSpec().commandLine().usage(System.err);
else {
parseResult.commandSpec().commandLine().usage(System.err);
}
return 1;
}
CommandLine.Model.OptionSpec optionSpec = parseResult.matchedOption("--fileFormat");
String fileFormat = optionSpec == null ? "json" : optionSpec.getValue();
if(fileFormat.equals("json")) {
parseResult.commandSpec().commandLine().getOut().println(stepFromResult.toJson());
} else if(fileFormat.equals("yaml") || fileFormat.equals("yml")) {
parseResult.commandSpec().commandLine().getOut().println(stepFromResult.toYaml());
}
return 0;
}
private enum GraphStepType {
SWITCH,
MERGE,
ANY
}
private void registerConverters() {
converters.put(ImageToNDArrayConfig.class.getName(),new ImageToNDArrayConfigTypeConverter());
converters.put(Point.class.getName(),new PointConverter());
converters.put(PythonConfig.class.getName(),new PythonConfigTypeConverter());
converters.put(NamedDimensionList.class.getName(),new NameDimensionConverter());
converters.put(IUpdater.class.getName(),new UpdaterConverter());
converters.put(ISchedule.class.getName(),new LearningRateScheduleConverter());
}
private void addSubCommandForSteps(CommandLine.Model.CommandSpec ret) throws ClassNotFoundException {
PipelineStepType[] values = null;
if(System.getProperty("os.arch").contains("amd")) {
values = PipelineStepType.values();
} else {
//non amd (probably arm): pick only the steps we can load on non intel/amd devices
values = new PipelineStepType[] {
CROP_GRID,
CROP_FIXED_GRID,
DL4J,
KERAS,
DRAW_BOUNDING_BOX,
DRAW_FIXED_GRID,
DRAW_GRID,
DRAW_SEGMENTATION,
EXTRACT_BOUNDING_BOX,
CAMERA_FRAME_CAPTURE,
VIDEO_FRAME_CAPTURE,
IMAGE_TO_NDARRAY,
LOGGING,
SSD_TO_BOUNDING_BOX,
SAMEDIFF,
SHOW_IMAGE,
PYTHON,
ONNX,
CLASSIFIER_OUTPUT,
IMAGE_RESIZE,
RELATIVE_TO_ABSOLUTE,
DRAW_POINTS,
DRAW_HEATMAP,
PERSPECTIVE_TRANSFORM,
IMAGE_CROP,
GRAY_SCALE,
TENSORRT,
};
}
for(PipelineStepType pipelineStepType : values) {
//already contains step
if(commandsAdding.contains(pipelineStepType.name().toLowerCase()))
continue;
commandsAdding.add(pipelineStepType.name().toLowerCase());
Class<? extends PipelineStep> aClass = PipelineStepType.clazzForType(pipelineStepType);
if(aClass != null) {
CommandLine.Model.CommandSpec spec = CommandLine.Model.CommandSpec.forAnnotatedObject(this);
addStep(PipelineStepType.clazzForType(pipelineStepType),spec);
spec.name(pipelineStepType.name().toLowerCase());
spec.addOption(CommandLine.Model.OptionSpec.builder("--fileFormat")
.type(String.class)
.required(true)
.description("The file format (either json or yaml/yml) to output the pipeline step in")
.build());
root.addSubcommand(pipelineStepType.name().toLowerCase(),spec);
} else {
System.err.println("No class found for " + pipelineStepType);
}
}
}
public CommandLine.Model.CommandSpec spec() throws Exception {
registerConverters();
CommandLine.Model.CommandSpec ret = CommandLine.Model.CommandSpec.create();
addSubCommandForSteps(ret);
ret.name("step-create");
ret.mixinStandardHelpOptions(true);
return ret;
}
public void addStep(Class<? extends PipelineStep> clazz,CommandLine.Model.CommandSpec spec) {
for(Field field : clazz.getDeclaredFields()) {
if(Modifier.isStatic(field.getModifiers())) {
continue;
}
field.setAccessible(true);
CommandLine.Model.OptionSpec.Builder builder = CommandLine
.Model.OptionSpec.builder("--" + field.getName())
.type(field.getType());
StringBuilder description = new StringBuilder();
if(clazz.isAnnotationPresent(Schema.class)) {
Schema annotation = clazz.getAnnotation(Schema.class);
String description2 = annotation.description();
builder.description(description2);
}
if(field.isAnnotationPresent(Schema.class)) {
Schema annotation = field.getAnnotation(Schema.class);
description.append(annotation.description());
builder.description(annotation.description());
appendEnumTypesIfApplicable(description, field);
}
if(converters.containsKey(field.getType().getName())) {
//describe each annotated field of the converted type in the option help text
for(Field f : field.getType().getDeclaredFields()) {
if(f.isAnnotationPresent(Schema.class)) {
Schema annotation = f.getAnnotation(Schema.class);
description.append("\n");
description.append("\nParameter value of name " + f.getName() + " for value " + field.getName() + " " + annotation.description() + "\n");
appendEnumTypesIfApplicable(description, f);
}
}
builder.converters(converters.get(field.getType().getName()));
}
builder.names("--" + field.getName());
builder.description(description.toString());
spec.addOption(builder.build());
}
}
private void appendEnumTypesIfApplicable(StringBuilder description, Field f) {
if(Enum.class.isAssignableFrom(f.getType())) {
description.append("\n Possible values are: ");
Object[] values = f.getType().getEnumConstants();
for(Object value : values) {
description.append(value.toString());
description.append(",");
}
description.append("\n");
}
}
public static PipelineStep createStepFromResult(CommandLine.ParseResult parseResult) throws Exception {
CommandLine.ParseResult subcommand = parseResult;
String name = subcommand.commandSpec().name();
if(subcommand != null && subcommand.subcommand() == null) {
name = subcommand.commandSpec().name();
return getPipelineStep(subcommand, name);
} else if(subcommand != null && subcommand.commandSpec() != null) {
name = subcommand.commandSpec().name();
return getPipelineStep(subcommand.subcommand(), name);
}
return null;
}
private static PipelineStep getPipelineStep(CommandLine.ParseResult subcommand, String name) throws ClassNotFoundException, InstantiationException, IllegalAccessException {
PipelineStepType pipelineStepType = PipelineStepType.valueOf(name.toUpperCase());
Class<? extends PipelineStep> aClass = PipelineStepType.clazzForType(pipelineStepType);
PipelineStep ret = aClass.newInstance();
for(Field field : aClass.getDeclaredFields()) {
field.setAccessible(true);
if(subcommand.hasMatchedOption("--" + field.getName())) {
CommandLine.Model.OptionSpec optionSpec = subcommand.matchedOption("--" + field.getName());
Object value = optionSpec.getValue();
field.set(ret,value);
}
}
return ret;
}
}
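// Illustrative usage (not part of the original source): each PipelineStepType is
// registered as a lower-case subcommand, so a logging step configuration could be
// emitted as JSON with:
//
//   konduit step-create logging --fileFormat json
//
// Missing or invalid options cause the subcommand's usage text to be printed on
// stderr and a non-zero exit code, as implemented in run(...) above.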
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/StringSplitter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Parse a string to extract a set of key/value pairs
* based on the passed-in delimiter. Handles cases where
* string literals are declared as values.
*
* @author Adam Gibson
*/
public class StringSplitter {
private String delimiter;
public StringSplitter(String delimiter) {
this.delimiter = delimiter;
}
/**
* Split the given string into a set of key/value pairs,
* handling quoted string literals.
* When a literal is found, its contents are passed through
* as-is as the value; it is assumed the caller knows how to
* process the given literal.
* @param input the input to process
* @return the key/value pairs as strings
*/
public Map<String,String> splitResult(String input) {
Map<String,String> ret = new LinkedHashMap<>();
StringBuilder key = new StringBuilder();
StringBuilder value = new StringBuilder();
StringBuilder currBuff = key;
boolean inLiteral = false;
for(int i = 0; i < input.length(); i++) {
//still in middle of literal
if(inLiteral && input.charAt(i) != '"') {
currBuff.append(input.charAt(i));
continue;
} else if(input.charAt(i) == delimiter.charAt(0)) {
//new key and value
ret.put(key.toString(),value.toString());
key = new StringBuilder();
value = new StringBuilder();
//reset to key as default value
currBuff = key;
continue;
}
switch(input.charAt(i)) {
//finished key
case '=':
currBuff = value;
break;
case '"':
//begin or end literal
inLiteral = !inLiteral;
break;
default:
currBuff.append(input.charAt(i));
break;
}
}
//put last value
if(!key.toString().isEmpty() && !value.toString().isEmpty())
ret.put(key.toString(),value.toString());
return ret;
}
}
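// Illustrative sketch (not part of the original source): parsing a delimited
// key=value string where one value is a quoted literal containing the delimiter.
class StringSplitterUsageSketch {
    public static void main(String[] args) {
        StringSplitter splitter = new StringSplitter(",");
        Map<String, String> parsed =
                splitter.splitResult("height=224,width=224,path=\"/tmp/a,b.png\"");
        System.out.println(parsed); // {height=224, width=224, path=/tmp/a,b.png}
    }
}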
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/converter/ImageNormalizationTypeConverter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator.converter;
import ai.konduit.serving.data.image.convert.config.ImageNormalization;
import picocli.CommandLine;
/**
* Splits the image normalization configuration on spaces,
* since commas are already used as the delimiter when parsing other values.
*/
public class ImageNormalizationTypeConverter implements CommandLine.ITypeConverter<ImageNormalization> {
@Override
public ImageNormalization convert(String value) throws Exception {
String[] split = value.split(" ");
ImageNormalization.Type type = ImageNormalization.Type.valueOf(split[0].toUpperCase());
//tokens 2-4 are the mean, tokens 5-7 the standard deviation, an optional 8th token the max value
double[] mean = null,std = null;
Double maxValue = null;
if(split.length >= 4) {
mean = new double[3];
mean[0] = Double.parseDouble(split[1]);
mean[1] = Double.parseDouble(split[2]);
mean[2] = Double.parseDouble(split[3]);
}
if(split.length >= 7) {
std = new double[3];
std[0] = Double.parseDouble(split[4]);
std[1] = Double.parseDouble(split[5]);
std[2] = Double.parseDouble(split[6]);
}
if(split.length >= 8) {
maxValue = Double.parseDouble(split[7]);
}
return new ImageNormalization(type,maxValue,mean,std);
}
}
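// Illustrative sketch (not part of the original source): the expected token
// layout is "type [mean1 mean2 mean3 [std1 std2 std3 [maxValue]]]". The type
// token below is hypothetical; valid names are whatever ImageNormalization.Type
// actually defines, and the mean/std numbers are the common ImageNet values.
//
//   ImageNormalization norm = new ImageNormalizationTypeConverter()
//           .convert("subtract_mean 0.485 0.456 0.406 0.229 0.224 0.225");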
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/converter/ImageToNDArrayConfigTypeConverter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator.converter;
import ai.konduit.serving.configcreator.StringSplitter;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
import ai.konduit.serving.data.image.convert.config.AspectRatioHandling;
import ai.konduit.serving.data.image.convert.config.ImageNormalization;
import ai.konduit.serving.data.image.convert.config.NDChannelLayout;
import ai.konduit.serving.data.image.convert.config.NDFormat;
import ai.konduit.serving.pipeline.api.data.NDArrayType;
import picocli.CommandLine;
import java.util.HashMap;
import java.util.Map;
/**
* Converts a comma-separated string into an {@link ImageToNDArrayConfig}:
* each field of {@link ImageToNDArrayConfig} is given as a
* fieldName=value pair, with pairs separated by commas.
*/
public class ImageToNDArrayConfigTypeConverter implements CommandLine.ITypeConverter<ImageToNDArrayConfig> {
@Override
public ImageToNDArrayConfig convert(String value) throws Exception {
StringSplitter stringSplitter = new StringSplitter(",");
Map<String,String> input = stringSplitter.splitResult(value);
ImageToNDArrayConfig imageToNDArrayConfig = new ImageToNDArrayConfig();
for(Map.Entry<String,String> entry : input.entrySet()) {
switch(entry.getKey()) {
case "height":
imageToNDArrayConfig.height(Integer.parseInt(entry.getValue()));
break;
case "width":
imageToNDArrayConfig.width(Integer.parseInt(entry.getValue()));
break;
case "format":
imageToNDArrayConfig.format(NDFormat.valueOf(entry.getValue().toUpperCase()));
break;
case "channelLayout":
imageToNDArrayConfig.channelLayout(NDChannelLayout.valueOf(entry.getValue().toUpperCase()));
break;
case "aspectRatioHandling":
imageToNDArrayConfig.aspectRatioHandling(AspectRatioHandling.valueOf(entry.getValue().toUpperCase()));
break;
case "dataType":
imageToNDArrayConfig.dataType(NDArrayType.valueOf(entry.getValue().toUpperCase()));
break;
case "listHandling":
imageToNDArrayConfig.listHandling(ImageToNDArrayConfig.ListHandling.valueOf(entry.getValue().toUpperCase()));
break;
case "normalization":
ImageNormalizationTypeConverter imageNormalizationTypeConverter = new ImageNormalizationTypeConverter();
ImageNormalization convert = imageNormalizationTypeConverter.convert(entry.getValue());
imageToNDArrayConfig.normalization(convert);
break;
}
}
return imageToNDArrayConfig;
}
}
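/*
 * Usage sketch (editor's addition): builds an ImageToNDArrayConfig from the
 * comma-delimited key=value format handled above. The enum constants
 * CHANNELS_FIRST and FLOAT are assumptions about NDFormat and NDArrayType.
 */
import ai.konduit.serving.configcreator.converter.ImageToNDArrayConfigTypeConverter;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
public class ImageToNDArrayConfigTypeConverterExample {
    public static void main(String[] args) throws Exception {
        ImageToNDArrayConfigTypeConverter converter = new ImageToNDArrayConfigTypeConverter();
        ImageToNDArrayConfig config =
                converter.convert("height=224,width=224,format=CHANNELS_FIRST,dataType=FLOAT");
        System.out.println(config);
    }
}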
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/converter/LearningRateScheduleConverter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator.converter;
import ai.konduit.serving.configcreator.StringSplitter;
import org.nd4j.linalg.schedule.*;
import picocli.CommandLine;
import java.util.Collections;
import java.util.Map;
public class LearningRateScheduleConverter implements CommandLine.ITypeConverter<ISchedule> {
public final static String DELIMITER = ",";
public final static String SCHEDULE_TYPE_KEY = "type";
public enum Scheduletype {
CYCLE,
EXPONENTIAL,
FIXED,
INVERSE,
MAP,
POLY,
RAMP,
SIGMOID,
STEP
}
@Override
public ISchedule convert(String value) throws Exception {
StringSplitter stringSplitter = new StringSplitter(DELIMITER);
Map<String,String> result = stringSplitter.splitResult(value);
String type = result.get(SCHEDULE_TYPE_KEY);
result.remove(SCHEDULE_TYPE_KEY);
return instanceForType(type,result);
}
private ISchedule instanceForType(String type,Map<String,String> configurationValues) {
switch(Scheduletype.valueOf(type.toUpperCase())) {
            case MAP:
                //NOTE: the map's values are not parsed from the input; an empty map is used as a placeholder
                return new MapSchedule(ScheduleType.EPOCH, Collections.emptyMap());
            case POLY:
                //NOTE: maxIter is fixed at 1
                return new PolySchedule(ScheduleType.EPOCH,getValue(configurationValues,"initialValue"),getValue(configurationValues,"power"),1);
case STEP:
return new StepSchedule(ScheduleType.EPOCH,getValue(configurationValues,"initialValue"),getValue(configurationValues,"decayRate"),getValue(configurationValues,"step"));
case CYCLE:
return new CycleSchedule(ScheduleType.EPOCH,getValue(configurationValues,"initialLearningRate"),getValue(configurationValues,"maxLearningRate"),getIntValue(configurationValues,"cycleLength"),getIntValue(configurationValues,"annealingLength"),getValue(configurationValues,"annealingDecay"));
case FIXED:
return new FixedSchedule(getValue(configurationValues,"value"));
case INVERSE:
return new InverseSchedule(ScheduleType.EPOCH,getValue(configurationValues,"initialValue"),getValue(configurationValues,"gamma"),getValue(configurationValues,"power"));
case SIGMOID:
return new SigmoidSchedule(ScheduleType.EPOCH,getValue(configurationValues,"initialValue"),getValue(configurationValues,"gamma"),getIntValue(configurationValues,"stepSize"));
case EXPONENTIAL:
return new ExponentialSchedule(ScheduleType.EPOCH,getValue(configurationValues,"initialValue"),getValue(configurationValues,"gamma"));
default:
throw new IllegalArgumentException("Unable to create learning rate schedule of type " + type);
}
}
private int getIntValue(Map<String,String> getFrom,String key) {
if(!getFrom.containsKey(key)) {
throw new IllegalArgumentException("Unable to find configuration value " + key);
}
return Integer.parseInt(getFrom.get(key));
}
private double getValue(Map<String,String> getFrom,String key) {
if(!getFrom.containsKey(key)) {
throw new IllegalArgumentException("Unable to find configuration value " + key);
}
return Double.parseDouble(getFrom.get(key));
}
}
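/*
 * Usage sketch (editor's addition): creates a step decay schedule using the
 * keys read by instanceForType. valueAt(iteration, epoch) is the standard
 * ISchedule query method.
 */
import ai.konduit.serving.configcreator.converter.LearningRateScheduleConverter;
import org.nd4j.linalg.schedule.ISchedule;
public class LearningRateScheduleConverterExample {
    public static void main(String[] args) throws Exception {
        LearningRateScheduleConverter converter = new LearningRateScheduleConverter();
        ISchedule schedule = converter.convert("type=step,initialValue=0.1,decayRate=0.5,step=100");
        //schedules created here use ScheduleType.EPOCH, so the second argument drives the value
        System.out.println(schedule.valueAt(0, 0));
    }
}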
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/converter/NameDimensionConverter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator.converter;
import ai.konduit.serving.tensorrt.NamedDimension;
import ai.konduit.serving.tensorrt.NamedDimensionList;
import picocli.CommandLine;
public class NameDimensionConverter implements CommandLine.ITypeConverter<NamedDimensionList> {
public final static String ENTRY_DELIMITER = ";";
@Override
public NamedDimensionList convert(String value) throws Exception {
String[] split = value.split(ENTRY_DELIMITER);
NamedDimensionList namedDimensions = new NamedDimensionList();
for(String entry : split) {
NamedDimension.NamedDimensionBuilder builder = NamedDimension.builder();
String[] entrySplit = entry.split("=");
String key = entrySplit[0];
String[] valSplit = entrySplit[1].split(",");
long[] result = new long[valSplit.length];
for(int i = 0; i < result.length; i++) {
result[i] = Long.parseLong(valSplit[i]);
}
builder.name(key);
builder.dimensions(result);
            namedDimensions.add(builder.build());
}
return namedDimensions;
}
}
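/*
 * Usage sketch (editor's addition): parses two named dimension entries. The
 * dimension names "input" and "output" are illustrative placeholders.
 */
import ai.konduit.serving.configcreator.converter.NameDimensionConverter;
import ai.konduit.serving.tensorrt.NamedDimensionList;
public class NameDimensionConverterExample {
    public static void main(String[] args) throws Exception {
        NameDimensionConverter converter = new NameDimensionConverter();
        //entries separated by ';', each entry is name=comma-separated dimension values
        NamedDimensionList dims = converter.convert("input=1,3,224,224;output=1,1000");
        System.out.println(dims.size());
    }
}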
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/converter/PointConverter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator.converter;
import ai.konduit.serving.configcreator.StringSplitter;
import ai.konduit.serving.pipeline.api.data.Point;
import ai.konduit.serving.pipeline.impl.data.point.NDPoint;
import picocli.CommandLine;
import java.util.Map;
/**
 * Converts a {@link Point} (usually an {@link NDPoint} implementation)
 * from a comma-delimited string of fieldName=value pairs.
 * For the coordinate array {@link NDPoint#coords}, the values are
 * separated by spaces.
 */
public class PointConverter implements CommandLine.ITypeConverter<Point> {
@Override
public Point convert(String value) throws Exception {
StringSplitter stringSplitter = new StringSplitter(",");
Map<String,String> input = stringSplitter.splitResult(value);
double[] coords = null;
String label = null;
String probability = null;
for(Map.Entry<String,String> entry : input.entrySet()) {
switch(entry.getKey()) {
//x,y
case "x":
if(coords == null) {
coords = new double[2];
}
coords[0] = Double.parseDouble(entry.getValue());
break;
case "y":
if(coords == null) {
coords = new double[2];
}
coords[1] = Double.parseDouble(entry.getValue());
break;
case "coords":
String[] coordSplit = entry.getValue().split(" ");
double[] parsed = new double[coordSplit.length];
for(int i = 0; i < coordSplit.length; i++) {
parsed[i] = Double.parseDouble(coordSplit[i]);
}
coords = parsed;
break;
case "label":
label = entry.getValue();
break;
case "probability":
probability = entry.getValue();
break;
}
}
return Point.create(coords,label,probability == null ? 0.0 : Double.parseDouble(probability));
}
}
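/*
 * Usage sketch (editor's addition): builds a 2D point from x/y keys; the
 * "coords" key with space-separated values would be used for higher dimensions.
 * The label "face" is an illustrative placeholder.
 */
import ai.konduit.serving.configcreator.converter.PointConverter;
import ai.konduit.serving.pipeline.api.data.Point;
public class PointConverterExample {
    public static void main(String[] args) throws Exception {
        PointConverter converter = new PointConverter();
        Point point = converter.convert("x=0.5,y=0.25,label=face,probability=0.8");
        System.out.println(point);
    }
}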
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/converter/PythonConfigTypeConverter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator.converter;
import ai.konduit.serving.configcreator.StringSplitter;
import ai.konduit.serving.model.PythonConfig;
import ai.konduit.serving.model.PythonIO;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.python.models.AppendType;
import ai.konduit.serving.pipeline.api.python.models.PythonConfigType;
import picocli.CommandLine;
import java.util.Map;
public class PythonConfigTypeConverter implements CommandLine.ITypeConverter<PythonConfig> {
@Override
public PythonConfig convert(String value) throws Exception {
        //strip any quote characters so the remaining text parses cleanly
        value = value.replace("\"","");
StringSplitter stringSplitter = new StringSplitter(",");
Map<String, String> stringStringMap = stringSplitter.splitResult(value);
PythonConfig.PythonConfigBuilder builder = PythonConfig.builder();
for(Map.Entry<String,String> entry : stringStringMap.entrySet()) {
switch(entry.getKey()) {
case "appendType":
builder.appendType(AppendType.valueOf(entry.getValue().toUpperCase()));
break;
case "pythonPath":
builder.pythonPath(entry.getValue());
break;
case "pythonConfigType":
builder.pythonConfigType(PythonConfigType.valueOf(entry.getValue().toUpperCase()));
break;
case "pythonCode":
builder.pythonCode(entry.getValue());
break;
case "pythonCodePath":
builder.pythonCodePath(entry.getValue());
break;
case "returnAllInputs":
builder.returnAllInputs(Boolean.parseBoolean(entry.getValue()));
break;
case "setupAndRun":
builder.setupAndRun(Boolean.parseBoolean(entry.getValue()));
break;
case "pythonLibrariesPath":
builder.pythonLibrariesPath(entry.getValue());
break;
case "ioInput":
PythonIO.PythonIOBuilder pythonIOBuilder = PythonIO.builder();
String[] ioDescriptor = entry.getValue().split(" ");
pythonIOBuilder.name(ioDescriptor[0]);
if(ioDescriptor.length > 1)
pythonIOBuilder.pythonType(ioDescriptor[1]);
if(ioDescriptor.length > 2)
pythonIOBuilder.type(ValueType.valueOf(ioDescriptor[2]));
if(ioDescriptor.length > 3)
pythonIOBuilder.secondaryType(ValueType.valueOf(ioDescriptor[3]));
builder.ioInput(ioDescriptor[0],pythonIOBuilder
.build());
break;
case "ioOutput":
PythonIO.PythonIOBuilder pythonIOBuilderOut = PythonIO.builder();
String[] ioDescriptorOut = entry.getValue().split(" ");
pythonIOBuilderOut.name(ioDescriptorOut[0]);
if(ioDescriptorOut.length > 1)
pythonIOBuilderOut.pythonType(ioDescriptorOut[1]);
if(ioDescriptorOut.length > 2)
pythonIOBuilderOut.type(ValueType.valueOf(ioDescriptorOut[2]));
if(ioDescriptorOut.length > 3)
pythonIOBuilderOut.secondaryType(ValueType.valueOf(ioDescriptorOut[3]));
builder.ioOutput(ioDescriptorOut[0],pythonIOBuilderOut.build());
break;
}
}
return builder.build();
}
}
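/*
 * Usage sketch (editor's addition): configures a python step from the
 * comma-delimited format above. The path /tmp/script.py is a placeholder, and
 * the python type "str" plus the STRING value type are illustrative assumptions.
 */
import ai.konduit.serving.configcreator.converter.PythonConfigTypeConverter;
import ai.konduit.serving.model.PythonConfig;
public class PythonConfigTypeConverterExample {
    public static void main(String[] args) throws Exception {
        PythonConfigTypeConverter converter = new PythonConfigTypeConverter();
        //ioInput/ioOutput descriptors are space separated: name pythonType valueType [secondaryType]
        PythonConfig config = converter.convert(
                "pythonCodePath=/tmp/script.py,ioInput=input str STRING,ioOutput=output str STRING");
        System.out.println(config);
    }
}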
|
0
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator
|
java-sources/ai/konduit/serving/konduit-serving-config-creator/0.3.0/ai/konduit/serving/configcreator/converter/UpdaterConverter.java
|
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.configcreator.converter;
import ai.konduit.serving.configcreator.StringSplitter;
import org.nd4j.linalg.learning.config.*;
import org.nd4j.linalg.schedule.ISchedule;
import picocli.CommandLine;
import java.util.Map;
public class UpdaterConverter implements CommandLine.ITypeConverter<IUpdater> {
public final static String DELIMITER = ",";
public final static String UPDATER_TYPE_KEY = "type";
public enum UpdaterTypes {
AMSGRAD,
ADABELIEF,
ADAGRAD,
ADADELTA,
ADAMAX,
ADAM,
NADAM,
NESTEROVS,
NOOP,
RMSPROP,
SGD
}
@Override
public IUpdater convert(String value) throws Exception {
StringSplitter stringSplitter = new StringSplitter(DELIMITER);
Map<String,String> result = stringSplitter.splitResult(value);
if(!result.containsKey(UPDATER_TYPE_KEY)) {
throw new IllegalArgumentException("Please specify an updater type for proper creation.");
}
IUpdater updater = instanceForName(result.get(UPDATER_TYPE_KEY));
setValuesFor(updater,result);
return updater;
}
private void setValuesFor(IUpdater updater,Map<String,String> valuesToSet) throws Exception {
for(Map.Entry<String,String> field : valuesToSet.entrySet()) {
if(updater instanceof Sgd) {
Sgd sgd = (Sgd) updater;
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
sgd.setLearningRate(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
sgd.setLearningRateSchedule(convert);
}
} else if(updater instanceof RmsProp) {
RmsProp rmsProp = (RmsProp) updater;
if(field.getKey().equals("epsilon")) {
rmsProp.setEpsilon(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("learningRate")) {
rmsProp.setLearningRate(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("rmsDecay")) {
rmsProp.setRmsDecay(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
rmsProp.setLearningRateSchedule(convert);
}
} else if(updater instanceof AMSGrad) {
AMSGrad amsGrad = (AMSGrad) updater;
if(field.getKey().equals("beta1")) {
amsGrad.setBeta1(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("beta2")) {
amsGrad.setBeta2(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("epsilon")) {
amsGrad.setEpsilon(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
amsGrad.setLearningRate(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
amsGrad.setLearningRateSchedule(convert);
}
} else if(updater instanceof AdaDelta) {
AdaDelta adaDelta = (AdaDelta) updater;
if(field.getKey().equals("epsilon")) {
adaDelta.setEpsilon(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("rho")) {
adaDelta.setRho(Double.parseDouble(field.getValue()));
}
            } else if(updater instanceof NoOp) {
                //NoOp has no configurable fields
} else if(updater instanceof AdaGrad) {
AdaGrad adaGrad = (AdaGrad) updater;
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
adaGrad.setLearningRate(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
adaGrad.setLearningRateSchedule(convert);
}
if(field.getKey().equals("epsilon")) {
adaGrad.setEpsilon(Double.parseDouble(field.getValue()));
}
} else if(updater instanceof Adam) {
Adam adam = (Adam) updater;
if(field.getKey().equals("beta1")) {
adam.setBeta1(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("beta2")) {
adam.setBeta2(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("epsilon")) {
adam.setEpsilon(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
adam.setLearningRate(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
adam.setLearningRateSchedule(convert);
}
} else if(updater instanceof AdaMax) {
AdaMax adaMax = (AdaMax) updater;
if(field.getKey().equals("beta1")) {
adaMax.setBeta1(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("beta2")) {
adaMax.setBeta2(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("epsilon")) {
adaMax.setEpsilon(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
adaMax.setLearningRate(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
adaMax.setLearningRateSchedule(convert);
}
} else if(updater instanceof AdaBelief) {
AdaBelief adaBelief = (AdaBelief) updater;
if(field.getKey().equals("beta1")) {
adaBelief.setBeta1(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("beta2")) {
adaBelief.setBeta2(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("epsilon")) {
adaBelief.setEpsilon(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
adaBelief.setLearningRate(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
adaBelief.setLearningRateSchedule(convert);
}
} else if(updater instanceof Nesterovs) {
Nesterovs nesterovs = (Nesterovs) updater;
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
nesterovs.setLearningRate(rate);
}
if(field.getKey().equals("momentum")) {
Double rate = Double.parseDouble(field.getValue());
nesterovs.setMomentum(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
nesterovs.setLearningRateSchedule(convert);
}
if(field.getKey().equals("momentumISchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
nesterovs.setMomentumISchedule(convert);
}
} else if(updater instanceof Nadam) {
Nadam nadam = (Nadam) updater;
if(field.getKey().equals("beta1")) {
nadam.setBeta1(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("beta2")) {
nadam.setBeta2(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("epsilon")) {
nadam.setEpsilon(Double.parseDouble(field.getValue()));
}
if(field.getKey().equals("learningRate")) {
Double rate = Double.parseDouble(field.getValue());
nadam.setLearningRate(rate);
}
if(field.getKey().equals("learningRateSchedule")) {
LearningRateScheduleConverter learningRateScheduleConverter = new LearningRateScheduleConverter();
ISchedule convert = learningRateScheduleConverter.convert(field.getValue());
nadam.setLearningRateSchedule(convert);
}
}
}
}
private IUpdater instanceForName(String name) {
switch(UpdaterTypes.valueOf(name.toUpperCase())) {
case SGD:
return new Sgd();
case ADAM:
return new Adam();
case NOOP:
return new NoOp();
case NADAM:
return new Nadam();
case ADAMAX:
return new AdaMax();
case ADAGRAD:
return new AdaGrad();
case AMSGRAD:
return new AMSGrad();
case RMSPROP:
return new RmsProp();
case ADADELTA:
return new AdaDelta();
case ADABELIEF:
return new AdaBelief();
case NESTEROVS:
return new Nesterovs();
default:
throw new IllegalArgumentException("Illegal type " + name);
}
}
}
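/*
 * Usage sketch (editor's addition): creates an Adam updater and sets the
 * fields handled in setValuesFor.
 */
import ai.konduit.serving.configcreator.converter.UpdaterConverter;
import org.nd4j.linalg.learning.config.IUpdater;
public class UpdaterConverterExample {
    public static void main(String[] args) throws Exception {
        UpdaterConverter converter = new UpdaterConverter();
        IUpdater updater = converter.convert("type=adam,learningRate=0.001,beta1=0.9,beta2=0.999");
        System.out.println(updater);
    }
}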
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/metrics/ClassificationMetrics.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.metrics;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.config.metrics.MetricsRenderer;
import ai.konduit.serving.config.metrics.impl.ClassificationMetricsConfig;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import lombok.Getter;
import org.datavec.api.records.Record;
import org.datavec.api.writable.NDArrayWritable;
import org.nd4j.common.primitives.AtomicDouble;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Supplier;
/**
 * Classification metrics that count how often each class
 * occurs during inference.
 *
 * @author Adam Gibson
 */
public class ClassificationMetrics implements MetricsRenderer {
private Iterable<Tag> tags;
@Getter
private List<Gauge> classCounterIncrement;
@Getter
private List<CurrentClassTrackerCount> classTrackerCounts;
private ClassificationMetricsConfig classificationMetricsConfig;
public ClassificationMetrics(ClassificationMetricsConfig classificationMetricsConfig) {
this(classificationMetricsConfig, Arrays.asList(new ImmutableTag("machinelearning","classification")));
}
public ClassificationMetrics(ClassificationMetricsConfig classificationMetricsConfig, Iterable<Tag> tags) {
this.classificationMetricsConfig = classificationMetricsConfig;
this.tags = tags;
classCounterIncrement = new ArrayList<>();
classTrackerCounts = new ArrayList<>();
}
@Override
public void bindTo(MeterRegistry meterRegistry) {
for(int i = 0; i < classificationMetricsConfig.getClassificationLabels().size(); i++) {
CurrentClassTrackerCount classTrackerCount = new CurrentClassTrackerCount();
classTrackerCounts.add(classTrackerCount);
classCounterIncrement.add(Gauge.builder(classificationMetricsConfig.getClassificationLabels().get(i),classTrackerCount)
.tags(tags)
.description("Classification counts seen so far for label " + classificationMetricsConfig.getClassificationLabels().get(i))
.baseUnit("classification.outcome")
.register(meterRegistry));
}
}
@Override
public MetricsConfig config() {
return classificationMetricsConfig;
}
@Override
public void updateMetrics(Object... args) {
if(args[0] instanceof Record) {
Record records = (Record) args[0];
incrementClassificationCounters(new Record[]{records});
}
else if(args[0] instanceof Record[]) {
Record[] records = (Record[]) args[0];
incrementClassificationCounters(records);
}
else if(args[0] instanceof INDArray) {
INDArray output = (INDArray) args[0];
incrementClassificationCounters(new INDArray[] {output});
}
else if(args[0] instanceof INDArray[]) {
INDArray[] output = (INDArray[]) args[0];
incrementClassificationCounters(output);
}
}
/**
* A counter that resets the in memory value when
* the metric is exported. It is assumed that when exported,
* a storage system captures the sampled value.
*
*/
private static class CurrentClassTrackerCount implements Supplier<Number> {
private AtomicDouble currCounter = new AtomicDouble(0);
public void increment(double numberToIncrementBy) {
currCounter.getAndAdd(numberToIncrementBy);
}
public void reset() {
currCounter.set(0.0);
}
@Override
        public Number get() {
double ret = currCounter.get();
reset();
return ret;
}
}
private void incrementClassificationCounters(INDArray[] outputs) {
handleNdArray(outputs[0]);
}
private void incrementClassificationCounters(Record[] records) {
if(classCounterIncrement != null) {
NDArrayWritable ndArrayWritable = (NDArrayWritable) records[0].getRecord().get(0);
INDArray output = ndArrayWritable.get();
handleNdArray(output);
}
}
private void handleNdArray(INDArray array) {
INDArray argMax = Nd4j.argMax(array, -1);
for(int i = 0; i < argMax.length(); i++) {
CurrentClassTrackerCount classTrackerCount = classTrackerCounts.get(argMax.getInt(i));
classTrackerCount.increment(1.0);
}
}
}
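/*
 * Usage sketch (editor's addition): binds the metrics to a registry and feeds
 * one batch of softmax output. The assumption that ClassificationMetricsConfig
 * exposes a Lombok-style builder with a classificationLabels field is not
 * confirmed by this file; only the getter is visible above.
 */
import ai.konduit.serving.config.metrics.impl.ClassificationMetricsConfig;
import ai.konduit.serving.metrics.ClassificationMetrics;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import java.util.Arrays;
import org.nd4j.linalg.factory.Nd4j;
public class ClassificationMetricsExample {
    public static void main(String[] args) {
        ClassificationMetricsConfig config = ClassificationMetricsConfig.builder()
                .classificationLabels(Arrays.asList("cat", "dog"))
                .build();
        ClassificationMetrics metrics = new ClassificationMetrics(config);
        metrics.bindTo(new SimpleMeterRegistry());
        //two rows of softmax output: row 0 counts toward "cat", row 1 toward "dog"
        metrics.updateMetrics(Nd4j.create(new double[][]{{0.9, 0.1}, {0.2, 0.8}}));
    }
}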
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/metrics/MultiLabelMetrics.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.metrics;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.config.metrics.MetricsRenderer;
import ai.konduit.serving.config.metrics.impl.MultiLabelMetricsConfig;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import lombok.Getter;
import org.datavec.api.records.Record;
import org.datavec.api.writable.NDArrayWritable;
import org.nd4j.common.primitives.AtomicDouble;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;
/**
 * A {@link MetricsRenderer} that takes in matrices of counts,
 * where each column (indexed by the labels specified in the
 * {@link #multiLabelMetricsConfig}) of the array passed to
 * {@link #updateMetrics(Object...)} holds the amount by which
 * to increment that label's counter.
 *
 * Note that, as with {@link ClassificationMetrics},
 * the counts reset when sampled via prometheus.
 *
 * @author Adam Gibson
 */
public class MultiLabelMetrics implements MetricsRenderer {
@Getter
private MultiLabelMetricsConfig multiLabelMetricsConfig;
@Getter
private List<CurrentClassTrackerCount> classTrackerCounts;
private Iterable<Tag> tags;
@Getter
private List<Gauge> classCounterIncrement;
public MultiLabelMetrics(MultiLabelMetricsConfig multiLabelMetricsConfig, Iterable<Tag> tags) {
this.multiLabelMetricsConfig = multiLabelMetricsConfig;
this.tags = tags;
classTrackerCounts = new ArrayList<>();
classCounterIncrement = new ArrayList<>();
}
public MultiLabelMetrics(MultiLabelMetricsConfig multiLabelMetricsConfig) {
this(multiLabelMetricsConfig, Arrays.asList(new ImmutableTag("machinelearning","multilabel")));
}
@Override
public MetricsConfig config() {
return multiLabelMetricsConfig;
}
@Override
public void updateMetrics(Object... args) {
if(args[0] instanceof Record) {
Record records = (Record) args[0];
incrementClassificationCounters(new Record[]{records});
}
else if(args[0] instanceof Record[]) {
Record[] records = (Record[]) args[0];
incrementClassificationCounters(records);
}
else if(args[0] instanceof INDArray) {
INDArray output = (INDArray) args[0];
incrementClassificationCounters(new INDArray[] {output});
}
else if(args[0] instanceof INDArray[]) {
INDArray[] output = (INDArray[]) args[0];
incrementClassificationCounters(output);
}
}
    private void incrementClassificationCounters(INDArray[] outputs) {
        //delegate to handleNdArray, which implements the column-count semantics described in the class javadoc
        handleNdArray(outputs[0]);
    }
    private void incrementClassificationCounters(Record[] records) {
        if(classCounterIncrement != null) {
            NDArrayWritable ndArrayWritable = (NDArrayWritable) records[0].getRecord().get(0);
            INDArray output = ndArrayWritable.get();
            handleNdArray(output);
        }
    }
    private void handleNdArray(INDArray array) {
        if(array.isScalar()) {
            CurrentClassTrackerCount classTrackerCount = classTrackerCounts.get(0);
            classTrackerCount.increment(array.getDouble(0));
        }
        else if(array.isMatrix()) {
            //each column is a label; every row contributes its count for that label
            for(int i = 0; i < array.rows(); i++) {
                for(int j = 0; j < array.columns(); j++) {
                    CurrentClassTrackerCount classTrackerCount = classTrackerCounts.get(j);
                    classTrackerCount.increment(array.getDouble(i,j));
                }
            }
        }
        else if(array.isVector()) {
            //one count per label, indexed by position
            for (int i = 0; i < array.length(); i++) {
                CurrentClassTrackerCount classTrackerCount = classTrackerCounts.get(i);
                classTrackerCount.increment(array.getDouble(i));
            }
        }
    }
@Override
public void bindTo(MeterRegistry registry) {
for(int i = 0; i < multiLabelMetricsConfig.getLabels().size(); i++) {
CurrentClassTrackerCount classTrackerCount = new CurrentClassTrackerCount();
classTrackerCounts.add(classTrackerCount);
classCounterIncrement.add(Gauge.builder(multiLabelMetricsConfig.getLabels().get(i),classTrackerCount)
.tags(tags)
.description("Multi-label Classification counts seen so far for label " + multiLabelMetricsConfig.getLabels().get(i))
.baseUnit("multilabelclassification.outcome")
.register(registry));
}
}
/**
* A counter that resets the in memory value when
* the metric is exported. It is assumed that when exported,
* a storage system captures the sampled value.
*
*/
private static class CurrentClassTrackerCount implements Supplier<Number> {
private AtomicDouble currCounter = new AtomicDouble(0);
public void increment(double numberToIncrementBy) {
currCounter.getAndAdd(numberToIncrementBy);
}
public void reset() {
currCounter.set(0.0);
}
@Override
        public Number get() {
double ret = currCounter.get();
reset();
return ret;
}
}
}
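/*
 * Usage sketch (editor's addition): feeds a vector of per-label counts, matching
 * the column-count semantics described in the class javadoc. The assumption that
 * MultiLabelMetricsConfig exposes a Lombok-style builder with a labels field is
 * not confirmed by this file; only the getter is visible above.
 */
import ai.konduit.serving.config.metrics.impl.MultiLabelMetricsConfig;
import ai.konduit.serving.metrics.MultiLabelMetrics;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import java.util.Arrays;
import org.nd4j.linalg.factory.Nd4j;
public class MultiLabelMetricsExample {
    public static void main(String[] args) {
        MultiLabelMetricsConfig config = MultiLabelMetricsConfig.builder()
                .labels(Arrays.asList("red", "green", "blue"))
                .build();
        MultiLabelMetrics metrics = new MultiLabelMetrics(config);
        metrics.bindTo(new SimpleMeterRegistry());
        //one count per label: increments red by 1, green by 0, blue by 2
        metrics.updateMetrics(Nd4j.create(new double[]{1, 0, 2}));
    }
}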
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/metrics/NativeMetrics.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.metrics;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.config.metrics.MetricsRenderer;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import org.deeplearning4j.core.listener.DeviceMetric;
import org.deeplearning4j.core.listener.HardwareMetric;
import oshi.json.SystemInfo;
import java.util.Map;
import java.util.UUID;
import static java.util.Collections.emptyList;
/**
 * Metrics derived from
 * {@link HardwareMetric},
 * which contains current information about the system and its devices,
 * such as RAM, CPU load, and GPU information
 */
public class NativeMetrics implements MetricsRenderer {
private final Iterable<Tag> tags;
public NativeMetrics() {
this(emptyList());
}
public NativeMetrics(Iterable<Tag> tags) {
this.tags = tags;
}
@Override
public void bindTo(MeterRegistry registry) {
SystemInfo systemInfo = new SystemInfo();
HardwareMetric hardwareMetric = HardwareMetric.fromSystem(systemInfo, UUID.randomUUID().toString());
Gauge.builder("cpuload", hardwareMetric, HardwareMetric::getAveragedCpuLoad)
.tags(tags)
.description("Average cpu load")
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
Gauge.builder("memoryuse", hardwareMetric, HardwareMetric::getCurrentMemoryUse)
.tags(tags)
.description("Memory use")
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
Gauge.builder("iowaittime", hardwareMetric, HardwareMetric::getIoWaitTime)
.tags(tags)
.description("I/O Wait time")
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
if (hardwareMetric.getGpuMetrics() != null)
for (Map.Entry<Integer, DeviceMetric> entry : hardwareMetric.getGpuMetrics().entrySet()) {
DeviceMetric deviceMetric = hardwareMetric.getGpuMetrics().get(entry.getKey());
                Gauge.builder("gpu." + entry.getKey() + ".bandwidth.d2d", deviceMetric, DeviceMetric::getBandwidthDeviceToDevice)
.tags(tags)
.description("Gpu " + entry.getKey() + " bandwidth device to device for device " + deviceMetric.getDeviceName())
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
                Gauge.builder("gpu." + entry.getKey() + ".bandwidth.d2h", deviceMetric, DeviceMetric::getBandwidthDeviceToHost)
.tags(tags)
.description("Gpu " + entry.getKey() + " bandwidth device to host for device " + deviceMetric.getDeviceName())
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
                Gauge.builder("gpu." + entry.getKey() + ".load", deviceMetric, DeviceMetric::getLoad)
.tags(tags)
.description("Gpu " + entry.getKey() + " current load for device " + deviceMetric.getDeviceName())
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
                Gauge.builder("gpu." + entry.getKey() + ".memavailable", deviceMetric, DeviceMetric::getMemAvailable)
.tags(tags)
.description("Gpu " + entry.getKey() + " current available memory for device " + deviceMetric.getDeviceName())
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
}
if (hardwareMetric.getPerCoreMetrics() != null)
for (Map.Entry<Integer, DeviceMetric> entry : hardwareMetric.getPerCoreMetrics().entrySet()) {
DeviceMetric deviceMetric = hardwareMetric.getPerCoreMetrics().get(entry.getKey());
                Gauge.builder("cpu." + entry.getKey() + ".load", deviceMetric, DeviceMetric::getLoad)
.tags(tags)
.description("Cpu " + entry.getKey() + " current load for device " + deviceMetric.getDeviceName())
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
                Gauge.builder("cpu." + entry.getKey() + ".memavailable", deviceMetric, DeviceMetric::getMemAvailable)
.tags(tags)
.description("Cpu " + entry.getKey() + " current available memory for device " + deviceMetric.getDeviceName())
.baseUnit("konduit-serving." + hardwareMetric.getHostName())
.register(registry);
}
}
@Override
public MetricsConfig config() {
return null;
}
@Override
public void updateMetrics(Object... args) {
}
}
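/*
 * Usage sketch (editor's addition): registers the hardware gauges against an
 * in-memory micrometer registry and prints the resulting meter names.
 */
import ai.konduit.serving.metrics.NativeMetrics;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
public class NativeMetricsExample {
    public static void main(String[] args) {
        SimpleMeterRegistry registry = new SimpleMeterRegistry();
        new NativeMetrics().bindTo(registry);
        registry.getMeters().forEach(meter -> System.out.println(meter.getId().getName()));
    }
}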
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/metrics/RegressionMetrics.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.metrics;
import ai.konduit.serving.config.metrics.ColumnDistribution;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.config.metrics.MetricsRenderer;
import ai.konduit.serving.config.metrics.impl.RegressionMetricsConfig;
import ai.konduit.serving.util.MetricRenderUtils;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import lombok.Getter;
import org.datavec.api.records.Record;
import org.datavec.api.transform.analysis.counter.StatCounter;
import org.datavec.api.writable.NDArrayWritable;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Supplier;
/**
* Regression metrics aggregated and displayed using
* {@link StatCounter} and {@link Gauge}
*
* @author Adam Gibson
*/
public class RegressionMetrics implements MetricsRenderer {
private Iterable<Tag> tags;
@Getter
private List<Gauge> outputStatsGauges;
private List<StatCounter> statCounters;
private RegressionMetricsConfig regressionMetricsConfig;
public RegressionMetrics(RegressionMetricsConfig regressionMetricsConfig) {
this(regressionMetricsConfig, Arrays.asList(new ImmutableTag("machinelearning","regression")));
}
public RegressionMetrics(RegressionMetricsConfig regressionMetricsConfig, Iterable<Tag> tags) {
this.regressionMetricsConfig = regressionMetricsConfig;
this.tags = tags;
outputStatsGauges = new ArrayList<>();
statCounters = new ArrayList<>();
}
@Override
public void bindTo(MeterRegistry meterRegistry) {
for(int i = 0; i < regressionMetricsConfig.getRegressionColumnLabels().size(); i++) {
StatCounter statCounter = new StatCounter();
statCounters.add(statCounter);
ColumnDistribution columnDistribution = regressionMetricsConfig.getColumnDistributions() != null &&
regressionMetricsConfig.getColumnDistributions().size() == regressionMetricsConfig.getRegressionColumnLabels().size() ?
regressionMetricsConfig.getColumnDistributions().get(i) : null;
StatCounterSupplier statCounterSupplier = new StatCounterSupplier(statCounter,regressionMetricsConfig.getSampleTypes().get(i),columnDistribution);
outputStatsGauges.add(Gauge.builder(regressionMetricsConfig.getRegressionColumnLabels().get(i),statCounterSupplier)
.tags(tags)
.description("Regression values seen so far for label " + regressionMetricsConfig.getRegressionColumnLabels().get(i))
.baseUnit("regression.outcome")
.register(meterRegistry));
}
}
private static class StatCounterSupplier implements Serializable,Supplier<Number> {
private StatCounter statCounter;
private RegressionMetricsConfig.SampleType sampleType;
private ColumnDistribution columnDistribution;
StatCounterSupplier(StatCounter statCounter, RegressionMetricsConfig.SampleType sampleType,ColumnDistribution columnDistribution) {
this.statCounter = statCounter;
this.sampleType = sampleType;
this.columnDistribution = columnDistribution;
}
@Override
public Double get() {
Double ret = null;
switch(sampleType) {
case SUM:
ret = statCounter.getSum();
break;
case MEAN:
ret = statCounter.getMean();
break;
case MIN:
ret = statCounter.getMin();
break;
case MAX:
ret = statCounter.getMax();
break;
case STDDEV_POP:
ret = statCounter.getStddev(true);
break;
case STDDEV_NOPOP:
ret = statCounter.getStddev(false);
break;
case VARIANCE_POP:
ret = statCounter.getVariance(true);
break;
case VARIANCE_NOPOP:
ret = statCounter.getVariance(false);
break;
default:
return 0.0;
}
if(columnDistribution != null) {
ret = MetricRenderUtils.deNormalizeValue(ret,columnDistribution);
}
return ret;
}
}
@Override
public MetricsConfig config() {
return regressionMetricsConfig;
}
@Override
public void updateMetrics(Object... args) {
if(args[0] instanceof Record) {
Record records = (Record) args[0];
incrementRegressionCounters(new Record[]{records});
}
else if(args[0] instanceof Record[]) {
Record[] records = (Record[]) args[0];
incrementRegressionCounters(records);
}
else if(args[0] instanceof INDArray) {
INDArray output = (INDArray) args[0];
incrementRegressionCounters(new INDArray[] {output});
}
else if(args[0] instanceof INDArray[]) {
INDArray[] output = (INDArray[]) args[0];
incrementRegressionCounters(output);
}
}
private void incrementRegressionCounters(INDArray[] outputs) {
synchronized (statCounters) {
handleNdArray(outputs[0]);
}
}
private void incrementRegressionCounters(Record[] records) {
synchronized (statCounters) {
NDArrayWritable ndArrayWritable = (NDArrayWritable) records[0].getRecord().get(0);
handleNdArray(ndArrayWritable.get());
}
}
private void handleNdArray(INDArray output) {
if(output.isVector()) {
for(int i = 0; i < output.length(); i++) {
statCounters.get(i).add(output.getDouble(i));
}
}
        else if(output.isMatrix() && output.length() > 1) {
            //each column is a regression output; rows are examples in the batch
            for(int i = 0; i < output.rows(); i++) {
                for(int j = 0; j < output.columns(); j++) {
                    statCounters.get(j).add(output.getDouble(i,j));
                }
            }
        }
else if(output.isScalar()) {
statCounters.get(0).add(output.sumNumber().doubleValue());
}
else {
throw new IllegalArgumentException("Only vectors and matrices supported right now");
}
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/normalizer/CustomImagePreProcessingScaler.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.normalizer;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.api.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerType;
/**
 * Created by susaneraly on 6/23/16.
 * A preprocessor specifically for images that applies min-max scaling.
 * It can take a range, so pixel values can be scaled from 0..255 to minRange..maxRange
 * (default minRange = 0 and maxRange = 1).
 * If pixel values are not 8 bits, you can specify the number of bits as the third constructor argument.
 * For values that are already floating point, specify the number of bits as 1.
 */
@Slf4j
@Getter
@Setter
@EqualsAndHashCode
public class CustomImagePreProcessingScaler implements DataNormalization {
private double minRange, maxRange;
private double maxPixelVal;
private int maxBits;
public CustomImagePreProcessingScaler() {
this(0, 1, 8);
}
public CustomImagePreProcessingScaler(double a, double b) {
this(a, b, 8);
}
/**
* Preprocessor can take a range as minRange and maxRange
*
* @param a, default = 0
* @param b, default = 1
* @param maxBits in the image, default = 8
*/
    public CustomImagePreProcessingScaler(double a, double b, int maxBits) {
        //Image values are not always from 0 to 255:
        //some images are 16-bit or 32-bit, integer or float, and float images typically already hold values in [0..1]
        //If the max expected value is 1, maxBits should be specified as 1
        this.maxBits = maxBits;
        maxPixelVal = Math.pow(2, maxBits) - 1;
        this.minRange = a;
        this.maxRange = b;
    }
    /**
     * Fit a dataset (compute statistics based only on this dataset).
     * This scaler is configured directly, so fitting is a no-op.
     *
     * @param dataSet the dataset to compute on
     */
@Override
public void fit(DataSet dataSet) {
}
/**
* Iterates over a dataset
* accumulating statistics for normalization
*
* @param iterator the iterator to use for
* collecting statistics.
*/
@Override
public void fit(DataSetIterator iterator) {
}
@Override
public void preProcess(DataSet toPreProcess) {
INDArray features = toPreProcess.getFeatures();
this.preProcess(features);
}
public void preProcess(INDArray features) {
features.divi(this.maxPixelVal); //Scaled to 0->1
if (this.maxRange - this.minRange != 1)
features.muli(this.maxRange - this.minRange); //Scaled to minRange -> maxRange
if (this.minRange != 0)
features.addi(this.minRange); //Offset by minRange
}
/**
* Transform the data
*
* @param toPreProcess the dataset to transform
*/
@Override
public void transform(DataSet toPreProcess) {
this.preProcess(toPreProcess);
}
@Override
public void transform(INDArray features) {
this.preProcess(features);
}
@Override
public void transform(INDArray features, INDArray featuresMask) {
transform(features);
}
@Override
public void transformLabel(INDArray label) {
//No op
}
@Override
public void transformLabel(INDArray labels, INDArray labelsMask) {
transformLabel(labels);
}
@Override
public void revert(DataSet toRevert) {
revertFeatures(toRevert.getFeatures());
}
@Override
public NormalizerType getType() {
return NormalizerType.CUSTOM;
}
@Override
public void revertFeatures(INDArray features) {
if (minRange != 0) {
features.subi(minRange);
}
if (maxRange - minRange != 1.0) {
features.divi(maxRange - minRange);
}
features.muli(this.maxPixelVal);
}
@Override
public void revertFeatures(INDArray features, INDArray featuresMask) {
revertFeatures(features);
}
@Override
public void revertLabels(INDArray labels) {
//No op
}
@Override
public void revertLabels(INDArray labels, INDArray labelsMask) {
revertLabels(labels);
}
@Override
public void fitLabel(boolean fitLabels) {
if (fitLabels) {
log.warn("Labels fitting not currently supported for CustomImagePreProcessingScaler. Labels will not be modified");
}
}
@Override
public boolean isFitLabel() {
return false;
}
}
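/*
 * Usage sketch (editor's addition): scales 8-bit pixel values into [0, 1]
 * in place and then reverts them to the original range.
 */
import ai.konduit.serving.normalizer.CustomImagePreProcessingScaler;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
public class CustomImagePreProcessingScalerExample {
    public static void main(String[] args) {
        CustomImagePreProcessingScaler scaler = new CustomImagePreProcessingScaler(0, 1, 8);
        INDArray pixels = Nd4j.create(new double[]{0, 127.5, 255});
        scaler.transform(pixels);        //now approximately [0.0, 0.5, 1.0]
        System.out.println(pixels);
        scaler.revertFeatures(pixels);   //back to [0, 127.5, 255]
        System.out.println(pixels);
    }
}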
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/normalizer/CustomImagePreProcessingSerializerStrategy.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.normalizer;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.CustomSerializerStrategy;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerSerializerStrategy;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerType;
import java.io.*;
/**
* {@link NormalizerSerializerStrategy}
* for {@link CustomImagePreProcessingScaler}
* <p>
* Saves the min range, max range, and max pixel value as
* doubles
*
* @author Adam Gibson
*/
public class CustomImagePreProcessingSerializerStrategy extends CustomSerializerStrategy<CustomImagePreProcessingScaler> {
@Override
public void write(CustomImagePreProcessingScaler normalizer, OutputStream stream) throws IOException {
try (DataOutputStream dataOutputStream = new DataOutputStream(stream)) {
dataOutputStream.writeDouble(normalizer.getMinRange());
dataOutputStream.writeDouble(normalizer.getMaxRange());
dataOutputStream.writeDouble(normalizer.getMaxPixelVal());
dataOutputStream.flush();
}
}
@Override
public CustomImagePreProcessingScaler restore(InputStream stream) throws IOException {
        DataInputStream dataInputStream = new DataInputStream(stream);
        double minRange = dataInputStream.readDouble();
        double maxRange = dataInputStream.readDouble();
        double maxPixelVal = dataInputStream.readDouble();
CustomImagePreProcessingScaler ret = new CustomImagePreProcessingScaler(minRange, maxRange);
ret.setMaxPixelVal(maxPixelVal);
return ret;
}
@Override
public NormalizerType getSupportedType() {
return NormalizerType.CUSTOM;
}
@Override
public Class<CustomImagePreProcessingScaler> getSupportedClass() {
return CustomImagePreProcessingScaler.class;
}
}
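/*
 * Usage sketch (editor's addition): registers the custom strategy with dl4j's
 * NormalizerSerializer, then round-trips a scaler through a file. The file name
 * scaler.bin is a placeholder.
 */
import ai.konduit.serving.normalizer.CustomImagePreProcessingScaler;
import ai.konduit.serving.normalizer.CustomImagePreProcessingSerializerStrategy;
import java.io.File;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerSerializer;
public class CustomImagePreProcessingSerializerExample {
    public static void main(String[] args) throws Exception {
        NormalizerSerializer serializer = NormalizerSerializer.getDefault();
        serializer.addStrategy(new CustomImagePreProcessingSerializerStrategy());
        File file = new File("scaler.bin");
        serializer.write(new CustomImagePreProcessingScaler(0, 1, 8), file);
        CustomImagePreProcessingScaler restored = serializer.restore(file);
        System.out.println(restored.getMaxPixelVal()); //255.0 for 8-bit input
    }
}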
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/normalizer
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/normalizer/loader/DefaultNormalizationLoader.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.normalizer.loader;
import lombok.AllArgsConstructor;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerSerializer;
/**
* The default normalization loader
* uses dl4j's {@link NormalizerSerializer}
 * to load the normalizer from a given path.
* <p>
* Typically this will be a persisted zip file.
* See {@link NormalizerSerializer}
* for more information on the expected file format.
*
* @author Adam Gibson
*/
@AllArgsConstructor
public class DefaultNormalizationLoader implements NormalizationLoader {
protected String path;
@Override
public DataNormalization load() throws Exception {
return NormalizerSerializer.getDefault().restore(path);
}
}
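/*
 * Usage sketch (editor's addition): loads a previously persisted normalizer.
 * The path normalizer.bin is a placeholder for a file written with
 * NormalizerSerializer.
 */
import ai.konduit.serving.normalizer.loader.DefaultNormalizationLoader;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
public class DefaultNormalizationLoaderExample {
    public static void main(String[] args) throws Exception {
        DataNormalization normalization = new DefaultNormalizationLoader("normalizer.bin").load();
        System.out.println(normalization.getType());
    }
}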
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/normalizer
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/normalizer/loader/NormalizationLoader.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.normalizer.loader;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
/**
* Handles loading a {@link DataNormalization}
* from disk.
*
* @author Adam Gibson
*/
public interface NormalizationLoader {
/**
 * Loads the {@link DataNormalization}
*
* @return the loaded {@link DataNormalization}
* @throws Exception if an error occurs loading the normalizer
*/
DataNormalization load() throws Exception;
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/output/types/ErrorResult.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.types;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
import java.util.LinkedHashMap;
import java.util.Map;
@AllArgsConstructor
@NoArgsConstructor
@Data
@Builder
public class ErrorResult<T> implements Serializable {
private T results;
@Builder.Default
private Map<Integer, String> errors = new LinkedHashMap<>();
@Builder.Default
private Map<Integer, Integer> resultIndices = new LinkedHashMap<>();
}
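/*
 * Builder sketch (illustration only, not part of the original file): pairing a
 * partial result with per-record error messages; all values here are made up.
 */
class ErrorResultExample {
public static void main(String[] args) {
ErrorResult<String> errorResult = ErrorResult.<String>builder().results("partial-output").build();
errorResult.getErrors().put(1, "record 1 could not be parsed");
errorResult.getResultIndices().put(0, 0);
System.out.println(errorResult);
}
}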
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl/arrow/ArrowBinaryInputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.pipeline.handlers.converter.multi.converter.impl.arrow;
import ai.konduit.serving.input.adapter.InputAdapter;
import ai.konduit.serving.input.conversion.ConverterArgs;
import io.vertx.core.buffer.Buffer;
import org.datavec.api.split.InputStreamInputSplit;
import org.datavec.arrow.recordreader.ArrowRecordReader;
import org.datavec.arrow.recordreader.ArrowWritableRecordBatch;
import java.io.ByteArrayInputStream;
import java.util.Map;
/**
 * An {@link InputAdapter}
 * for converting a raw {@link Buffer} of Arrow IPC bytes
 * to an {@link ArrowWritableRecordBatch}
*
* @author Adam Gibson
*/
public class ArrowBinaryInputAdapter implements InputAdapter<Buffer, ArrowWritableRecordBatch> {
@Override
public ArrowWritableRecordBatch convert(Buffer input, ConverterArgs parameters, Map<String, Object> contextData) {
ArrowRecordReader arrowRecordReader = new ArrowRecordReader();
arrowRecordReader.initialize(new InputStreamInputSplit(new ByteArrayInputStream(input.getBytes())));
arrowRecordReader.next();
return arrowRecordReader.getCurrentBatch();
}
}
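/*
 * Usage sketch (illustration only, not part of the original file): the adapter
 * expects the bytes of an Arrow IPC file, e.g. as written by ArrowFileWriter;
 * arrowFileBytes is assumed to hold such content.
 */
class ArrowBinaryInputAdapterExample {
static ArrowWritableRecordBatch firstBatch(byte[] arrowFileBytes) {
return new ArrowBinaryInputAdapter().convert(Buffer.buffer(arrowFileBytes), null, null);
}
}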
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl/image/BaseImageInputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.pipeline.handlers.converter.multi.converter.impl.image;
import ai.konduit.serving.input.adapter.InputAdapter;
import ai.konduit.serving.input.conversion.ConverterArgs;
import ai.konduit.serving.util.image.NativeImageLoader;
import org.datavec.api.writable.NDArrayWritable;
import org.datavec.api.writable.Writable;
import org.datavec.image.transform.ImageTransform;
import org.datavec.image.transform.MultiImageTransform;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.io.IOException;
import java.util.Map;
/**
 * A base class for {@link InputAdapter}s
 * that convert an input type
 * using a {@link NativeImageLoader}
 * configured by {@link #getImageLoader(Object, ConverterArgs)}
*
* @param <T> the type to adapt
*/
public abstract class BaseImageInputAdapter<T> implements InputAdapter<T, Writable> {
@Override
public Writable convert(T input, ConverterArgs parameters, Map<String, Object> contextData) throws IOException {
NativeImageLoader imageLoader = getImageLoader(input, parameters);
return new NDArrayWritable(getArrayUsing(imageLoader, input, parameters));
}
/**
* Uses the passed in {@link NativeImageLoader}
* to convert the specified input to a
* {@link INDArray} (usually a bitmap format)
*
* @param nativeImageLoader the {@link NativeImageLoader}
* used for conversion
* @param input the input to convert
* @param converterArgs converter arguments to use
* @return the converted input: an {@link INDArray}
* representing the image
* @throws IOException if an error occurs during the array creation
*/
public abstract INDArray getArrayUsing(NativeImageLoader nativeImageLoader, T input, ConverterArgs converterArgs) throws IOException;
/**
* Get the image loader
* configuring it using the
* {@link ConverterArgs}
*
* @param input the input to convert
* @param converterArgs the converter args to use
* @return the configured {@link NativeImageLoader}
*/
public NativeImageLoader getImageLoader(T input, ConverterArgs converterArgs) {
if (converterArgs == null || converterArgs.getLongs().isEmpty())
return new NativeImageLoader();
if (converterArgs.getLongs().size() == 3) {
long height = converterArgs.getLongs().get(0);
long width = converterArgs.getLongs().get(1);
long channels = converterArgs.getLongs().get(2);
if (converterArgs.getImageTransformProcess() != null) {
return new NativeImageLoader(height, width, channels, new MultiImageTransform(converterArgs.getImageTransformProcess().getTransformList().toArray(new ImageTransform[0])));
}
return new NativeImageLoader(height, width, channels);
}
return new NativeImageLoader();
}
}
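/*
 * Subclass sketch (illustration only, not part of the original file): the only
 * method a concrete adapter must supply is getArrayUsing(...); this hypothetical
 * byte[] adapter delegates straight to the configured loader.
 */
class ByteArrayImageInputAdapter extends BaseImageInputAdapter<byte[]> {
@Override
public INDArray getArrayUsing(NativeImageLoader nativeImageLoader, byte[] input, ConverterArgs converterArgs) throws IOException {
return nativeImageLoader.asMatrix(new java.io.ByteArrayInputStream(input));
}
}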
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl/image/VertxBufferImageInputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.pipeline.handlers.converter.multi.converter.impl.image;
import ai.konduit.serving.input.conversion.ConverterArgs;
import ai.konduit.serving.util.image.NativeImageLoader;
import ai.konduit.serving.verticles.VerticleConstants;
import io.vertx.core.buffer.Buffer;
import org.datavec.api.writable.Writable;
import org.datavec.image.data.ImageWritable;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Map;
/**
* Convert an image from a buffer to a matrix.
*
* @author Adam Gibson
*/
public class VertxBufferImageInputAdapter extends BaseImageInputAdapter<Buffer> {
@Override
public INDArray getArrayUsing(NativeImageLoader nativeImageLoader, Buffer input, ConverterArgs converterArgs) throws IOException {
return nativeImageLoader.asMatrix(new ByteArrayInputStream(input.getBytes()));
}
@Override
public Writable convert(Buffer input, ConverterArgs parameters, Map<String, Object> contextData) throws IOException {
NativeImageLoader imageLoader = getImageLoader(input, parameters);
ImageWritable image = imageLoader.asWritable(new ByteArrayInputStream(input.getBytes()));
if (contextData != null) {
contextData.put(VerticleConstants.ORIGINAL_IMAGE_HEIGHT, image.getHeight());
contextData.put(VerticleConstants.ORIGINAL_IMAGE_WIDTH, image.getWidth());
}
return image;
}
}
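/*
 * Usage sketch (illustration only, not part of the original file): "example.png" is
 * a hypothetical image on disk; convert(...) also records the original dimensions
 * in the supplied context map.
 */
class VertxBufferImageInputAdapterExample {
public static void main(String[] args) throws IOException {
byte[] imageBytes = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get("example.png"));
Map<String, Object> context = new java.util.HashMap<>();
Writable image = new VertxBufferImageInputAdapter().convert(Buffer.buffer(imageBytes), null, context);
System.out.println("Original size: " + context.get(VerticleConstants.ORIGINAL_IMAGE_WIDTH) + "x" + context.get(VerticleConstants.ORIGINAL_IMAGE_HEIGHT));
}
}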
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl/nd4j/VertxBufferNd4jInputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.pipeline.handlers.converter.multi.converter.impl.nd4j;
import ai.konduit.serving.input.adapter.InputAdapter;
import ai.konduit.serving.input.conversion.ConverterArgs;
import ai.konduit.serving.util.ImagePermuter;
import io.vertx.core.buffer.Buffer;
import org.datavec.api.writable.NDArrayWritable;
import org.datavec.api.writable.Writable;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.serde.binary.BinarySerde;
import java.util.Map;
/**
 * Reads in a {@link Buffer}
 * containing an nd4j binary-serialized array (see {@link BinarySerde})
 * and converts it to an {@link INDArray}
*
* @author Adam Gibson
*/
public class VertxBufferNd4jInputAdapter implements InputAdapter<Buffer, Writable> {
@Override
public NDArrayWritable convert(Buffer input, ConverterArgs parameters, Map<String, Object> contextData) {
Preconditions.checkState(input.length() > 0, "Buffer appears to be empty!");
INDArray fromNpyPointer = BinarySerde.toArray(input.getByteBuf().nioBuffer());
//permute required
if (parameters != null && parameters.getImageProcessingInitialLayout() != null && !parameters.getImageProcessingInitialLayout().equals(parameters.getImageProcessingRequiredLayout())) {
fromNpyPointer = ImagePermuter.permuteOrder(fromNpyPointer, parameters.getImageProcessingInitialLayout(), parameters.getImageProcessingRequiredLayout());
}
return new NDArrayWritable(fromNpyPointer);
}
}
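/*
 * Round-trip sketch (illustration only, not part of the original file): BinarySerde
 * produces the nd4j binary format this adapter deserializes.
 */
class VertxBufferNd4jInputAdapterExample {
public static void main(String[] args) {
INDArray original = org.nd4j.linalg.factory.Nd4j.linspace(1, 6, 6).reshape(2, 3);
java.nio.ByteBuffer serialized = BinarySerde.toByteBuffer(original);
byte[] bytes = new byte[serialized.remaining()];
serialized.get(bytes);
NDArrayWritable restored = new VertxBufferNd4jInputAdapter().convert(Buffer.buffer(bytes), null, null);
System.out.println(restored.get());
}
}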
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/pipeline/handlers/converter/multi/converter/impl/numpy/VertxBufferNumpyInputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.pipeline.handlers.converter.multi.converter.impl.numpy;
import ai.konduit.serving.input.adapter.InputAdapter;
import ai.konduit.serving.input.conversion.ConverterArgs;
import ai.konduit.serving.util.ImagePermuter;
import io.vertx.core.buffer.Buffer;
import org.bytedeco.javacpp.BytePointer;
import org.datavec.api.writable.NDArrayWritable;
import org.datavec.api.writable.Writable;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.util.Map;
/**
* Reads in {@link Buffer} containing raw numpy arrays and converts them to {@link NDArrayWritable},
* using {@link ConverterArgs}.
*
* @author Adam Gibson
*/
public class VertxBufferNumpyInputAdapter implements InputAdapter<Buffer, Writable> {
private boolean permuteRequired(ConverterArgs parameters) {
return parameters != null && parameters.getImageProcessingInitialLayout() != null
&& !parameters.getImageProcessingInitialLayout().equals(parameters.getImageProcessingRequiredLayout());
}
/**
* Convert Buffer input to NDArray writable. Note that contextData is unused in this implementation of InputAdapter.
*/
@Override
public NDArrayWritable convert(Buffer input, ConverterArgs parameters, Map<String, Object> contextData) {
Preconditions.checkState(input.length() > 0, "Buffer appears to be empty!");
INDArray fromNpyPointer = Nd4j.getNDArrayFactory().createFromNpyPointer(
new BytePointer(input.getByteBuf().nioBuffer())
);
if (permuteRequired(parameters)) {
fromNpyPointer = ImagePermuter.permuteOrder(
fromNpyPointer,
parameters.getImageProcessingInitialLayout(),
parameters.getImageProcessingRequiredLayout()
);
}
return new NDArrayWritable(fromNpyPointer);
}
}
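/*
 * Usage sketch (illustration only, not part of the original file): Nd4j can emit
 * the raw .npy bytes this adapter parses via createFromNpyPointer(...).
 */
class VertxBufferNumpyInputAdapterExample {
public static void main(String[] args) throws java.io.IOException {
INDArray original = Nd4j.linspace(1, 4, 4).reshape(2, 2);
byte[] npyBytes = Nd4j.toNpyByteArray(original);
NDArrayWritable restored = new VertxBufferNumpyInputAdapter().convert(Buffer.buffer(npyBytes), null, null);
System.out.println(restored.get());
}
}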
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util/ArrowUtils.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util;
import lombok.extern.slf4j.Slf4j;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.*;
import org.apache.arrow.vector.dictionary.Dictionary;
import org.apache.arrow.vector.dictionary.DictionaryProvider;
import org.apache.arrow.vector.ipc.ArrowFileReader;
import org.apache.arrow.vector.ipc.ArrowFileWriter;
import org.apache.arrow.vector.ipc.SeekableReadChannel;
import org.apache.arrow.vector.ipc.message.ArrowRecordBatch;
import org.apache.arrow.vector.types.DateUnit;
import org.apache.arrow.vector.types.FloatingPointPrecision;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.DictionaryEncoding;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel;
import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.metadata.*;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.transform.schema.conversion.TypeConversion;
import org.datavec.api.util.ndarray.RecordConverter;
import org.datavec.api.writable.*;
import org.datavec.arrow.ArrowConverter;
import org.datavec.arrow.recordreader.ArrowRecord;
import org.datavec.arrow.recordreader.ArrowWritableRecordBatch;
import org.datavec.arrow.recordreader.ArrowWritableRecordTimeSeriesBatch;
import org.nd4j.common.io.ReflectionUtils;
import org.nd4j.common.primitives.Pair;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.exception.ND4JIllegalArgumentException;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.serde.binary.BinarySerde;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.Channels;
import java.util.*;
/**
 * Various utilities related to Arrow.
 * Heavily overlaps with {@link ArrowConverter}.
*
* @author Adam Gibson
*/
@Slf4j
public class ArrowUtils {
private static java.lang.reflect.Field arrowRecordField;
static {
try {
arrowRecordField = ArrowRecord.class.getDeclaredField("arrowWritableRecordBatch");
arrowRecordField.setAccessible(true);
} catch (NoSuchFieldException e) {
throw new IllegalStateException(e);
}
}
/**
* Returns the underlying {@link ArrowWritableRecordBatch}
* for the input {@link ArrowRecord}
* @param arrowRecord the arrow record to get the underlying batch for
* @return the batch for the given record
*/
public static ArrowWritableRecordBatch getBatchFromRecord(ArrowRecord arrowRecord) {
return (ArrowWritableRecordBatch) ReflectionUtils.getField(arrowRecordField, arrowRecord);
}
public static INDArray toArray(ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch) {
return RecordConverter.toTensor(arrowWritableRecordBatch);
}
public static INDArray toArray(ArrowWritableRecordBatch arrowWritableRecordBatch) {
List<FieldVector> columnVectors = arrowWritableRecordBatch.getList();
Schema schema = arrowWritableRecordBatch.getSchema();
//validate that every column is of a supported (numeric or NDArray) type before converting
for (int col = 0; col < schema.numColumns(); ++col) {
switch (schema.getType(col)) {
case Integer:
case Float:
case Double:
case Long:
case NDArray:
break;
default:
throw new ND4JIllegalArgumentException("Illegal data type found for column " + schema.getName(col) + " of type " + schema.getType(col));
}
}
int rows = arrowWritableRecordBatch.getList().get(0).getValueCount();
if (schema.numColumns() == 1 && schema.getMetaData(0).getColumnType() == ColumnType.NDArray) {
INDArray[] toConcat = new INDArray[rows];
VarBinaryVector valueVectors = (VarBinaryVector) arrowWritableRecordBatch.getList().get(0);
for (int i = 0; i < rows; ++i) {
byte[] bytes = valueVectors.get(i);
ByteBuffer direct = ByteBuffer.allocateDirect(bytes.length);
direct.put(bytes);
INDArray fromTensor = BinarySerde.toArray(direct);
toConcat[i] = fromTensor;
}
return Nd4j.concat(0, toConcat);
} else {
int cols = schema.numColumns();
INDArray arr = Nd4j.create(rows, cols);
for (int i = 0; i < cols; ++i) {
INDArray put = convertArrowVector(columnVectors.get(i), schema.getType(i));
switch (arr.data().dataType()) {
case FLOAT:
arr.putColumn(i, Nd4j.create(put.data().asFloat()).reshape(rows, 1L));
break;
case DOUBLE:
arr.putColumn(i, Nd4j.create(put.data().asDouble()).reshape(rows, 1L));
}
}
return arr;
}
}
public static INDArray convertArrowVector(FieldVector fieldVector, ColumnType type) {
DataBuffer buffer = null;
int cols = fieldVector.getValueCount();
ByteBuffer direct = ByteBuffer.allocateDirect((int) fieldVector.getDataBuffer().capacity());
direct.order(ByteOrder.nativeOrder());
fieldVector.getDataBuffer().getBytes(0, direct);
direct.rewind();
switch (type) {
case Integer:
buffer = Nd4j.createBuffer(direct, DataType.INT32, cols, 0L);
break;
case Float:
buffer = Nd4j.createBuffer(direct, DataType.FLOAT, cols);
break;
case Double:
buffer = Nd4j.createBuffer(direct, DataType.DOUBLE, cols);
break;
case Long:
buffer = Nd4j.createBuffer(direct, DataType.INT64, cols);
break;
default:
throw new IllegalArgumentException("Illegal column type " + type + " for Arrow vector conversion");
}
return Nd4j.create(buffer, cols, 1);
}
public static List<FieldVector> convertToArrowVector(INDArray from, List<String> name, ColumnType type, BufferAllocator bufferAllocator) {
List<FieldVector> ret = new ArrayList<>();
long cols;
if (from.isVector()) {
cols = from.length();
switch (type) {
case Integer:
int[] fromDataInt = from.isView() ? from.dup().data().asInt() : from.data().asInt();
ret.add(vectorFor(bufferAllocator, name.get(0), fromDataInt));
break;
case Float:
float[] fromDataFloat = from.isView() ? from.dup().data().asFloat() : from.data().asFloat();
ret.add(vectorFor(bufferAllocator, name.get(0), fromDataFloat));
break;
case Double:
double[] fromData = from.isView() ? from.dup().data().asDouble() : from.data().asDouble();
ret.add(vectorFor(bufferAllocator, name.get(0), fromData));
break;
default:
throw new IllegalArgumentException("Illegal type " + type);
}
} else {
cols = from.size(1);
for (int i = 0; (long) i < cols; ++i) {
INDArray column = from.getColumn(i);
switch (type) {
case Integer:
int[] fromDataInt = column.isView() ? column.dup().data().asInt() : column.data().asInt();
ret.add(vectorFor(bufferAllocator, name.get(i), fromDataInt));
break;
case Float:
float[] fromDataFloat = column.isView() ? column.dup().data().asFloat() : column.data().asFloat();
ret.add(vectorFor(bufferAllocator, name.get(i), fromDataFloat));
break;
case Double:
double[] fromData = column.isView() ? column.dup().data().asDouble() : column.data().asDouble();
ret.add(vectorFor(bufferAllocator, name.get(i), fromData));
break;
default:
throw new IllegalArgumentException("Illegal type " + type);
}
}
}
return ret;
}
public static void writeRecordBatchTo(List<List<Writable>> recordBatch, Schema inputSchema, OutputStream outputStream) {
BufferAllocator bufferAllocator = new RootAllocator(Long.MAX_VALUE);
writeRecordBatchTo(bufferAllocator, recordBatch, inputSchema, outputStream);
}
public static void writeRecordBatchTo(BufferAllocator bufferAllocator, List<List<Writable>> recordBatch, Schema inputSchema, OutputStream outputStream) {
//generic and Arrow-backed batches take the same conversion path
convertWritables(bufferAllocator, recordBatch, inputSchema, outputStream);
}
private static void convertWritables(BufferAllocator bufferAllocator, List<List<Writable>> recordBatch, Schema inputSchema, OutputStream outputStream) {
org.apache.arrow.vector.types.pojo.Schema convertedSchema = toArrowSchema(inputSchema);
List<FieldVector> columns = toArrowColumns(bufferAllocator, inputSchema, recordBatch);
try {
VectorSchemaRoot root = new VectorSchemaRoot(convertedSchema, columns, recordBatch.size());
ArrowFileWriter writer = new ArrowFileWriter(root, providerForVectors(columns, convertedSchema.getFields()), Channels.newChannel(outputStream));
writer.start();
writer.writeBatch();
writer.end();
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
public static List<List<List<Writable>>> toArrowWritablesTimeSeries(List<FieldVector> fieldVectors, Schema schema, int timeSeriesLength) {
return new ArrowWritableRecordTimeSeriesBatch(fieldVectors, schema, timeSeriesLength);
}
public static ArrowWritableRecordBatch toArrowWritables(List<FieldVector> fieldVectors, Schema schema) {
return new ArrowWritableRecordBatch(fieldVectors, schema);
}
public static List<Writable> toArrowWritablesSingle(List<FieldVector> fieldVectors, Schema schema) {
return toArrowWritables(fieldVectors, schema).get(0);
}
public static Pair<Schema, ArrowWritableRecordBatch> readFromFile(FileInputStream input) throws IOException {
BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
Schema retSchema = null;
ArrowWritableRecordBatch ret = null;
SeekableReadChannel channel = new SeekableReadChannel(input.getChannel());
ArrowFileReader reader = new ArrowFileReader(channel, allocator);
reader.loadNextBatch();
retSchema = toDatavecSchema(reader.getVectorSchemaRoot().getSchema());
VectorUnloader unloader = new VectorUnloader(reader.getVectorSchemaRoot());
VectorLoader vectorLoader = new VectorLoader(reader.getVectorSchemaRoot());
ArrowRecordBatch recordBatch = unloader.getRecordBatch();
vectorLoader.load(recordBatch);
ret = asDataVecBatch(recordBatch, retSchema, reader.getVectorSchemaRoot());
ret.setUnloader(unloader);
return Pair.of(retSchema, ret);
}
public static Pair<Schema, ArrowWritableRecordBatch> readFromFile(File input) throws IOException {
return readFromFile(new FileInputStream(input));
}
public static Pair<Schema, ArrowWritableRecordBatch> readFromBytes(byte[] input) throws IOException {
BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
Schema retSchema = null;
ArrowWritableRecordBatch ret = null;
SeekableReadChannel channel = new SeekableReadChannel(new ByteArrayReadableSeekableByteChannel(input));
ArrowFileReader reader = new ArrowFileReader(channel, allocator);
reader.loadNextBatch();
retSchema = toDatavecSchema(reader.getVectorSchemaRoot().getSchema());
VectorUnloader unloader = new VectorUnloader(reader.getVectorSchemaRoot());
VectorLoader vectorLoader = new VectorLoader(reader.getVectorSchemaRoot());
ArrowRecordBatch recordBatch = unloader.getRecordBatch();
vectorLoader.load(recordBatch);
ret = asDataVecBatch(recordBatch, retSchema, reader.getVectorSchemaRoot());
ret.setUnloader(unloader);
return Pair.of(retSchema, ret);
}
public static org.apache.arrow.vector.types.pojo.Schema toArrowSchema(Schema schema) {
List<Field> fields = new ArrayList<>(schema.numColumns());
for (int i = 0; i < schema.numColumns(); ++i) {
fields.add(getFieldForColumn(schema.getName(i), schema.getType(i)));
}
return new org.apache.arrow.vector.types.pojo.Schema(fields);
}
public static Schema toDatavecSchema(org.apache.arrow.vector.types.pojo.Schema schema) {
Schema.Builder schemaBuilder = new Schema.Builder();
for (int i = 0; i < schema.getFields().size(); ++i) {
schemaBuilder.addColumn(metaDataFromField(schema.getFields().get(i)));
}
return schemaBuilder.build();
}
public static Field field(String name, ArrowType arrowType) {
return new Field(name, FieldType.nullable(arrowType), new ArrayList<>());
}
public static Field getFieldForColumn(String name, ColumnType columnType) {
switch (columnType) {
case Integer:
return field(name, new ArrowType.Int(32, false));
case Float:
return field(name, new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE));
case Double:
return field(name, new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE));
case Long:
return field(name, new ArrowType.Int(64, false));
case NDArray:
return field(name, new ArrowType.Binary());
case Boolean:
return field(name, new ArrowType.Bool());
case Categorical:
return field(name, new ArrowType.Utf8());
case Time:
return field(name, new ArrowType.Date(DateUnit.MILLISECOND));
case Bytes:
return field(name, new ArrowType.Binary());
case String:
return field(name, new ArrowType.Utf8());
default:
throw new IllegalArgumentException("Column type invalid " + columnType);
}
}
public static Field doubleField(String name) {
return getFieldForColumn(name, ColumnType.Double);
}
public static Field floatField(String name) {
return getFieldForColumn(name, ColumnType.Float);
}
public static Field intField(String name) {
return getFieldForColumn(name, ColumnType.Integer);
}
public static Field longField(String name) {
return getFieldForColumn(name, ColumnType.Long);
}
public static Field stringField(String name) {
return getFieldForColumn(name, ColumnType.String);
}
public static Field booleanField(String name) {
return getFieldForColumn(name, ColumnType.Boolean);
}
public static DictionaryProvider providerForVectors(List<FieldVector> vectors, List<Field> fields) {
Dictionary[] dictionaries = new Dictionary[vectors.size()];
for (int i = 0; i < vectors.size(); ++i) {
DictionaryEncoding dictionary = fields.get(i).getDictionary();
if (dictionary == null) {
dictionary = new DictionaryEncoding(i, true, null);
}
dictionaries[i] = new Dictionary(vectors.get(i), dictionary);
}
return new DictionaryProvider.MapDictionaryProvider(dictionaries);
}
public static List<FieldVector> toArrowColumns(BufferAllocator bufferAllocator, Schema schema, List<List<Writable>> dataVecRecord) {
int numRows = dataVecRecord.size();
List<FieldVector> ret = createFieldVectors(bufferAllocator, schema, numRows);
for (int j = 0; j < schema.numColumns(); ++j) {
FieldVector fieldVector = ret.get(j);
int row = 0;
for (List<Writable> record : dataVecRecord) {
Writable writable = record.get(j);
setValue(schema.getType(j), fieldVector, writable, row);
++row;
}
}
return ret;
}
public static List<FieldVector> toArrowColumnsTimeSeries(BufferAllocator bufferAllocator, Schema schema, List<List<List<Writable>>> dataVecRecord) {
return toArrowColumnsTimeSeriesHelper(bufferAllocator, schema, dataVecRecord);
}
public static <T> List<FieldVector> toArrowColumnsTimeSeriesHelper(BufferAllocator bufferAllocator, Schema schema, List<List<List<T>>> dataVecRecord) {
int numRows = 0;
for (List<List<T>> timeStep : dataVecRecord) {
numRows += timeStep.get(0).size() * timeStep.size();
}
numRows /= schema.numColumns();
List<FieldVector> ret = createFieldVectors(bufferAllocator, schema, numRows);
Map<Integer, Integer> currIndex = new HashMap<>(ret.size());
int i;
for (i = 0; i < ret.size(); ++i) {
currIndex.put(i, 0);
}
for (i = 0; i < dataVecRecord.size(); ++i) {
List<List<T>> record = dataVecRecord.get(i);
for (int j = 0; j < record.size(); ++j) {
List<T> curr = record.get(j);
for (int k = 0; k < curr.size(); ++k) {
Integer idx = currIndex.get(k);
FieldVector fieldVector = ret.get(k);
T writable = curr.get(k);
setValue(schema.getType(k), fieldVector, writable, idx);
currIndex.put(k, idx + 1);
}
}
}
return ret;
}
public static List<FieldVector> toArrowColumnsStringSingle(BufferAllocator bufferAllocator, Schema schema, List<String> dataVecRecord) {
return toArrowColumnsString(bufferAllocator, schema, Arrays.asList(dataVecRecord));
}
public static List<FieldVector> toArrowColumnsStringTimeSeries(BufferAllocator bufferAllocator, Schema schema, List<List<List<String>>> dataVecRecord) {
return toArrowColumnsTimeSeriesHelper(bufferAllocator, schema, dataVecRecord);
}
public static List<FieldVector> toArrowColumnsString(BufferAllocator bufferAllocator, Schema schema, List<List<String>> dataVecRecord) {
int numRows = dataVecRecord.size();
List<FieldVector> ret = createFieldVectors(bufferAllocator, schema, numRows);
for (int j = 0; j < schema.numColumns(); ++j) {
FieldVector fieldVector = ret.get(j);
for (int row = 0; row < numRows; ++row) {
String writable = dataVecRecord.get(row).get(j);
setValue(schema.getType(j), fieldVector, writable, row);
}
}
return ret;
}
public static List<FieldVector> createFieldVectors(BufferAllocator bufferAllocator, Schema schema, int numRows) {
List<FieldVector> ret = new ArrayList<>(schema.numColumns());
for (int i = 0; i < schema.numColumns(); ++i) {
switch (schema.getType(i)) {
case Integer:
ret.add(intVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case Float:
ret.add(floatVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case Double:
ret.add(doubleVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case Long:
ret.add(longVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case NDArray:
ret.add(ndarrayVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case Boolean:
ret.add(booleanVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case Categorical:
ret.add(stringVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case Time:
ret.add(timeVectorOf(bufferAllocator, schema.getName(i), numRows));
break;
case Bytes:
default:
throw new IllegalArgumentException("Illegal type found for creation of field vectors" + schema.getType(i));
case String:
ret.add(stringVectorOf(bufferAllocator, schema.getName(i), numRows));
}
}
return ret;
}
public static void setValue(ColumnType columnType, FieldVector fieldVector, Object value, int row) {
if (!(value instanceof NullWritable)) {
try {
switch (columnType) {
case Integer:
int set;
if (fieldVector instanceof IntVector) {
IntVector intVector = (IntVector) fieldVector;
set = TypeConversion.getInstance().convertInt(value);
intVector.set(row, set);
} else {
if (!(fieldVector instanceof UInt4Vector)) {
throw new UnsupportedOperationException("Illegal type " + fieldVector.getClass() + " for int type");
}
UInt4Vector uInt4Vector = (UInt4Vector) fieldVector;
set = TypeConversion.getInstance().convertInt(value);
uInt4Vector.set(row, set);
}
break;
case Float:
Float4Vector float4Vector = (Float4Vector) fieldVector;
float set2 = TypeConversion.getInstance().convertFloat(value);
float4Vector.set(row, set2);
break;
case Double:
double set3 = TypeConversion.getInstance().convertDouble(value);
Float8Vector float8Vector = (Float8Vector) fieldVector;
float8Vector.set(row, set3);
break;
case Long:
if (fieldVector instanceof BigIntVector) {
BigIntVector largeIntVector = (BigIntVector) fieldVector;
largeIntVector.set(row, TypeConversion.getInstance().convertLong(value));
} else {
if (!(fieldVector instanceof UInt8Vector)) {
throw new UnsupportedOperationException("Illegal type " + fieldVector.getClass() + " for long type");
}
UInt8Vector uInt8Vector = (UInt8Vector) fieldVector;
uInt8Vector.set(row, TypeConversion.getInstance().convertLong(value));
}
break;
case NDArray:
NDArrayWritable arr = (NDArrayWritable) value;
VarBinaryVector nd4jArrayVector = (VarBinaryVector) fieldVector;
ByteBuffer byteBuffer = BinarySerde.toByteBuffer(arr.get());
nd4jArrayVector.setSafe(row, byteBuffer, 0, byteBuffer.capacity());
break;
case Boolean:
case Bytes:
default:
break;
case Categorical:
case String:
String stringSet = TypeConversion.getInstance().convertString(value);
VarCharVector textVector = (VarCharVector) fieldVector;
textVector.setSafe(row, stringSet.getBytes());
break;
case Time:
long timeSet = TypeConversion.getInstance().convertLong(value);
setLongInTime(fieldVector, row, timeSet);
}
} catch (Exception e) {
log.warn("Unable to set value at row " + row, e);
}
}
}
public static void setLongInTime(FieldVector fieldVector, int index, long value) {
if (fieldVector instanceof TimeStampMilliVector) {
((TimeStampMilliVector) fieldVector).set(index, value);
} else if (fieldVector instanceof TimeMilliVector) {
((TimeMilliVector) fieldVector).set(index, (int) value);
} else if (fieldVector instanceof TimeStampMicroVector) {
((TimeStampMicroVector) fieldVector).set(index, value);
} else if (fieldVector instanceof TimeSecVector) {
((TimeSecVector) fieldVector).set(index, (int) value);
} else if (fieldVector instanceof TimeStampMilliTZVector) {
((TimeStampMilliTZVector) fieldVector).set(index, value);
} else if (fieldVector instanceof TimeStampNanoTZVector) {
((TimeStampNanoTZVector) fieldVector).set(index, value);
} else if (fieldVector instanceof TimeStampMicroTZVector) {
((TimeStampMicroTZVector) fieldVector).set(index, value);
} else {
throw new UnsupportedOperationException();
}
}
public static TimeStampMilliVector vectorFor(BufferAllocator allocator, String name, java.util.Date[] data) {
TimeStampMilliVector timeStampVector = new TimeStampMilliVector(name, allocator);
timeStampVector.allocateNew(data.length);
for (int i = 0; i < data.length; ++i) {
timeStampVector.setSafe(i, data[i].getTime());
}
timeStampVector.setValueCount(data.length);
return timeStampVector;
}
public static TimeStampMilliVector timeVectorOf(BufferAllocator allocator, String name, int length) {
TimeStampMilliVector timeStampVector = new TimeStampMilliVector(name, allocator);
timeStampVector.allocateNew(length);
timeStampVector.setValueCount(length);
return timeStampVector;
}
public static VarBinaryVector vectorFor(BufferAllocator bufferAllocator, String name, INDArray[] data) {
VarBinaryVector ret = new VarBinaryVector(name, bufferAllocator);
ret.allocateNew();
for (int i = 0; i < data.length; ++i) {
ByteBuffer byteBuffer = BinarySerde.toByteBuffer(data[i]);
ret.set(i, byteBuffer, 0, byteBuffer.capacity());
}
return ret;
}
public static VarCharVector vectorFor(BufferAllocator allocator, String name, String[] data) {
VarCharVector varCharVector = new VarCharVector(name, allocator);
varCharVector.allocateNew();
for (int i = 0; i < data.length; ++i) {
varCharVector.setSafe(i, data[i].getBytes());
}
varCharVector.setValueCount(data.length);
return varCharVector;
}
public static VarBinaryVector ndarrayVectorOf(BufferAllocator allocator, String name, int length) {
VarBinaryVector ret = new VarBinaryVector(name, allocator);
ret.allocateNewSafe();
ret.setValueCount(length);
return ret;
}
public static VarCharVector stringVectorOf(BufferAllocator allocator, String name, int length) {
VarCharVector varCharVector = new VarCharVector(name, allocator);
varCharVector.allocateNew();
varCharVector.setValueCount(length);
return varCharVector;
}
public static Float4Vector vectorFor(BufferAllocator allocator, String name, float[] data) {
Float4Vector float4Vector = new Float4Vector(name, allocator);
float4Vector.allocateNew(data.length);
for (int i = 0; i < data.length; ++i) {
float4Vector.setSafe(i, data[i]);
}
float4Vector.setValueCount(data.length);
return float4Vector;
}
public static Float4Vector floatVectorOf(BufferAllocator allocator, String name, int length) {
Float4Vector float4Vector = new Float4Vector(name, allocator);
float4Vector.allocateNew(length);
float4Vector.setValueCount(length);
return float4Vector;
}
public static Float8Vector vectorFor(BufferAllocator allocator, String name, double[] data) {
Float8Vector float8Vector = new Float8Vector(name, allocator);
float8Vector.allocateNew(data.length);
for (int i = 0; i < data.length; ++i) {
float8Vector.setSafe(i, data[i]);
}
float8Vector.setValueCount(data.length);
return float8Vector;
}
public static Float8Vector doubleVectorOf(BufferAllocator allocator, String name, int length) {
Float8Vector float8Vector = new Float8Vector(name, allocator);
float8Vector.allocateNew();
float8Vector.setValueCount(length);
return float8Vector;
}
public static BitVector vectorFor(BufferAllocator allocator, String name, boolean[] data) {
BitVector bitVector = new BitVector(name, allocator);
bitVector.allocateNew(data.length);
for (int i = 0; i < data.length; ++i) {
bitVector.setSafe(i, data[i] ? 1 : 0);
}
bitVector.setValueCount(data.length);
return bitVector;
}
public static BitVector booleanVectorOf(BufferAllocator allocator, String name, int length) {
BitVector bitVector = new BitVector(name, allocator);
bitVector.allocateNew(length);
bitVector.setValueCount(length);
return bitVector;
}
public static IntVector vectorFor(BufferAllocator allocator, String name, int[] data) {
IntVector intVector = new IntVector(name, FieldType.nullable(new ArrowType.Int(32, true)), allocator);
intVector.allocateNew(data.length);
for (int i = 0; i < data.length; ++i) {
intVector.setSafe(i, data[i]);
}
intVector.setValueCount(data.length);
return intVector;
}
public static IntVector intVectorOf(BufferAllocator allocator, String name, int length) {
IntVector intVector = new IntVector(name, FieldType.nullable(new ArrowType.Int(32, true)), allocator);
intVector.allocateNew(length);
intVector.setValueCount(length);
return intVector;
}
public static BigIntVector vectorFor(BufferAllocator allocator, String name, long[] data) {
BigIntVector bigIntVector = new BigIntVector(name, FieldType.nullable(new ArrowType.Int(64, true)), allocator);
bigIntVector.allocateNew(data.length);
for (int i = 0; i < data.length; ++i) {
bigIntVector.setSafe(i, data[i]);
}
bigIntVector.setValueCount(data.length);
return bigIntVector;
}
public static BigIntVector longVectorOf(BufferAllocator allocator, String name, int length) {
BigIntVector bigIntVector = new BigIntVector(name, FieldType.nullable(new ArrowType.Int(64, true)), allocator);
bigIntVector.allocateNew(length);
bigIntVector.setValueCount(length);
return bigIntVector;
}
public static ColumnMetaData metaDataFromField(Field field) {
ArrowType arrowType = field.getFieldType().getType();
if (arrowType instanceof ArrowType.Int) {
ArrowType.Int intType = (ArrowType.Int) arrowType;
return intType.getBitWidth() == 32 ? new IntegerMetaData(field.getName()) : new LongMetaData(field.getName());
} else if (arrowType instanceof ArrowType.Bool) {
return new BooleanMetaData(field.getName());
} else if (arrowType instanceof ArrowType.FloatingPoint) {
ArrowType.FloatingPoint floatingPointType = (ArrowType.FloatingPoint) arrowType;
return floatingPointType.getPrecision() == FloatingPointPrecision.DOUBLE ? new DoubleMetaData(field.getName()) : new FloatMetaData(field.getName());
} else if (arrowType instanceof ArrowType.Binary) {
return new BinaryMetaData(field.getName());
} else if (arrowType instanceof ArrowType.Utf8) {
return new StringMetaData(field.getName());
} else if (arrowType instanceof ArrowType.Date) {
return new TimeMetaData(field.getName());
} else {
throw new IllegalStateException("Illegal type " + field.getFieldType().getType());
}
}
public static Writable fromEntry(int item, FieldVector from, ColumnType columnType) {
if (from.getValueCount() < item) {
throw new IllegalArgumentException("Index specified greater than the number of items in the vector with length " + from.getValueCount());
} else {
switch (columnType) {
case Integer:
return new IntWritable(getIntFromFieldVector(item, from));
case Float:
return new FloatWritable(getFloatFromFieldVector(item, from));
case Double:
return new DoubleWritable(getDoubleFromFieldVector(item, from));
case Long:
return new LongWritable(getLongFromFieldVector(item, from));
case NDArray:
VarBinaryVector valueVector = (VarBinaryVector) from;
byte[] bytes = valueVector.get(item);
ByteBuffer direct = ByteBuffer.allocateDirect(bytes.length);
direct.put(bytes);
INDArray fromTensor = BinarySerde.toArray(direct);
return new NDArrayWritable(fromTensor);
case Boolean:
BitVector bitVector = (BitVector) from;
return new BooleanWritable(bitVector.get(item) > 0);
case Categorical:
VarCharVector varCharVector = (VarCharVector) from;
return new Text(varCharVector.get(item));
case Time:
return new LongWritable(getLongFromFieldVector(item, from));
case Bytes:
default:
throw new IllegalArgumentException("Illegal type " + from.getClass().getName());
case String:
VarCharVector varCharVector2 = (VarCharVector) from;
return new Text(varCharVector2.get(item));
}
}
}
public static int getIntFromFieldVector(int row, FieldVector fieldVector) {
if (fieldVector instanceof UInt4Vector) {
UInt4Vector uInt4Vector = (UInt4Vector) fieldVector;
return uInt4Vector.get(row);
} else if (fieldVector instanceof IntVector) {
IntVector intVector = (IntVector) fieldVector;
return intVector.get(row);
} else {
throw new IllegalArgumentException("Illegal vector type for int " + fieldVector.getClass().getName());
}
}
public static long getLongFromFieldVector(int row, FieldVector fieldVector) {
if (fieldVector instanceof UInt8Vector) {
return ((UInt8Vector) fieldVector).get(row);
} else if (fieldVector instanceof IntVector) {
return ((IntVector) fieldVector).get(row);
} else if (fieldVector instanceof BigIntVector) {
return ((BigIntVector) fieldVector).get(row);
} else if (fieldVector instanceof TimeStampMilliVector) {
return ((TimeStampMilliVector) fieldVector).get(row);
} else if (fieldVector instanceof DateMilliVector) {
return ((DateMilliVector) fieldVector).get(row);
} else if (fieldVector instanceof TimeMilliVector) {
return ((TimeMilliVector) fieldVector).get(row);
} else if (fieldVector instanceof TimeStampMicroVector) {
return ((TimeStampMicroVector) fieldVector).get(row);
} else if (fieldVector instanceof TimeSecVector) {
return ((TimeSecVector) fieldVector).get(row);
} else if (fieldVector instanceof TimeStampMilliTZVector) {
return ((TimeStampMilliTZVector) fieldVector).get(row);
} else if (fieldVector instanceof TimeStampNanoTZVector) {
return ((TimeStampNanoTZVector) fieldVector).get(row);
} else if (fieldVector instanceof TimeStampMicroTZVector) {
return ((TimeStampMicroTZVector) fieldVector).get(row);
} else {
throw new UnsupportedOperationException("Illegal vector type for long: " + fieldVector.getClass().getName());
}
}
public static double getDoubleFromFieldVector(int row, FieldVector fieldVector) {
if (fieldVector instanceof Float8Vector) {
return ((Float8Vector) fieldVector).get(row);
} else {
throw new IllegalArgumentException("Illegal vector type for double " + fieldVector.getClass().getName());
}
}
public static float getFloatFromFieldVector(int row, FieldVector fieldVector) {
if (fieldVector instanceof Float4Vector) {
return ((Float4Vector) fieldVector).get(row);
} else {
throw new IllegalArgumentException("Illegal vector type for float " + fieldVector.getClass().getName());
}
}
public static ArrowWritableRecordBatch asDataVecBatch(ArrowRecordBatch arrowRecordBatch, Schema schema, VectorSchemaRoot vectorLoader) {
List<FieldVector> fieldVectors = new ArrayList<>();
for (int j = 0; j < schema.numColumns(); ++j) {
String name = schema.getName(j);
FieldVector fieldVector = vectorLoader.getVector(name);
fieldVectors.add(fieldVector);
}
ArrowWritableRecordBatch ret = new ArrowWritableRecordBatch(fieldVectors, schema);
ret.setArrowRecordBatch(arrowRecordBatch);
return ret;
}
}
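/*
 * Round-trip sketch (illustration only, not part of the original file): build a
 * two-column DataVec schema, write a small record batch to Arrow IPC bytes, then
 * read it back with readFromBytes(...). Column names and values are made up.
 */
class ArrowUtilsExample {
public static void main(String[] args) throws IOException {
Schema schema = new Schema.Builder().addColumnDouble("x").addColumnDouble("y").build();
List<List<Writable>> records = Arrays.asList(
Arrays.asList(new DoubleWritable(1.0), new DoubleWritable(2.0)),
Arrays.asList(new DoubleWritable(3.0), new DoubleWritable(4.0)));
java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
ArrowUtils.writeRecordBatchTo(records, schema, out);
Pair<Schema, ArrowWritableRecordBatch> readBack = ArrowUtils.readFromBytes(out.toByteArray());
System.out.println(readBack.getFirst().numColumns() + " columns, " + readBack.getSecond().size() + " rows");
}
}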
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util/ImagePermuter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
/**
* Utilities for rearranging image {@link INDArray}
* channels based on a commonly used idea of:
* N: Number of Images
* C: Channel in an image
* W: Width
* H: Height
* <p>
 * NCHW is used to describe the expected layout of an image as input
 * into a deep learning framework.
 * Different frameworks require different input layouts, specified
 * as some permutation of NCHW.
* <p>
* Methods related to manipulating images and image layout should go here.
*
* @author Adam Gibson
*/
public class ImagePermuter {
static int[] determinePermuteOrder(String startingOrder, String destinationOrder) {
startingOrder = startingOrder.toLowerCase().trim();
destinationOrder = destinationOrder.toLowerCase().trim();
Preconditions.checkState(startingOrder.length() == 4 && destinationOrder.length() == 4, "Orders must be of length 4");
Preconditions.checkState(startingOrder.contains("n") && destinationOrder.contains("n"), "One order is missing n");
Preconditions.checkState(startingOrder.contains("c") && destinationOrder.contains("c"), "One order is missing c");
Preconditions.checkState(startingOrder.contains("h") && destinationOrder.contains("h"), "One order is missing h");
Preconditions.checkState(startingOrder.contains("w") && destinationOrder.contains("w"), "One order is missing w");
int[] retPermuteOrder = new int[4];
for (int i = 0; i < 4; i++) {
if (startingOrder.charAt(i) == destinationOrder.charAt(i)) {
retPermuteOrder[i] = i;
} else {
int destinationIdxOfCurrentStartingChar = destinationOrder.indexOf(startingOrder.charAt(i));
retPermuteOrder[destinationIdxOfCurrentStartingChar] = i;
}
}
return retPermuteOrder;
}
static String applyPermuteOrderToString(String origin, int[] permuteOrder) {
StringBuilder sb = new StringBuilder();
for (int value : permuteOrder) {
sb.append(origin.charAt(value));
}
return sb.toString();
}
/**
* Permute the order given the input string
* starting order and the target destination order.
 * Only permutations of nchw are supported.
*
* @param input the input array
* @param startingOrder the starting order (string must be some permutation of nchw)
* @param destinationOrder the destination order (string must be some permutation of nchw)
* @return the output {@link INDArray} rearranged
*/
public static INDArray permuteOrder(INDArray input, String startingOrder, String destinationOrder) {
return input.permute(determinePermuteOrder(startingOrder, destinationOrder));
}
}
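/*
 * Usage sketch (illustration only, not part of the original file): reorder a single
 * random NCHW image tensor to NHWC.
 */
class ImagePermuterExample {
public static void main(String[] args) {
INDArray nchw = org.nd4j.linalg.factory.Nd4j.rand(new int[]{1, 3, 32, 32});
INDArray nhwc = ImagePermuter.permuteOrder(nchw, "nchw", "nhwc");
System.out.println(java.util.Arrays.toString(nhwc.shape())); // [1, 32, 32, 3]
}
}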
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util/MetricsUtils.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.prometheus.PrometheusConfig;
import io.micrometer.prometheus.PrometheusMeterRegistry;
import io.vertx.micrometer.MicrometerMetricsOptions;
import io.vertx.micrometer.VertxPrometheusOptions;
import io.vertx.micrometer.backends.BackendRegistries;
import org.nd4j.common.primitives.Pair;
/**
* Utility class for dealing with {@link io.vertx.micrometer.impl.MicrometerMetrics}
*
* @author Adam Gibson
*/
public class MetricsUtils {
/**
 * Sets up Prometheus and returns the
 * configured metrics options along with the backing registry.
 * @return a pair of the {@link MicrometerMetricsOptions} and the Prometheus-backed {@link MeterRegistry}
*/
public static Pair<MicrometerMetricsOptions,MeterRegistry> setupPrometheus() {
PrometheusMeterRegistry registry = new PrometheusMeterRegistry(PrometheusConfig.DEFAULT);
MicrometerMetricsOptions micrometerMetricsOptions = new MicrometerMetricsOptions()
.setMicrometerRegistry(registry)
.setPrometheusOptions(new VertxPrometheusOptions()
.setEnabled(true));
BackendRegistries.setupBackend(micrometerMetricsOptions);
return Pair.of(micrometerMetricsOptions,registry);
}
}
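/*
 * Usage sketch (illustration only, not part of the original file): wire the returned
 * options into a Vert.x instance so the Prometheus registry backs its metrics.
 */
class MetricsUtilsExample {
public static void main(String[] args) {
Pair<MicrometerMetricsOptions, MeterRegistry> prometheus = MetricsUtils.setupPrometheus();
io.vertx.core.Vertx vertx = io.vertx.core.Vertx.vertx(
new io.vertx.core.VertxOptions().setMetricsOptions(prometheus.getFirst()));
System.out.println("Registry: " + prometheus.getSecond());
}
}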
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util/VertxArrayConversion.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.json.JsonArray;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.serde.binary.BinarySerde;
import java.nio.ByteBuffer;
/**
* A utility class used to handle buffer array conversion starting from
* a {@link Buffer} generated by vertx.
*
* @author Adam Gibson
*/
public class VertxArrayConversion {
/**
* Convert a {@link Buffer}
* to an {@link INDArray}
* using one of three types:
     * numpy: converts using {@link Nd4j#createNpyFromByteArray(byte[])}
     * nd4j: converts using {@link BinarySerde#toArray(ByteBuffer)}
     * with a direct byte buffer copy (nd4j requires direct allocation
     * for byte buffers)
     * json: converts with a straight for loop (note that this only supports matrices)
*
* @param buffer the buffer to convert
* @param type the type of buffer
* @return the created ndarray
*/
public static INDArray toArray(Buffer buffer, String type) {
INDArray trueFeedback = null;
switch (type) {
case "numpy":
trueFeedback = Nd4j.createNpyFromByteArray(buffer.getBytes());
break;
case "nd4j":
ByteBuffer direct = ByteBuffer.allocateDirect(buffer.length());
direct.put(buffer.getBytes());
direct.rewind();
trueFeedback = BinarySerde.toArray(direct);
break;
case "json":
JsonArray jsonArray = new JsonArray(buffer.toString());
INDArray arr = Nd4j.create(jsonArray.size(), jsonArray.getJsonArray(0).size());
for (int i = 0; i < arr.rows(); i++) {
for (int j = 0; j < arr.columns(); j++) {
arr.putScalar(i, j, jsonArray.getJsonArray(i).getDouble(j));
}
}
trueFeedback = arr;
break;
default:
throw new IllegalArgumentException("Illegal type " + type);
}
return trueFeedback;
}
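    // Illustrative only (not part of the original class): a minimal sketch of the
    // "json" conversion path, turning a 2x2 JSON matrix held in a Buffer into an
    // INDArray.
    public static INDArray jsonExample() {
        Buffer buffer = Buffer.buffer("[[1.0, 2.0], [3.0, 4.0]]");
        return toArray(buffer, "json"); // yields a 2x2 INDArray
    }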
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util/image/Java2DNativeImageLoader.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util.image;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.datavec.image.transform.ImageTransform;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.awt.image.BufferedImage;
import java.io.IOException;
/**
* Segregates functionality specific to Java 2D that is not available on Android.
*
* @author saudet
*/
public class Java2DNativeImageLoader extends NativeImageLoader {
Java2DFrameConverter converter2 = new Java2DFrameConverter();
public Java2DNativeImageLoader() {
}
public Java2DNativeImageLoader(int height, int width) {
super(height, width);
}
public Java2DNativeImageLoader(int height, int width, int channels) {
super(height, width, channels);
}
public Java2DNativeImageLoader(int height, int width, int channels, boolean centerCropIfNeeded) {
super(height, width, channels, centerCropIfNeeded);
}
public Java2DNativeImageLoader(int height, int width, int channels, ImageTransform imageTransform) {
super(height, width, channels, imageTransform);
}
protected Java2DNativeImageLoader(NativeImageLoader other) {
super(other);
}
/**
* @param image the input image
* @return {@code asMatrix(image, false).ravel()}.
* @throws IOException if an error occurs loading the image
*/
public INDArray asRowVector(BufferedImage image) throws IOException {
return asMatrix(image, false).ravel();
}
/**
* @param image the input image
* @return {@code asMatrix(image, false)}.
* @throws IOException if an error occurs loading the image
*/
public INDArray asMatrix(BufferedImage image) throws IOException {
return asMatrix(image, false);
}
/**
* @param image the input image
* @param flipChannels whether to flip the channels or not
* @return {@code asMatrix(image, flipChannels).ravel()}.
* @throws IOException if an error occurs loading the image
*/
public INDArray asRowVector(BufferedImage image, boolean flipChannels) throws IOException {
return asMatrix(image, flipChannels).ravel();
}
/**
* Loads a {@link INDArray} from a {@link BufferedImage}.
*
* @param image as a BufferedImage
     * @param flipChannels whether to flip the channel order, for example to output a TYPE_INT_RGB (ARGB) image as BGRA
* @return the loaded matrix
* @throws IOException if an error occurs creating the {@link INDArray}
*/
public INDArray asMatrix(BufferedImage image, boolean flipChannels) throws IOException {
if (converter == null) {
converter = new OpenCVFrameConverter.ToMat();
}
return asMatrix(converter.convert(converter2.getFrame(image, 1.0, flipChannels)));
}
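    // Illustrative only (not part of the original class): a minimal sketch of loading
    // an image file into an INDArray via this loader. The file path is a hypothetical
    // placeholder.
    public INDArray exampleLoad() throws IOException {
        BufferedImage img = javax.imageio.ImageIO.read(new java.io.File("/path/to/image.png"));
        return asMatrix(img, false);
    }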
@Override
public INDArray asRowVector(Object image) throws IOException {
return image instanceof BufferedImage ? asRowVector((BufferedImage) image) : null;
}
@Override
public INDArray asMatrix(Object image) throws IOException {
return image instanceof BufferedImage ? asMatrix((BufferedImage) image) : null;
}
/**
* Converts an INDArray to a BufferedImage. Only intended for images with rank 3.
*
* @param array to convert
* @param dataType from JavaCV (DEPTH_FLOAT, DEPTH_UBYTE, etc), or -1 to use same type as the INDArray
* @return data copied to a Frame
*/
public BufferedImage asBufferedImage(INDArray array, int dataType) {
return converter2.convert(asFrame(array, dataType));
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/util/image/NativeImageLoader.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util.image;
import org.apache.commons.io.IOUtils;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.indexer.*;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.leptonica.PIX;
import org.bytedeco.leptonica.PIXA;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Size;
import org.datavec.image.data.Image;
import org.datavec.image.data.ImageWritable;
import org.datavec.image.loader.AndroidNativeImageLoader;
import org.datavec.image.loader.BaseImageLoader;
import org.datavec.image.transform.ImageTransform;
import org.nd4j.common.util.ArrayUtil;
import org.nd4j.linalg.api.concurrency.AffinityManager;
import org.nd4j.linalg.api.memory.pointers.PagedPointer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.exception.ND4JIllegalStateException;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.INDArrayIndex;
import org.nd4j.linalg.indexing.NDArrayIndex;
import java.io.*;
import java.nio.ByteOrder;
import static org.bytedeco.leptonica.global.lept.*;
import static org.bytedeco.opencv.global.opencv_core.*;
import static org.bytedeco.opencv.global.opencv_imgcodecs.*;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
/**
* Uses JavaCV to load images. Allowed formats: bmp, gif, jpg, jpeg, jp2, pbm, pgm, ppm, pnm, png, tif, tiff, exr, webp
*
* @author saudet
*/
public class NativeImageLoader extends BaseImageLoader {
public static final String[] ALLOWED_FORMATS = {"bmp", "gif", "jpg", "jpeg", "jp2", "pbm", "pgm", "ppm", "pnm",
"png", "tif", "tiff", "exr", "webp", "BMP", "GIF", "JPG", "JPEG", "JP2", "PBM", "PGM", "PPM", "PNM",
"PNG", "TIF", "TIFF", "EXR", "WEBP"};
private static final int MIN_BUFFER_STEP_SIZE = 64 * 1024;
protected OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
boolean direct = !Loader.getPlatform().startsWith("android");
private byte[] buffer = null;
private Mat bufferMat = null;
/**
* Loads images with no scaling or conversion.
*/
public NativeImageLoader() {
}
/**
     * Instantiate an image loader with the given
* height and width
*
* @param height the height to load
* @param width the width to load
*/
public NativeImageLoader(long height, long width) {
this.height = height;
this.width = width;
}
/**
     * Instantiate an image loader with the given
* height and width
*
* @param height the height to load
* @param width the width to load
     * @param channels the number of channels for the image
*/
public NativeImageLoader(long height, long width, long channels) {
this.height = height;
this.width = width;
this.channels = channels;
}
/**
     * Instantiate an image loader with the given
* height and width
*
* @param height the height to load
* @param width the width to load
     * @param channels the number of channels for the image
* @param centerCropIfNeeded to crop before rescaling and converting
*/
public NativeImageLoader(long height, long width, long channels, boolean centerCropIfNeeded) {
this(height, width, channels);
this.centerCropIfNeeded = centerCropIfNeeded;
}
/**
     * Instantiate an image loader with the given
* height and width
*
* @param height the height to load
* @param width the width to load
     * @param channels the number of channels for the image
* @param imageTransform to use before rescaling and converting
*/
public NativeImageLoader(long height, long width, long channels, ImageTransform imageTransform) {
this(height, width, channels);
this.imageTransform = imageTransform;
}
/**
     * Instantiate an image loader with the given
* height and width
*
* @param height the height to load
* @param width the width to load
     * @param channels the number of channels for the image
     * @param mode how to load a multipage image
*/
public NativeImageLoader(long height, long width, long channels, MultiPageMode mode) {
this(height, width, channels);
this.multiPageMode = mode;
}
protected NativeImageLoader(NativeImageLoader other) {
this.height = other.height;
this.width = other.width;
this.channels = other.channels;
this.centerCropIfNeeded = other.centerCropIfNeeded;
this.imageTransform = other.imageTransform;
this.multiPageMode = other.multiPageMode;
}
static Mat convert(PIX pix) {
PIX tempPix = null;
int dtype = -1;
int height = pix.h();
int width = pix.w();
Mat mat2;
if (pix.colormap() != null) {
PIX pix2 = pixRemoveColormap(pix, REMOVE_CMAP_TO_FULL_COLOR);
tempPix = pix = pix2;
dtype = CV_8UC4;
} else if (pix.d() <= 8 || pix.d() == 24) {
PIX pix2 = pix;
switch (pix.d()) {
case 1:
pix2 = pixConvert1To8(null, pix, (byte) 0, (byte) 255);
break;
case 2:
pix2 = pixConvert2To8(pix, (byte) 0, (byte) 85, (byte) 170, (byte) 255, 0);
break;
case 4:
pix2 = pixConvert4To8(pix, 0);
break;
case 8:
case 24:
pix2 = pix;
break;
default:
throw new IllegalStateException("Unrecognized pixel depth of " + pix.d());
}
tempPix = pix = pix2;
int channels = pix.d() / 8;
dtype = CV_8UC(channels);
Mat mat = new Mat(height, width, dtype, pix.data(), 4 * pix.wpl());
mat2 = new Mat(height, width, CV_8UC(channels));
// swap bytes if needed
int[] swap = {0, channels - 1, 1, channels - 2, 2, channels - 3, 3, channels - 4},
copy = {0, 0, 1, 1, 2, 2, 3, 3},
fromTo = channels > 1 && ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN) ? swap : copy;
            mixChannels(mat, 1, mat2, 1, fromTo, Math.min(channels, fromTo.length / 2));
            if (tempPix != null) {
                pixDestroy(tempPix);
            }
            return mat2;
} else if (pix.d() == 16) {
dtype = CV_16UC(pix.d() / 16);
} else if (pix.d() == 32) {
dtype = CV_32FC(pix.d() / 32);
}
mat2 = new Mat(height, width, dtype, pix.data());
if (tempPix != null) {
pixDestroy(tempPix);
}
return mat2;
}
@Override
public String[] getAllowedFormats() {
return ALLOWED_FORMATS;
}
/**
* Convert a file to a row vector
*
* @param filename the image to convert
* @return the flattened image
* @throws IOException if an error occurs creating the array
*/
public INDArray asRowVector(String filename) throws IOException {
return asRowVector(new File(filename));
}
/**
* Convert a file to a row vector
*
* @param f the image to convert
* @return the flattened image
* @throws IOException if an error occurs creating the array
*/
@Override
public INDArray asRowVector(File f) throws IOException {
return asMatrix(f).ravel();
}
/**
* Returns {@code asMatrix(image).ravel()}.
*
* @return {@code asMatrix(image).ravel()}.
* @throws IOException if an error occurs loading the image
* @see #asMatrix(Object)
*/
@Override
public INDArray asRowVector(InputStream is) throws IOException {
return asMatrix(is).ravel();
}
/**
* Returns {@code asMatrix(image).ravel()}.
*
* @param image the input image
* @return {@code asMatrix(image).ravel()}.
* @throws IOException if an error occurs loading the image
* @see #asMatrix(Object)
*/
public INDArray asRowVector(Object image) throws IOException {
return asMatrix(image).ravel();
}
/**
* Returns {@code asMatrix(image).ravel()}.
*
* @param image the input image
* @return {@code asMatrix(image).ravel()}.
* @throws IOException if an error occurs loading the image
* @see #asMatrix(Object)
*/
public INDArray asRowVector(Frame image) throws IOException {
return asMatrix(image).ravel();
}
/**
* Returns {@code asMatrix(image).ravel()}.
*
* @param image the input image
* @return {@code asMatrix(image).ravel()}.
* @throws IOException if an error occurs loading the image
* @see #asMatrix(Object)
*/
public INDArray asRowVector(Mat image) throws IOException {
INDArray arr = asMatrix(image);
return arr.reshape('c', 1, arr.length());
}
/**
* Returns {@code asMatrix(image).ravel()}.
*
* @param image the input image
* @return {@code asMatrix(image).ravel()}.
* @throws IOException if an error occurs creating the {@link INDArray}
* @see #asMatrix(Object)
*/
public INDArray asRowVector(org.opencv.core.Mat image) throws IOException {
INDArray arr = asMatrix(image);
return arr.reshape('c', 1, arr.length());
}
public INDArray asMatrix(String filename) throws IOException {
return asMatrix(new File(filename));
}
@Override
public INDArray asMatrix(File f) throws IOException {
try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f))) {
return asMatrix(bis);
}
}
@Override
public INDArray asMatrix(File f, boolean nchw) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public INDArray asMatrix(InputStream is) throws IOException {
Mat mat = streamToMat(is);
INDArray a;
if (this.multiPageMode != null) {
a = asMatrix(mat.data(), mat.cols());
} else {
Mat image = imdecode(mat, IMREAD_ANYDEPTH | IMREAD_ANYCOLOR);
if (image == null || image.empty()) {
PIX pix = pixReadMem(mat.data(), mat.cols());
if (pix == null) {
throw new IOException("Could not decode image from input stream");
}
image = convert(pix);
pixDestroy(pix);
}
a = asMatrix(image);
image.deallocate();
}
return a;
}
@Override
public INDArray asMatrix(InputStream inputStream, boolean nchw) throws IOException {
throw new UnsupportedOperationException();
}
/**
     * Read the stream fully into the internal buffer and return it wrapped as a Mat
     *
     * @param is Input stream to read
     * @return Mat with the buffer data as a row vector
     * @throws IOException if an error occurs reading the stream
*/
private Mat streamToMat(InputStream is) throws IOException {
if (buffer == null) {
buffer = IOUtils.toByteArray(is);
bufferMat = new Mat(buffer);
return bufferMat;
} else {
int numReadTotal = is.read(buffer);
//Need to know if all data has been read.
//(a) if numRead < buffer.length - got everything
//(b) if numRead >= buffer.length: we MIGHT have got everything (exact right size buffer) OR we need more data
if (numReadTotal < buffer.length) {
bufferMat.data().put(buffer, 0, numReadTotal);
bufferMat.cols(numReadTotal);
return bufferMat;
}
//Buffer is full; reallocate and keep reading
int numReadCurrent = numReadTotal;
while (numReadCurrent != -1) {
byte[] oldBuffer = buffer;
if (oldBuffer.length == Integer.MAX_VALUE) {
throw new IllegalStateException("Cannot read more than Integer.MAX_VALUE bytes");
}
                //Double the buffer size, but grow by at least MIN_BUFFER_STEP_SIZE (64KB)
long increase = Math.max(buffer.length, MIN_BUFFER_STEP_SIZE);
int newBufferLength = (int) Math.min(Integer.MAX_VALUE, buffer.length + increase);
buffer = new byte[newBufferLength];
System.arraycopy(oldBuffer, 0, buffer, 0, oldBuffer.length);
numReadCurrent = is.read(buffer, oldBuffer.length, buffer.length - oldBuffer.length);
if (numReadCurrent > 0) {
numReadTotal += numReadCurrent;
}
}
bufferMat = new Mat(buffer);
return bufferMat;
}
}
@Override
public Image asImageMatrix(File f) throws IOException {
try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f))) {
return asImageMatrix(bis);
}
}
@Override
public Image asImageMatrix(File f, boolean nchw) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public Image asImageMatrix(InputStream is) throws IOException {
Mat mat = streamToMat(is);
Mat image = imdecode(mat, IMREAD_ANYDEPTH | IMREAD_ANYCOLOR);
if (image == null || image.empty()) {
PIX pix = pixReadMem(mat.data(), mat.cols());
if (pix == null) {
throw new IOException("Could not decode image from input stream");
}
image = convert(pix);
pixDestroy(pix);
}
INDArray a = asMatrix(image);
Image i = new Image(a, image.channels(), image.rows(), image.cols());
image.deallocate();
return i;
}
@Override
public Image asImageMatrix(InputStream inputStream, boolean nchw) throws IOException {
throw new UnsupportedOperationException();
}
/**
* Calls {@link AndroidNativeImageLoader#asMatrix(org.opencv.core.Mat)} or
* {@link Java2DNativeImageLoader#asMatrix(java.awt.image.BufferedImage)}.
*
* @param image as an android bitmap or {@link java.awt.image.BufferedImage}
* @return the matrix or null for unsupported object classes
* @throws IOException if an error occurs creating the {@link INDArray}
*/
public INDArray asMatrix(Object image) throws IOException {
        INDArray array = null;
        try {
            array = new Java2DNativeImageLoader(this).asMatrix(image);
        } catch (NoClassDefFoundError e) {
            // ignore - Java 2D support is not available on this platform
        }
        return array;
}
protected void fillNDArray(Mat image, INDArray ret) {
long rows = image.rows();
long cols = image.cols();
long channels = image.channels();
if (ret.length() != rows * cols * channels) {
throw new ND4JIllegalStateException("INDArray provided to store image not equal to image: {channels: "
+ channels + ", rows: " + rows + ", columns: " + cols + "}");
}
Indexer idx = image.createIndexer(direct);
Pointer pointer = ret.data().pointer();
long[] stride = ret.stride();
boolean done = false;
PagedPointer pagedPointer = new PagedPointer(pointer, rows * cols * channels,
ret.data().offset() * Nd4j.sizeOfDataType(ret.data().dataType()));
if (pointer instanceof FloatPointer) {
FloatIndexer retidx = FloatIndexer.create(pagedPointer.asFloatPointer(),
new long[]{channels, rows, cols}, new long[]{stride[0], stride[1], stride[2]}, direct);
if (idx instanceof UByteIndexer) {
UByteIndexer ubyteidx = (UByteIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, ubyteidx.get(i, j, k));
}
}
}
done = true;
} else if (idx instanceof UShortIndexer) {
UShortIndexer ushortidx = (UShortIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, ushortidx.get(i, j, k));
}
}
}
done = true;
} else if (idx instanceof IntIndexer) {
IntIndexer intidx = (IntIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, intidx.get(i, j, k));
}
}
}
done = true;
} else if (idx instanceof FloatIndexer) {
FloatIndexer floatidx = (FloatIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, floatidx.get(i, j, k));
}
}
}
done = true;
}
retidx.release();
} else if (pointer instanceof DoublePointer) {
DoubleIndexer retidx = DoubleIndexer.create(pagedPointer.asDoublePointer(),
new long[]{channels, rows, cols}, new long[]{stride[0], stride[1], stride[2]}, direct);
if (idx instanceof UByteIndexer) {
UByteIndexer ubyteidx = (UByteIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, ubyteidx.get(i, j, k));
}
}
}
done = true;
} else if (idx instanceof UShortIndexer) {
UShortIndexer ushortidx = (UShortIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, ushortidx.get(i, j, k));
}
}
}
done = true;
} else if (idx instanceof IntIndexer) {
IntIndexer intidx = (IntIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, intidx.get(i, j, k));
}
}
}
done = true;
} else if (idx instanceof FloatIndexer) {
FloatIndexer floatidx = (FloatIndexer) idx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
retidx.put(k, i, j, floatidx.get(i, j, k));
}
}
}
done = true;
}
retidx.release();
}
if (!done) {
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
if (ret.rank() == 3) {
ret.putScalar(k, i, j, idx.getDouble(i, j, k));
} else if (ret.rank() == 4) {
                        ret.putScalar(0, k, i, j, idx.getDouble(i, j, k));
} else if (ret.rank() == 2) {
ret.putScalar(i, j, idx.getDouble(i, j));
} else
throw new ND4JIllegalStateException("NativeImageLoader expects 2D, 3D or 4D output array, but " + ret.rank() + "D array was given");
}
}
}
}
idx.release();
image.data();
Nd4j.getAffinityManager().tagLocation(ret, AffinityManager.Location.HOST);
}
public void asMatrixView(InputStream is, INDArray view) throws IOException {
Mat mat = streamToMat(is);
Mat image = imdecode(mat, IMREAD_ANYDEPTH | IMREAD_ANYCOLOR);
if (image == null || image.empty()) {
PIX pix = pixReadMem(mat.data(), mat.cols());
if (pix == null) {
throw new IOException("Could not decode image from input stream");
}
image = convert(pix);
pixDestroy(pix);
}
        if (image == null)
            throw new IOException("Could not decode image from input stream");
asMatrixView(image, view);
image.deallocate();
}
public void asMatrixView(String filename, INDArray view) throws IOException {
asMatrixView(new File(filename), view);
}
public void asMatrixView(File f, INDArray view) throws IOException {
try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f))) {
asMatrixView(bis, view);
}
}
public void asMatrixView(Mat image, INDArray view) throws IOException {
transformImage(image, view);
}
public void asMatrixView(org.opencv.core.Mat image, INDArray view) throws IOException {
transformImage(image, view);
}
public INDArray asMatrix(Frame image) throws IOException {
return asMatrix(converter.convert(image));
}
public INDArray asMatrix(org.opencv.core.Mat image) throws IOException {
INDArray ret = transformImage(image, null);
return ret.reshape(ArrayUtil.combine(new long[]{1}, ret.shape()));
}
public INDArray asMatrix(Mat image) throws IOException {
INDArray ret = transformImage(image, null);
return ret.reshape(ArrayUtil.combine(new long[]{1}, ret.shape()));
}
protected INDArray transformImage(org.opencv.core.Mat image, INDArray ret) throws IOException {
Frame f = converter.convert(image);
return transformImage(converter.convert(f), ret);
}
protected INDArray transformImage(Mat image, INDArray ret) throws IOException {
if (imageTransform != null && converter != null) {
ImageWritable writable = new ImageWritable(converter.convert(image));
writable = imageTransform.transform(writable);
image = converter.convert(writable.getFrame());
}
Mat image2 = null, image3 = null, image4 = null;
if (channels > 0 && image.channels() != channels) {
int code = -1;
switch (image.channels()) {
case 1:
switch ((int) channels) {
case 3:
code = CV_GRAY2BGR;
break;
case 4:
code = CV_GRAY2RGBA;
break;
}
break;
case 3:
switch ((int) channels) {
case 1:
code = CV_BGR2GRAY;
break;
case 4:
code = CV_BGR2RGBA;
break;
}
break;
case 4:
switch ((int) channels) {
case 1:
code = CV_RGBA2GRAY;
break;
case 3:
code = CV_RGBA2BGR;
break;
}
break;
}
if (code < 0) {
throw new IOException("Cannot convert from " + image.channels() + " to " + channels + " channels.");
}
image2 = new Mat();
cvtColor(image, image2, code);
image = image2;
}
if (centerCropIfNeeded) {
image3 = centerCropIfNeeded(image);
if (image3 != image) {
image = image3;
} else {
image3 = null;
}
}
image4 = scalingIfNeed(image);
if (image4 != image) {
image = image4;
} else {
image4 = null;
}
if (ret == null) {
int rows = image.rows();
int cols = image.cols();
int channels = image.channels();
ret = Nd4j.create(channels, rows, cols);
}
fillNDArray(image, ret);
image.data(); // dummy call to make sure it does not get deallocated prematurely
if (image2 != null) {
image2.deallocate();
}
if (image3 != null) {
image3.deallocate();
}
if (image4 != null) {
image4.deallocate();
}
return ret;
}
protected Mat centerCropIfNeeded(Mat img) {
int x = 0;
int y = 0;
int height = img.rows();
int width = img.cols();
int diff = Math.abs(width - height) / 2;
if (width > height) {
x = diff;
width = width - diff;
} else if (height > width) {
y = diff;
height = height - diff;
}
return img.apply(new Rect(x, y, width, height));
}
protected Mat scalingIfNeed(Mat image) {
return scalingIfNeed(image, height, width);
}
protected Mat scalingIfNeed(Mat image, long dstHeight, long dstWidth) {
Mat scaled = image;
if (dstHeight > 0 && dstWidth > 0 && (image.rows() != dstHeight || image.cols() != dstWidth)) {
resize(image, scaled = new Mat(), new Size(
(int) Math.min(dstWidth, Integer.MAX_VALUE),
(int) Math.min(dstHeight, Integer.MAX_VALUE)));
}
return scaled;
}
public ImageWritable asWritable(String filename) throws IOException {
return asWritable(new File(filename));
}
/**
     * Convert an {@link InputStream} to an {@link ImageWritable}
*
* @param is the {@link InputStream} to read
* @return {@link ImageWritable} representing the image
* @throws IOException if an error creating the image occurs
*/
public ImageWritable asWritable(InputStream is) throws IOException {
try (BufferedInputStream bis = new BufferedInputStream(is)) {
Mat mat = streamToMat(bis);
Mat image = imdecode(mat, IMREAD_ANYDEPTH | IMREAD_ANYCOLOR);
if (image == null || image.empty()) {
PIX pix = pixReadMem(mat.data(), mat.cols());
if (pix == null) {
throw new IOException("Could not decode image from input stream");
}
image = convert(pix);
pixDestroy(pix);
}
ImageWritable writable = new ImageWritable(converter.convert(image));
return writable;
}
}
/**
     * Convert a file to an {@link ImageWritable}
     *
     * @param f the image file to convert
     * @return {@link ImageWritable} representing the image
* @throws IOException if an error occurs creating the image
*/
public ImageWritable asWritable(File f) throws IOException {
return asWritable(new FileInputStream(f));
}
/**
* Convert ImageWritable to INDArray
*
* @param writable ImageWritable to convert
* @return INDArray
     * @throws IOException if an error occurs creating the array
*/
public INDArray asMatrix(ImageWritable writable) throws IOException {
Mat image = converter.convert(writable.getFrame());
return asMatrix(image);
}
/**
* @param array the input array
* @return {@code asFrame(array, -1)}.
*/
public Frame asFrame(INDArray array) {
return converter.convert(asMat(array));
}
/**
* Converts an INDArray to a JavaCV Frame. Only intended for images with rank 3.
*
* @param array to convert
* @param dataType from JavaCV (DEPTH_FLOAT, DEPTH_UBYTE, etc), or -1 to use same type as the INDArray
* @return data copied to a Frame
*/
public Frame asFrame(INDArray array, int dataType) {
return converter.convert(asMat(array, OpenCVFrameConverter.getMatDepth(dataType)));
}
/**
* Returns {@code asMat(array, -1)}.
*
* @param array the input array
* @return result for {@code asMat(array, -1)}
*/
public Mat asMat(INDArray array) {
return asMat(array, -1);
}
/**
* Converts an INDArray to an OpenCV Mat. Only intended for images with rank 3.
*
* @param array to convert
* @param dataType from OpenCV (CV_32F, CV_8U, etc), or -1 to use same type as the INDArray
* @return data copied to a Mat
*/
public Mat asMat(INDArray array, int dataType) {
if (array.rank() > 4 || (array.rank() > 3 && array.size(0) != 1)) {
throw new UnsupportedOperationException("Only rank 3 (or rank 4 with size(0) == 1) arrays supported");
}
int rank = array.rank();
long[] stride = array.stride();
long offset = array.data().offset();
Pointer pointer = array.data().pointer().position(offset);
long rows = array.size(rank == 3 ? 1 : 2);
long cols = array.size(rank == 3 ? 2 : 3);
long channels = array.size(rank == 3 ? 0 : 1);
boolean done = false;
if (dataType < 0) {
dataType = pointer instanceof DoublePointer ? CV_64F : CV_32F;
}
Mat mat = new Mat((int) Math.min(rows, Integer.MAX_VALUE), (int) Math.min(cols, Integer.MAX_VALUE),
CV_MAKETYPE(dataType, (int) Math.min(channels, Integer.MAX_VALUE)));
Indexer matidx = mat.createIndexer(direct);
Nd4j.getAffinityManager().ensureLocation(array, AffinityManager.Location.HOST);
if (pointer instanceof FloatPointer && dataType == CV_32F) {
FloatIndexer ptridx = FloatIndexer.create((FloatPointer) pointer, new long[]{channels, rows, cols},
new long[]{stride[rank == 3 ? 0 : 1], stride[rank == 3 ? 1 : 2], stride[rank == 3 ? 2 : 3]}, direct);
FloatIndexer idx = (FloatIndexer) matidx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
idx.put(i, j, k, ptridx.get(k, i, j));
}
}
}
done = true;
ptridx.release();
} else if (pointer instanceof DoublePointer && dataType == CV_64F) {
DoubleIndexer ptridx = DoubleIndexer.create((DoublePointer) pointer, new long[]{channels, rows, cols},
new long[]{stride[rank == 3 ? 0 : 1], stride[rank == 3 ? 1 : 2], stride[rank == 3 ? 2 : 3]}, direct);
DoubleIndexer idx = (DoubleIndexer) matidx;
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
idx.put(i, j, k, ptridx.get(k, i, j));
}
}
}
done = true;
ptridx.release();
}
if (!done) {
for (long k = 0; k < channels; k++) {
for (long i = 0; i < rows; i++) {
for (long j = 0; j < cols; j++) {
if (rank == 3) {
matidx.putDouble(new long[]{i, j, k}, array.getDouble(k, i, j));
} else {
matidx.putDouble(new long[]{i, j, k}, array.getDouble(0, k, i, j));
}
}
}
}
}
matidx.release();
return mat;
}
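    // Illustrative only (not part of the original class): a minimal round-trip sketch
    // (INDArray -> Mat -> INDArray) using asMat and asMatrix. The shape values are
    // arbitrary example numbers; asMat expects a rank 3 (CHW) array.
    public INDArray exampleRoundTrip() throws IOException {
        INDArray chw = Nd4j.rand(new int[]{3, 32, 32}); // channels, rows, cols
        Mat mat = asMat(chw, CV_32F);
        return asMatrix(mat); // rank 4: [1, 3, 32, 32]
    }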
/**
     * Read a multipage tiff from the given bytes and load it into an INDArray
     *
     * @param bytes pointer to the raw tiff bytes
     * @param length number of bytes to read
     * @return INDArray with the image data
     * @throws IOException if an error occurs reading the image
*/
private INDArray asMatrix(BytePointer bytes, long length) throws IOException {
PIXA pixa;
pixa = pixaReadMemMultipageTiff(bytes, length);
INDArray data;
INDArray currentD;
INDArrayIndex[] index = null;
switch (this.multiPageMode) {
case MINIBATCH:
data = Nd4j.create(pixa.n(), 1, 1, pixa.pix(0).h(), pixa.pix(0).w());
break;
// case CHANNELS:
// data = Nd4j.create(1, pixa.n(), 1, pixa.pix(0).h(), pixa.pix(0).w());
// break;
case FIRST:
data = Nd4j.create(1, 1, 1, pixa.pix(0).h(), pixa.pix(0).w());
PIX pix = pixa.pix(0);
currentD = asMatrix(convert(pix));
pixDestroy(pix);
index = new INDArrayIndex[]{NDArrayIndex.point(0), NDArrayIndex.point(0), NDArrayIndex.point(0),
NDArrayIndex.all(), NDArrayIndex.all()};
data.put(index, currentD.get(NDArrayIndex.all(), NDArrayIndex.all(),
NDArrayIndex.all(), NDArrayIndex.all()));
return data;
default:
throw new UnsupportedOperationException("Unsupported MultiPageMode: " + multiPageMode);
}
for (int i = 0; i < pixa.n(); i++) {
PIX pix = pixa.pix(i);
currentD = asMatrix(convert(pix));
pixDestroy(pix);
switch (this.multiPageMode) {
case MINIBATCH:
index = new INDArrayIndex[]{NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all()};
break;
// case CHANNELS:
// index = new INDArrayIndex[]{NDArrayIndex.all(), NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all(),NDArrayIndex.all()};
// break;
default:
throw new UnsupportedOperationException("Unsupported MultiPageMode: " + multiPageMode);
}
data.put(index, currentD.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all()));
}
return data;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-core/0.3.0/ai/konduit/serving/verticles/package-info.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.verticles;
/**
* This package contains {@link io.vertx.core.Verticle} implementations meant to be standalone servers.
* These classes can be invoked from the command line via {@link ai.konduit.serving.launcher.KonduitServingLauncher}
* <p>
* All verticles in this package implement the same idea of some form of scoring of arbitrary
* input over a network. The verticles have the same building blocks for the following workflow:
* <p>
 * 1. network input (multipart or JSON) as a {@link io.vertx.core.buffer.Buffer}
* 2. 1 or more {@link ai.konduit.serving.pipeline.handlers.converter.multi.InputAdapter} for conversion
 * 3. Output from the {@link ai.konduit.serving.pipeline.handlers.converter.multi.InputAdapter} is fed into an {@link ai.konduit.serving.executioner.inference.InferenceExecutioner}
 * 4. Output from the {@link ai.konduit.serving.executioner.inference.InferenceExecutioner} goes into 1 or more {@link ai.konduit.serving.output.adapter.OutputAdapter}
 * for return by the verticle to the user as easily consumable JSON or binary.
* <p>
* <p>
* Other notes:
* {@link ai.konduit.serving.executioner.inference.InferenceExecutioner} uses {@link ai.konduit.serving.model.loader.ModelLoader}
 * implementations to load model configurations from disk or other data sources.
* <p>
 * Implementations of {@link io.vertx.core.Verticle} performing inference should use these as building blocks
 * for I/O and scoring data.
* <p>
* Typically, extensions to the core may include new {@link ai.konduit.serving.pipeline.handlers.converter.multi.InputAdapter}
* and {@link ai.konduit.serving.output.adapter.OutputAdapter} for various use cases.
**/
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models/deeplearning4j/DL4JModuleInfo.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.models.deeplearning4j;
import ai.konduit.serving.annotation.module.InheritRequiredDependencies;
import ai.konduit.serving.annotation.module.ModuleInfo;
@ModuleInfo("konduit-serving-deeplearning4j")
@InheritRequiredDependencies("konduit-serving-nd4j")
public class DL4JModuleInfo {
private DL4JModuleInfo(){ }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models/deeplearning4j/KonduitServingDeeplearning4jJsonMapping.java
|
package ai.konduit.serving.models.deeplearning4j;
import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;
import java.util.ArrayList;
import java.util.List;
//GENERATED CLASS DO NOT EDIT
public class KonduitServingDeeplearning4jJsonMapping implements JsonSubTypesMapping {
    @Override
public List<JsonSubType> getSubTypesMapping() {
List<JsonSubType> l = new ArrayList<>();
return l;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models/deeplearning4j
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models/deeplearning4j/step/DL4JPipelineStepRunnerFactory.java
|
/* ******************************************************************************
* Copyright (c) 2022 Konduit K.K.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package ai.konduit.serving.models.deeplearning4j.step;
import ai.konduit.serving.models.deeplearning4j.step.keras.KerasStep;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;
public class DL4JPipelineStepRunnerFactory implements PipelineStepRunnerFactory {
@Override
public boolean canRun(PipelineStep pipelineStep) {
return pipelineStep instanceof DL4JStep || pipelineStep instanceof KerasStep;
}
@Override
public PipelineStepRunner create(PipelineStep pipelineStep) {
Preconditions.checkState(canRun(pipelineStep), "Unable to run pipeline step: %s", pipelineStep.getClass());
if(pipelineStep instanceof KerasStep){
KerasStep ps = (KerasStep) pipelineStep;
return new DL4JRunner(ps);
} else {
DL4JStep ps = (DL4JStep) pipelineStep;
return new DL4JRunner(ps);
}
}
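    // Illustrative only (not part of the original class): a minimal sketch of using
    // this factory directly. In practice, factories are typically discovered and
    // invoked by the pipeline framework rather than instantiated by hand.
    public static PipelineStepRunner exampleCreate(PipelineStep step) {
        DL4JPipelineStepRunnerFactory factory = new DL4JPipelineStepRunnerFactory();
        return factory.canRun(step) ? factory.create(step) : null;
    }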
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models/deeplearning4j
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j/0.3.0/ai/konduit/serving/models/deeplearning4j/step/DL4JRunner.java
|
/* ******************************************************************************
* Copyright (c) 2022 Konduit K.K.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package ai.konduit.serving.models.deeplearning4j.step;
import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.models.deeplearning4j.step.keras.KerasStep;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.NDArray;
import ai.konduit.serving.pipeline.api.exception.ModelLoadingException;
import ai.konduit.serving.pipeline.api.protocol.URIResolver;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.impl.data.JData;
import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.modelimport.keras.KerasModel;
import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException;
import org.deeplearning4j.nn.modelimport.keras.exceptions.UnsupportedKerasConfigurationException;
import org.deeplearning4j.nn.modelimport.keras.utils.KerasModelBuilder;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.util.DL4JModelValidator;
import org.nd4j.common.base.Preconditions;
import org.nd4j.common.validation.ValidationResult;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
@Slf4j
@CanRun({DL4JStep.class, KerasStep.class})
public class DL4JRunner implements PipelineStepRunner {
public static final String DEFAULT_OUT_NAME_SINGLE = "default";
private DL4JStep step;
private KerasStep kStep;
private MultiLayerNetwork net;
private ComputationGraph graph;
public DL4JRunner(KerasStep step) {
this.kStep = step;
KerasModelBuilder b;
try{
File f = URIResolver.getFile(step.modelUri());
String path = f.isAbsolute() ? f.getAbsolutePath() : f.getPath();
b = new KerasModel().modelBuilder().modelHdf5Filename(path)
.enforceTrainingConfig(false);
} catch (IOException e){
throw new ModelLoadingException("Failed to load Keras model", e);
} catch (InvalidKerasConfigurationException | UnsupportedKerasConfigurationException e) {
throw new ModelLoadingException("Failed to load Keras model: model file is invalid or can't be loaded" +
" by DL4JRunner", e);
}
try {
graph = b.buildModel().getComputationGraph();
net = null;
} catch (UnsupportedKerasConfigurationException e){
throw new RuntimeException("Unsupported Keras layer found in model", e);
} catch (Throwable t) {
if (t.getMessage() != null && t.getMessage().toLowerCase().contains("sequential")) {
try {
net = b.buildSequential().getMultiLayerNetwork();
graph = null;
} catch (Throwable t2) {
throw new ModelLoadingException("Failed to load Keras Sequential model: model file is invalid or can't be loaded" +
" by DL4JRunner", t2);
}
} else {
throw new ModelLoadingException("Failed to load Keras model: model file is invalid or can't be loaded" +
" by DL4JRunner", t);
}
}
}
public DL4JRunner(DL4JStep step) {
this.step = step;
if(this.step.loaderClass() != null) {
//TODO this probably won't work for OSGi due to Class.forName
Function<String,Object> fn;
try{
Class<?> c = Class.forName(this.step.loaderClass());
fn = (Function<String, Object>) c.newInstance();
} catch (ClassNotFoundException e){
throw new ModelLoadingException("DL4JStep: loaderClass=\"" + this.step.loaderClass() + "\" was provided but no " +
"class with this name exists", e);
} catch (IllegalAccessException | InstantiationException e) {
throw new ModelLoadingException("DL4JStep: loaderClass=\"" + this.step.loaderClass() + "\" was provided but an " +
"instance of this class could not be constructed", e);
}
Object o = fn.apply(step.modelUri());
if(o instanceof MultiLayerNetwork){
net = (MultiLayerNetwork) o;
graph = null;
} else if(o instanceof ComputationGraph) {
net = null;
graph = (ComputationGraph) o;
} else {
throw new ModelLoadingException("DL4JStep: loaderClass=\"" + this.step.loaderClass() + "\" return " +
(o == null ? "null" : o.getClass().getName()) + " not a MultiLayerNetwork / ComputationGraph");
}
} else {
File f;
try {
f = URIResolver.getFile(step.modelUri());
} catch (IOException e) {
throw new ModelLoadingException("Failed to load Deeplearning4J model (MultiLayerNetwork or ComputationGraph) from URI " + step.modelUri(), e);
}
Preconditions.checkState(f.exists() && f.isFile(), "Could not load MultiLayerNetwork/ComputationGraph from URI {}, file path {}:" +
" file does not exist", step.modelUri(), f.getAbsolutePath());
ValidationResult vmln = DL4JModelValidator.validateMultiLayerNetwork(f);
ValidationResult vcg = DL4JModelValidator.validateComputationGraph(f);
boolean isMLN = vmln.isValid();
boolean isCG = !isMLN && vcg.isValid();
if(!(isMLN || isCG)){
StringBuilder sb = new StringBuilder("Model at URI " + step.modelUri() + " is not a valid MultiLayerNetwork or ComputationGraph model.\n");
sb.append("Attempt to load as MultiLayerNetwork: \n");
sb.append("Issues: ").append(vmln.getIssues()).append("\n");
if(vmln.getException() != null) {
StringWriter sw = new StringWriter();
vmln.getException().printStackTrace(new PrintWriter(sw));
sb.append(sw.toString());
sb.append("\n");
}
sb.append("Attempt to load as ComputationGraph: \n");
sb.append("Issues: ").append(vcg.getIssues());
if(vcg.getException() != null) {
StringWriter sw = new StringWriter();
vcg.getException().printStackTrace(new PrintWriter(sw));
sb.append(sw.toString());
sb.append("\n");
}
throw new IllegalStateException(sb.toString());
}
if (isMLN) {
try {
net = MultiLayerNetwork.load(f, false);
graph = null;
} catch (IOException e) {
throw new ModelLoadingException("Failed to load Deeplearning4J MultiLayerNetwork from URI " + step.modelUri(), e);
}
} else {
try {
graph = ComputationGraph.load(f, false);
net = null;
} catch (IOException e) {
throw new ModelLoadingException("Failed to load Deeplearning4J ComputationGraph from URI " + step.modelUri(), e);
}
}
}
Nd4j.getExecutioner().enableDebugMode(step.debugMode());
Nd4j.getExecutioner().enableVerboseMode(step.verboseMode());
}
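    // Illustrative only (not part of the original class): a minimal sketch of a custom
    // loader that could be referenced via DL4JStep.loaderClass(). The class name is a
    // hypothetical example; it must be a Function<String, Object> returning a
    // MultiLayerNetwork or ComputationGraph, and needs a public no-arg constructor.
    public static class ExampleModelLoader implements Function<String, Object> {
        @Override
        public Object apply(String modelUri) {
            try {
                return MultiLayerNetwork.load(new File(java.net.URI.create(modelUri)), false);
            } catch (IOException e) {
                throw new ModelLoadingException("Failed to load model from " + modelUri, e);
            }
        }
    }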
@Override
public void close() {
try {
if (net != null) {
net.close();
} else {
graph.close();
}
} catch (Throwable t) {
log.warn("Error when closing model", t);
}
}
@Override
public PipelineStep getPipelineStep() {
return step != null ? step : kStep;
}
@Override
public Data exec(Context ctx, Data data) {
//First: Get array
//TODO HANDLE DIFFERENT NAMES (Not hardcoded)
int numInputs = net != null ? 1 : graph.getNumInputArrays();
Preconditions.checkArgument(numInputs == data.size(), "Expected %s inputs to DL4JStep but got Data instance with %s inputs (keys: %s)",
numInputs, data.size(), data.keys());
if (net != null) {
INDArray arr = getOnlyArray(data);
INDArray out;
synchronized (net) {
out = net.output(arr);
}
String outName = outputName();
return Data.singleton(outName, NDArray.create(out));
} else {
INDArray[] input;
if (numInputs == 1) {
input = new INDArray[]{getOnlyArray(data)};
} else {
//TODO make configurable input names/order
if (step.inputNames() != null) {
input = new INDArray[numInputs];
int i = 0;
for (String s : step.inputNames()) {
input[i++] = (INDArray) data.getNDArray(s).get(); //TODO FIX NDARRAY
}
} else {
//Configuration does not have names specified
//See if model input names matches data
List<String> networkInputs = graph.getConfiguration().getNetworkInputs();
if (data.hasAll(networkInputs)) {
input = new INDArray[numInputs];
int i = 0;
for (String s : networkInputs) {
input[i++] = (INDArray) data.getNDArray(s).get(); //TODO FIX NDARRAY
}
} else {
throw new IllegalStateException("Network has " + numInputs + " inputs, but no Data input names were specified." +
" Attempting to infer input names also failed: Model has input names " + networkInputs + " but Data object has keys " + data.keys());
}
}
}
INDArray[] out;
synchronized (graph) {
out = graph.output(input);
}
//Work out output names
List<String> outNames;
if (outputNames() != null) {
outNames = outputNames();
} else {
if (out.length == 1) {
outNames = Collections.singletonList(DEFAULT_OUT_NAME_SINGLE);
} else {
outNames = graph.getConfiguration().getNetworkOutputs();
}
}
Preconditions.checkState(outNames.size() == out.length);
JData.DataBuilder b = JData.builder();
for (int i = 0; i < out.length; i++) {
b.add(outNames.get(i), NDArray.create(out[i]));
}
return b.build();
}
}
private List<String> outputNames() {
return step != null ? step.outputNames() : kStep.outputNames();
}
private String outputName() {
if (step != null) {
return step.outputNames() == null || step.outputNames().isEmpty() ? DEFAULT_OUT_NAME_SINGLE : step.outputNames().get(0);
} else {
return kStep.outputNames() == null || kStep.outputNames().isEmpty() ? DEFAULT_OUT_NAME_SINGLE : kStep.outputNames().get(0);
}
}
private INDArray getOnlyArray(Data data) {
//TODO Fix NDArray
String key = data.keys().get(0);
NDArray array = data.getNDArray(key);
        INDArray out = array.getAs(INDArray.class); //TODO NO CAST
return out;
}
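    // Illustrative only (not part of the original class): a minimal sketch of invoking
    // this runner on a single-array Data instance. The key name and shape are arbitrary
    // example values; a MultiLayerNetwork-backed runner expects exactly one input.
    public Data exampleExec() {
        INDArray in = Nd4j.rand(1, 10);
        Data input = Data.singleton("default", NDArray.create(in));
        return exec(null, input);
    }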
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models/deeplearning4j/DL4JConfigModuleInfo.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.models.deeplearning4j;
import ai.konduit.serving.annotation.module.InheritRequiredDependencies;
import ai.konduit.serving.annotation.module.ModuleInfo;
@ModuleInfo("konduit-serving-deeplearning4j-config")
public class DL4JConfigModuleInfo {
private DL4JConfigModuleInfo(){ }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models/deeplearning4j/KonduitServingDeeplearning4jJsonMapping.java
|
package ai.konduit.serving.models.deeplearning4j;
import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;
import java.util.ArrayList;
import java.util.List;
//GENERATED CLASS DO NOT EDIT
public class KonduitServingDeeplearning4jJsonMapping implements JsonSubTypesMapping {
    @Override
public List<JsonSubType> getSubTypesMapping() {
List<JsonSubType> l = new ArrayList<>();
l.add(new JsonSubType("DEEPLEARNING4J", ai.konduit.serving.models.deeplearning4j.step.DL4JStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
l.add(new JsonSubType("KERAS", ai.konduit.serving.models.deeplearning4j.step.keras.KerasStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
return l;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models/deeplearning4j
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models/deeplearning4j/step/DL4JStep.java
|
/* ******************************************************************************
* Copyright (c) 2022 Konduit K.K.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package ai.konduit.serving.models.deeplearning4j.step;
import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import lombok.experimental.Tolerate;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.util.Arrays;
import java.util.List;
@Data
@NoArgsConstructor
@Accessors(fluent = true)
@JsonName("DEEPLEARNING4J")
@Schema(description = "A pipeline step that configures a DL4J model that is to be executed.")
public class DL4JStep implements PipelineStep {
@Schema(description = "Specifies the location of a saved model file.")
private String modelUri;
@Schema(description = "A list of names of the input placeholders (mainly for DL4J - computation graph, with multiple inputs. Where values from the input data keys are mapped to " +
"the computation graph inputs).")
private List<String> inputNames;
@Schema(description = "A list of names of the output placeholders (mainly for DL4J - computation graph, with multiple outputs. Where the values of these output keys are mapped " +
"from the computation graph output - INDArray[] to data keys).")
private List<String> outputNames;
@Schema(description = "Optional, usually unnecessary. Specifies a class used to load the model if customization in how " +
"model loading is performed, instead of the usual MultiLayerNetwork.load or ComputationGraph.load methods. " +
"Must be a java.util.Function<String,MultiLayerNetwork> or java.util.Function<String,ComputationGraph>")
private String loaderClass;
@Schema(description = "Enable debug mode, defaults to false")
private boolean debugMode = false;
@Schema(description = "Enable verbose mode, defaults to false")
private boolean verboseMode = false;
public DL4JStep(@JsonProperty("modelUri") String modelUri,
@JsonProperty("inputNames") List<String> inputNames,
@JsonProperty("outputNames") List<String> outputNames,
@JsonProperty("debugMode") boolean debugMode,
@JsonProperty("verboseMode") boolean verboseMode){
this.modelUri = modelUri;
this.inputNames = inputNames;
this.outputNames = outputNames;
this.debugMode = debugMode;
this.verboseMode = verboseMode;
}
public DL4JStep(String modelUri,
List<String> inputNames,
List<String> outputNames
) {
this(modelUri,inputNames,outputNames,false,false);
}
@Tolerate
public DL4JStep inputNames(String... inputNames) {
return this.inputNames(Arrays.asList(inputNames));
}
@Tolerate
public DL4JStep outputNames(String... outputNames) {
return this.outputNames(Arrays.asList(outputNames));
}
public DL4JStep verboseMode(boolean verboseMode) {
this.verboseMode = verboseMode;
return this;
}
public DL4JStep debugMode(boolean debugMode) {
this.debugMode = debugMode;
return this;
}
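    // Illustrative only (not part of the original class): a minimal sketch of
    // configuring this step via the Lombok fluent accessors. The model URI is a
    // hypothetical placeholder.
    public static DL4JStep exampleStep() {
        return new DL4JStep()
                .modelUri("file:///path/to/model.zip")
                .inputNames("in")
                .outputNames("out");
    }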
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models/deeplearning4j/step
|
java-sources/ai/konduit/serving/konduit-serving-deeplearning4j-config/0.3.0/ai/konduit/serving/models/deeplearning4j/step/keras/KerasStep.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.models.deeplearning4j.step.keras;
import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import lombok.experimental.Tolerate;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.util.Arrays;
import java.util.List;
@Data
@Accessors(fluent = true)
@NoArgsConstructor
@JsonName("KERAS")
@Schema(description = "A pipeline step that configures a Keras model that is to be executed.")
public class KerasStep implements PipelineStep {
@Schema(description = "Specifies the location of a saved model file.")
private String modelUri;
@Schema(description = "A list of names of the input placeholders (mainly for DL4J - computation graph, with multiple inputs. Where values from the input data keys are mapped to " +
"the computation graph inputs).")
private List<String> inputNames;
@Schema(description = "A list of names of the output placeholders (mainly for DL4J - computation graph, with multiple outputs. Where the values of these output keys are mapped " +
"from the computation graph output - INDArray[] to data keys).")
private List<String> outputNames;
public KerasStep(@JsonProperty("modelUri") String modelUri, @JsonProperty("inputNames") List<String> inputNames,
@JsonProperty("outputNames") List<String> outputNames){
this.modelUri = modelUri;
this.inputNames = inputNames;
this.outputNames = outputNames;
}
public KerasStep(String modelUri){
this.modelUri = modelUri;
}
@Tolerate
public KerasStep inputNames(String... inputNames) {
return this.inputNames(Arrays.asList(inputNames));
}
@Tolerate
public KerasStep outputNames(String... outputNames) {
return this.outputNames(Arrays.asList(outputNames));
}
}
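/*
* Usage sketch (the .h5 path is a hypothetical placeholder): a Keras model
* with a single input and output usually needs only the model location; the
* varargs overloads add input/output names where required:
*
*   KerasStep step = new KerasStep("file:///models/mnist.h5")
*       .inputNames("input_1")
*       .outputNames("dense_2");
*/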
|
0
|
java-sources/ai/konduit/serving/konduit-serving-endpoint/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-endpoint/0.3.0/ai/konduit/serving/endpoint/AssetServingEndpoint.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.endpoint;
import io.swagger.v3.oas.annotations.media.Schema;
import io.vertx.core.Handler;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.http.impl.MimeMapping;
import io.vertx.ext.web.RoutingContext;
import org.apache.commons.io.FilenameUtils;
import java.util.List;
/**
* A base {@link Endpoint} implementation for serving static file assets via HTTP GET.
*/
@Schema(description = "A base object for configuring a custom endpoint to serve assets.")
public abstract class AssetServingEndpoint implements Endpoint {
@Schema(description = "Endpoint http path.")
protected final String httpPath;
@Schema(description = "Asset file path.")
protected final String fileAssetPath;
public AssetServingEndpoint(String httpPath, String fileAssetPath){
this.httpPath = httpPath;
this.fileAssetPath = fileAssetPath;
}
@Override
public HttpMethod type() {
return HttpMethod.GET;
}
@Override
public String path() {
return httpPath;
}
@Override
public List<String> consumes() {
//Null for GET method
return null;
}
@Override
public List<String> produces() {
return null;
}
@Override
public Handler<RoutingContext> handler() {
return rc -> {
String path = rc.request().path();
path = path.substring(8); //Remove "/assets/", which is 8 characters
String mime;
String newPath;
if (path.contains("webjars")) {
newPath = "META-INF/resources/" + path.substring(path.indexOf("webjars"));
} else {
newPath = fileAssetPath + (path.startsWith("/") ? path.substring(1) : path);
}
mime = MimeMapping.getMimeTypeForFilename(FilenameUtils.getName(newPath));
//System.out.println("PATH: " + path + " - mime = " + mime);
rc.response()
.putHeader("content-type", mime)
.sendFile(newPath);
};
}
}
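/*
* Minimal sketch of a concrete subclass, assuming assets live under a
* hypothetical "www/" directory. Since handler() strips the first 8
* characters ("/assets/") from the request path, the endpoint is intended
* to be mounted under that prefix:
*
*   public class StaticAssetsEndpoint extends AssetServingEndpoint {
*       public StaticAssetsEndpoint() {
*           super("/assets/*", "www/");
*       }
*   }
*/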
|
0
|
java-sources/ai/konduit/serving/konduit-serving-endpoint/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-endpoint/0.3.0/ai/konduit/serving/endpoint/Endpoint.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.endpoint;
import io.swagger.v3.oas.annotations.media.Schema;
import io.vertx.core.Handler;
import io.vertx.core.http.HttpMethod;
import io.vertx.ext.web.RoutingContext;
import java.util.List;
/**
* Endpoint represents a single custom HTTP endpoint, as part of a {@link HttpEndpoints} instance, as specified via
* InferenceConfiguration.<br>
* <b>NOTE</b>: The API for custom endpoints should be considered experimental and subject to change
*
* @author Alex Black
*/
@Schema(description = "An object that represents a single custom HTTP endpoints as specified via " +
"InferenceConfiguration. Note: The API for custom endpoints should be considered experimental " +
"and subject to change.")
public interface Endpoint {
/**
* @return The endpoint type - for example, GET, POST, etc
*/
HttpMethod type();
/**
* @return The path of the endpoint - for example "/my/custom/endpoint". May include path parameters
*/
String path();
/**
* @return The list of supported input MIME content types (see {@link io.netty.handler.codec.http.HttpHeaderValues})
*/
List<String> consumes();
/**
* @return The list of supported output MIME content types (see {@link io.netty.handler.codec.http.HttpHeaderValues})
*/
List<String> produces();
@Schema(description = "The endpoint type - for example, GET, POST, etc.")
default HttpMethod getType() {
return type();
}
@Schema(description = "The path of the endpoint - for example /my/custom/endpoint. May include path parameters.")
default String getPath() {
return path();
}
@Schema(description = "The list of supported input MIME content types.")
default List<String> getConsumes() {
return consumes();
}
@Schema(description = "The list of supported output MIME content types.")
default List<String> getProduces() {
return produces();
}
/**
* @return The Vert.x handler for this endpoint
*/
Handler<RoutingContext> handler();
}
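/*
* Minimal sketch of a custom endpoint implementation (all names below are
* illustrative, not part of this module):
*
*   public class HealthEndpoint implements Endpoint {
*       @Override public HttpMethod type() { return HttpMethod.GET; }
*       @Override public String path() { return "/health"; }
*       @Override public List<String> consumes() { return null; }   //no request body for GET
*       @Override public List<String> produces() { return java.util.Collections.singletonList("application/json"); }
*       @Override public Handler<RoutingContext> handler() {
*           return rc -> rc.response()
*                   .putHeader("content-type", "application/json")
*                   .end("{\"status\":\"UP\"}");
*       }
*   }
*/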
|
0
|
java-sources/ai/konduit/serving/konduit-serving-endpoint/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-endpoint/0.3.0/ai/konduit/serving/endpoint/HttpEndpoints.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.endpoint;
import ai.konduit.serving.pipeline.api.pipeline.Pipeline;
import ai.konduit.serving.pipeline.api.pipeline.PipelineExecutor;
import java.util.List;
/**
* HttpEndpoints represents one or more custom HTTP endpoints, specified via InferenceConfiguration.
* Returns a (possibly null/empty) list of endpoints for the given {@link Pipeline} and {@link PipelineExecutor}<br>
* <b>NOTE</b>: The API for custom endpoints should be considered experimental and subject to change
*
* @author Alex Black
*/
public interface HttpEndpoints {
/**
* @param p Pipeline for this server
* @param pe Pipeline executor for this server
* @return Return the list of custom endpoints for the given Pipeline/PipelineExecutor. May return null/empty
*/
List<Endpoint> endpoints(Pipeline p, PipelineExecutor pe);
}
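/*
* Sketch of a matching HttpEndpoints implementation returning a single
* hypothetical endpoint (see the HealthEndpoint sketch under Endpoint);
* returning null or an empty list is also valid when a pipeline exposes no
* custom endpoints:
*
*   public class MyHttpEndpoints implements HttpEndpoints {
*       @Override
*       public List<Endpoint> endpoints(Pipeline p, PipelineExecutor pe) {
*           return java.util.Collections.singletonList(new HealthEndpoint());
*       }
*   }
*/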
|
0
|
java-sources/ai/konduit/serving/konduit-serving-gpu/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-gpu/0.3.0/ai/konduit/serving/gpu/GpuMetrics.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.gpu;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.binder.MeterBinder;
import io.micrometer.core.lang.NonNull;
import org.bytedeco.cuda.nvml.*;
import org.bytedeco.javacpp.BytePointer;
import java.nio.charset.StandardCharsets;
import static java.util.Collections.emptyList;
import static org.bytedeco.cuda.global.nvml.*;
/**
* See https://docs.nvidia.com/deploy/nvml-api/group__nvmlDeviceQueries.html#group__nvmlDeviceQueries_1gf91efb38dadf591dd0de32ef7f0fd423
* for information on the nvml queries used here.
*/
public class GpuMetrics implements MeterBinder {
private final Iterable<Tag> tags;
public GpuMetrics() {
this(emptyList());
}
public GpuMetrics(Iterable<Tag> tags) {
this.tags = tags;
}
@Override
public void bindTo(@NonNull MeterRegistry registry) {
checkReturn(nvmlInit_v2());
int[] resultArr = new int[1];
checkReturn(nvmlSystemGetCudaDriverVersion(resultArr));
int deviceCount;
checkReturn(nvmlDeviceGetCount_v2(resultArr));
deviceCount = resultArr[0];
Gauge.builder("system.gpu.count", () -> deviceCount)
.tags(tags)
.description("The number of gpus available in the system")
.register(registry);
System.out.format("Found %s GPU(s) in the system", deviceCount);
for (int i = 0; i < deviceCount; i++) {
nvmlDevice_st device = new nvmlDevice_st();
nvmlDeviceGetHandleByIndex_v2(i, device);
String gpuName;
try {
BytePointer namePointer = new BytePointer(NVML_DEVICE_NAME_BUFFER_SIZE);
checkReturn(nvmlDeviceGetName(device, namePointer, NVML_DEVICE_NAME_BUFFER_SIZE));
String gpuNameString = namePointer.getString(StandardCharsets.UTF_8);
gpuName = gpuNameString.substring(0, gpuNameString.indexOf(Character.MIN_VALUE));
} catch (Exception exception) {
gpuName = "GPU " + i;
System.out.format("Unable to resolve GPU name at index %s. Using name as: %s%n", i, gpuName);
exception.printStackTrace();
}
String gpuIndexAndName = i + "." + gpuName.replace(" ", ".").toLowerCase();
Gauge.builder(gpuIndexAndName + ".running.processes", () -> {
/*
* Note that the result array first entry has to be zero here.
* This will cause nvml to return the number of running graphics processes
* and nothing more.
*
* Note that we also don't call checkReturn here because it can return
* something that is not success.
* A success means zero processes are running.
* Anything else: NVML_ERROR_INSUFFICIENT_SIZE means running processes were
* found (the count is written into the array), and any other return code is
* treated as a failure below.
*/
resultArr[0] = 0;
nvmlProcessInfo_v1_t processInfoT = new nvmlProcessInfo_v1_t();
int result = nvmlDeviceGetGraphicsRunningProcesses(device, resultArr, processInfoT);
if (result != NVML_ERROR_INSUFFICIENT_SIZE && result != NVML_SUCCESS) {
throw new IllegalStateException("Number of running processes query failed " + nvmlErrorString(result));
}
return resultArr[0];
})
.tags(tags)
.description(String.format("Number of running processes on GPU %s", i))
.register(registry);
Gauge.builder(gpuIndexAndName + ".percent.memory.usage", () -> {
/*
* From docs
* ---------
* unsigned int memory
* Percent of time over the past sample period during which global (device) memory was being read or written.
*/
nvmlUtilization_t nvmlUtilizationT = new nvmlUtilization_t();
if(checkReturn(nvmlDeviceGetUtilizationRates(device, nvmlUtilizationT)) == NVML_ERROR_NOT_SUPPORTED) {
return -1;
} else {
return nvmlUtilizationT.memory();
}
})
.tags(tags)
.description(String.format("Percent gpu %s memory usage", i))
.register(registry);
Gauge.builder(gpuIndexAndName + ".percent.gpu.usage", () -> {
/*
* From docs
* ---------
* unsigned int gpu
* Percent of time over the past sample period during which one or more kernels was executing on the GPU.
*/
nvmlUtilization_t nvmlUtilizationT = new nvmlUtilization_t();
if(checkReturn(nvmlDeviceGetUtilizationRates(device, nvmlUtilizationT)) == NVML_ERROR_NOT_SUPPORTED) {
return -1;
} else {
return nvmlUtilizationT.gpu();
}
})
.tags(tags)
.description(String.format("Percent gpu %s process usage", i))
.register(registry);
Gauge.builder(gpuIndexAndName + ".memory.usage.megabytes", () -> {
nvmlMemory_t memory = new nvmlMemory_t();
if(checkReturn(nvmlDeviceGetMemoryInfo(device, memory)) == NVML_ERROR_NOT_SUPPORTED) {
return -1;
} else {
return memory.used() / 1024 / 1024;
}
})
.tags(tags)
.description(String.format("Memory used on GPU %s", i))
.register(registry);
nvmlMemory_t memory = new nvmlMemory_t();
checkReturn(nvmlDeviceGetMemoryInfo(device, memory));
Gauge.builder(gpuIndexAndName + ".total.gpu.memory.megabytes", () -> memory.total() / 1024 / 1024)
.description(String.format("Total memory on GPU %s", i))
.register(registry);
Gauge.builder(gpuIndexAndName + ".temp.celcius", () -> {
if(checkReturn(nvmlDeviceGetTemperature(device, NVML_TEMPERATURE_GPU, resultArr)) == NVML_ERROR_NOT_SUPPORTED) {
return -1;
} else {
return resultArr[0];
}
})
.tags(tags)
.description(String.format("Temp on GPU %s", i))
.register(registry);
Gauge.builder(gpuIndexAndName + ".power.usage.milliwatts", () -> {
if(checkReturn(nvmlDeviceGetPowerUsage(device, resultArr)) == NVML_ERROR_NOT_SUPPORTED) {
return -1;
} else {
return resultArr[0];
}
})
.tags(tags)
.description(String.format("Power utilization by GPU %s", i))
.register(registry);
Gauge.builder(gpuIndexAndName + ".fan.speed.percent", () -> {
if(checkReturn(nvmlDeviceGetFanSpeed(device, resultArr)) == NVML_ERROR_NOT_SUPPORTED) {
return -1;
} else {
return resultArr[0];
}
})
.tags(tags)
.description(String.format("GPU %s fan speed", i))
.register(registry);
}
//nvmlShutdown();
}
private int checkReturn(int result) {
if(result == NVML_ERROR_NOT_SUPPORTED) {
return NVML_ERROR_NOT_SUPPORTED;
} else {
if (NVML_SUCCESS != result) {
throw new IllegalStateException(String.format("Failed NVML call with error code: %s. Error details: %s", result, nvmlErrorString(result)));
}
return result;
}
}
}
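/*
* Binding sketch, assuming the NVML native library (shipped with the NVIDIA
* driver) is present on the host - without it nvmlInit_v2() will fail:
*
*   io.micrometer.core.instrument.simple.SimpleMeterRegistry registry =
*           new io.micrometer.core.instrument.simple.SimpleMeterRegistry();
*   new GpuMetrics().bindTo(registry);
*   //gauges such as "system.gpu.count" and the per-device
*   //"<index>.<name>..." series are now registered
*/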
|
0
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc/KonduitServingGrpcJsonMapping.java
|
package ai.konduit.serving.vertx.protocols.grpc;
import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;
import java.util.ArrayList;
import java.util.List;
//GENERATED CLASS DO NOT EDIT
public class KonduitServingGrpcJsonMapping implements JsonSubTypesMapping {
@Override
public List<JsonSubType> getSubTypesMapping() {
List<JsonSubType> l = new ArrayList<>();
return l;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc/VertxGrpcModuleInfo.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.grpc;
import ai.konduit.serving.annotation.module.ModuleInfo;
@ModuleInfo("konduit-serving-grpc")
public class VertxGrpcModuleInfo {
private VertxGrpcModuleInfo(){ }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc/api/GrpcService.java
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grpc-service.proto
package ai.konduit.serving.vertx.protocols.grpc.api;
public final class GrpcService {
private GrpcService() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\022grpc-service.proto\022\022ai.konduit.serving" +
"\0323ai/konduit/serving/pipeline/api/protob" +
"uf/data.proto2X\n\tInference\022K\n\007predict\022\036." +
"ai.konduit.serving.DataScheme\032\036.ai.kondu" +
"it.serving.DataScheme\"\000B:\n+ai.konduit.se" +
"rving.vertx.protocols.grpc.apiB\013GrpcServ" +
"iceb\006proto3"
};
descriptor = com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.getDescriptor(),
});
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc/api/InferenceGrpc.java
|
package ai.konduit.serving.vertx.protocols.grpc.api;
import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ClientCalls.asyncClientStreamingCall;
import static io.grpc.stub.ClientCalls.asyncServerStreamingCall;
import static io.grpc.stub.ClientCalls.asyncUnaryCall;
import static io.grpc.stub.ClientCalls.blockingServerStreamingCall;
import static io.grpc.stub.ClientCalls.blockingUnaryCall;
import static io.grpc.stub.ClientCalls.futureUnaryCall;
import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ServerCalls.asyncClientStreamingCall;
import static io.grpc.stub.ServerCalls.asyncServerStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnaryCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall;
/**
* <pre>
* The main grpc service definition.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler (version 1.20.0)",
comments = "Source: grpc-service.proto")
public final class InferenceGrpc {
private InferenceGrpc() {}
private static <T> io.grpc.stub.StreamObserver<T> toObserver(final io.vertx.core.Handler<io.vertx.core.AsyncResult<T>> handler) {
return new io.grpc.stub.StreamObserver<T>() {
private volatile boolean resolved = false;
@Override
public void onNext(T value) {
if (!resolved) {
resolved = true;
handler.handle(io.vertx.core.Future.succeededFuture(value));
}
}
@Override
public void onError(Throwable t) {
if (!resolved) {
resolved = true;
handler.handle(io.vertx.core.Future.failedFuture(t));
}
}
@Override
public void onCompleted() {
if (!resolved) {
resolved = true;
handler.handle(io.vertx.core.Future.succeededFuture());
}
}
};
}
public static final String SERVICE_NAME = "ai.konduit.serving.Inference";
// Static method descriptors that strictly reflect the proto.
private static volatile io.grpc.MethodDescriptor<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme,
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme> getPredictMethod;
public static io.grpc.MethodDescriptor<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme,
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme> getPredictMethod() {
io.grpc.MethodDescriptor<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme, ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme> getPredictMethod;
if ((getPredictMethod = InferenceGrpc.getPredictMethod) == null) {
synchronized (InferenceGrpc.class) {
if ((getPredictMethod = InferenceGrpc.getPredictMethod) == null) {
InferenceGrpc.getPredictMethod = getPredictMethod =
io.grpc.MethodDescriptor.<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme, ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(
"ai.konduit.serving.Inference", "predict"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme.getDefaultInstance()))
.setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme.getDefaultInstance()))
.setSchemaDescriptor(new InferenceMethodDescriptorSupplier("predict"))
.build();
}
}
}
return getPredictMethod;
}
/**
* Creates a new async stub that supports all call types for the service
*/
public static InferenceStub newStub(io.grpc.Channel channel) {
return new InferenceStub(channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static InferenceBlockingStub newBlockingStub(
io.grpc.Channel channel) {
return new InferenceBlockingStub(channel);
}
/**
* Creates a new ListenableFuture-style stub that supports unary calls on the service
*/
public static InferenceFutureStub newFutureStub(
io.grpc.Channel channel) {
return new InferenceFutureStub(channel);
}
/**
* Creates a new vertx stub that supports all call types for the service
*/
public static InferenceVertxStub newVertxStub(io.grpc.Channel channel) {
return new InferenceVertxStub(channel);
}
/**
* <pre>
* The main grpc service definition.
* </pre>
*/
public static abstract class InferenceImplBase implements io.grpc.BindableService {
/**
* <pre>
* predicts an output
* </pre>
*/
public void predict(ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme request,
io.grpc.stub.StreamObserver<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme> responseObserver) {
asyncUnimplementedUnaryCall(getPredictMethod(), responseObserver);
}
@java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
.addMethod(
getPredictMethod(),
asyncUnaryCall(
new MethodHandlers<
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme,
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>(
this, METHODID_PREDICT)))
.build();
}
}
/**
* <pre>
* The main grpc service definition.
* </pre>
*/
public static final class InferenceStub extends io.grpc.stub.AbstractStub<InferenceStub> {
public InferenceStub(io.grpc.Channel channel) {
super(channel);
}
public InferenceStub(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected InferenceStub build(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
return new InferenceStub(channel, callOptions);
}
/**
* <pre>
* predicts an output
* </pre>
*/
public void predict(ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme request,
io.grpc.stub.StreamObserver<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme> responseObserver) {
asyncUnaryCall(
getChannel().newCall(getPredictMethod(), getCallOptions()), request, responseObserver);
}
}
/**
* <pre>
* The main grpc service definition.
* </pre>
*/
public static final class InferenceBlockingStub extends io.grpc.stub.AbstractStub<InferenceBlockingStub> {
public InferenceBlockingStub(io.grpc.Channel channel) {
super(channel);
}
public InferenceBlockingStub(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected InferenceBlockingStub build(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
return new InferenceBlockingStub(channel, callOptions);
}
/**
* <pre>
* predicts an output
* </pre>
*/
public ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme predict(ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme request) {
return blockingUnaryCall(
getChannel(), getPredictMethod(), getCallOptions(), request);
}
}
/**
* <pre>
* The main grpc service definition.
* </pre>
*/
public static final class InferenceFutureStub extends io.grpc.stub.AbstractStub<InferenceFutureStub> {
public InferenceFutureStub(io.grpc.Channel channel) {
super(channel);
}
public InferenceFutureStub(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected InferenceFutureStub build(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
return new InferenceFutureStub(channel, callOptions);
}
/**
* <pre>
* predicts an output
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme> predict(
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme request) {
return futureUnaryCall(
getChannel().newCall(getPredictMethod(), getCallOptions()), request);
}
}
/**
* <pre>
* The main grpc service definition.
* </pre>
*/
public static abstract class InferenceVertxImplBase implements io.grpc.BindableService {
/**
* <pre>
* predicts an output
* </pre>
*/
public void predict(ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme request,
io.vertx.core.Promise<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme> response) {
asyncUnimplementedUnaryCall(getPredictMethod(), InferenceGrpc.toObserver(response));
}
@java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
.addMethod(
getPredictMethod(),
asyncUnaryCall(
new VertxMethodHandlers<
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme,
ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>(
this, METHODID_PREDICT)))
.build();
}
}
/**
* <pre>
* The main grpc service definition.
* </pre>
*/
public static final class InferenceVertxStub extends io.grpc.stub.AbstractStub<InferenceVertxStub> {
public InferenceVertxStub(io.grpc.Channel channel) {
super(channel);
}
public InferenceVertxStub(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected InferenceVertxStub build(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
return new InferenceVertxStub(channel, callOptions);
}
/**
* <pre>
* predicts an output
* </pre>
*/
public void predict(ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme request,
io.vertx.core.Handler<io.vertx.core.AsyncResult<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>> response) {
asyncUnaryCall(
getChannel().newCall(getPredictMethod(), getCallOptions()), request, InferenceGrpc.toObserver(response));
}
}
private static final int METHODID_PREDICT = 0;
private static final class MethodHandlers<Req, Resp> implements
io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final InferenceImplBase serviceImpl;
private final int methodId;
MethodHandlers(InferenceImplBase serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_PREDICT:
serviceImpl.predict((ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme) request,
(io.grpc.stub.StreamObserver<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>) responseObserver);
break;
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
default:
throw new AssertionError();
}
}
}
private static final class VertxMethodHandlers<Req, Resp> implements
io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final InferenceVertxImplBase serviceImpl;
private final int methodId;
VertxMethodHandlers(InferenceVertxImplBase serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_PREDICT:
serviceImpl.predict((ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme) request,
(io.vertx.core.Promise<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>) io.vertx.core.Promise.<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>promise().future().setHandler(ar -> {
if (ar.succeeded()) {
((io.grpc.stub.StreamObserver<ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme>) responseObserver).onNext(ar.result());
responseObserver.onCompleted();
} else {
responseObserver.onError(ar.cause());
}
}));
break;
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
default:
throw new AssertionError();
}
}
}
private static abstract class InferenceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
InferenceBaseDescriptorSupplier() {}
@java.lang.Override
public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
return ai.konduit.serving.vertx.protocols.grpc.api.GrpcService.getDescriptor();
}
@java.lang.Override
public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
return getFileDescriptor().findServiceByName("Inference");
}
}
private static final class InferenceFileDescriptorSupplier
extends InferenceBaseDescriptorSupplier {
InferenceFileDescriptorSupplier() {}
}
private static final class InferenceMethodDescriptorSupplier
extends InferenceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final String methodName;
InferenceMethodDescriptorSupplier(String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (InferenceGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new InferenceFileDescriptorSupplier())
.addMethod(getPredictMethod())
.build();
}
}
}
return result;
}
}
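/*
* Client-side sketch (host, port and the Data instance "data" are
* placeholders), using a plaintext channel and the blocking stub; as in
* InferenceVerticleGrpc, a DataScheme is built from the protobuf bytes of a
* konduit-serving Data instance:
*
*   io.grpc.ManagedChannel channel = io.grpc.ManagedChannelBuilder
*           .forAddress("localhost", 9008)
*           .usePlaintext()
*           .build();
*   InferenceGrpc.InferenceBlockingStub stub = InferenceGrpc.newBlockingStub(channel);
*   DataProtoMessage.DataScheme request = DataProtoMessage.DataScheme.parseFrom(data.asBytes());
*   DataProtoMessage.DataScheme response = stub.predict(request);
*   channel.shutdown();
*/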
|
0
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc
|
java-sources/ai/konduit/serving/konduit-serving-grpc/0.3.0/ai/konduit/serving/vertx/protocols/grpc/verticle/InferenceVerticleGrpc.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.grpc.verticle;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.impl.data.ProtoData;
import ai.konduit.serving.pipeline.impl.data.protobuf.DataProtoMessage.DataScheme;
import ai.konduit.serving.vertx.protocols.grpc.api.InferenceGrpc;
import ai.konduit.serving.pipeline.settings.constants.EnvironmentConstants;
import ai.konduit.serving.vertx.verticle.InferenceVerticle;
import io.grpc.stub.StreamObserver;
import io.vertx.core.Promise;
import io.vertx.core.impl.ContextInternal;
import io.vertx.core.json.JsonObject;
import io.vertx.grpc.VertxServer;
import io.vertx.grpc.VertxServerBuilder;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class InferenceVerticleGrpc extends InferenceVerticle {
@Override
public void start(Promise<Void> startPromise) {
vertx.executeBlocking(handler -> {
try {
initialize();
handler.complete();
} catch (Exception exception) {
handler.fail(exception); //the result handler below propagates the failure to startPromise
}
}, resultHandler -> {
if(resultHandler.failed()) {
if(resultHandler.cause() != null)
startPromise.fail(resultHandler.cause());
else {
startPromise.fail("Failed to start. Unknown cause.");
}
}
else {
int port;
String portEnvValue = System.getenv(EnvironmentConstants.KONDUIT_SERVING_PORT);
if (portEnvValue != null) {
try {
port = Integer.parseInt(portEnvValue);
} catch (NumberFormatException exception) {
log.error("Environment variable \"{}={}\" isn't a valid port number.",
EnvironmentConstants.KONDUIT_SERVING_PORT, portEnvValue);
startPromise.fail(exception);
return;
}
} else {
port = inferenceConfiguration.port();
}
if (port < 0 || port > 0xFFFF) {
startPromise.fail(new Exception("Valid port range is 0 <= port <= 65535. The given port was " + port));
return;
}
VertxServer rpcServer = VertxServerBuilder
.forAddress(vertx, inferenceConfiguration.host(), port) //use the resolved port, honouring KONDUIT_SERVING_PORT and the range check above
.addService(new InferenceGrpc.InferenceImplBase() {
@Override
public void predict(DataScheme request, StreamObserver<DataScheme> responseObserver) {
try {
Data output = pipelineExecutor.exec(ProtoData.fromBytes(request.toByteArray()));
responseObserver.onNext(DataScheme.parseFrom(output.asBytes()));
responseObserver.onCompleted();
} catch (Throwable throwable) {
log.error("Failed to process the pipeline with the input data", throwable);
responseObserver.onError(throwable);
}
}
})
.build();
rpcServer.start(handler -> {
if(handler.succeeded()) {
int actualPort = rpcServer.getPort();
inferenceConfiguration.port(actualPort);
try {
((ContextInternal) context).getDeployment()
.deploymentOptions()
.setConfig(new JsonObject(inferenceConfiguration.toJson()));
long pid = getPid();
saveInspectionDataIfRequired(pid);
log.info("Inference gRPC server is listening on host: '{}'", inferenceConfiguration.host());
log.info("Inference gRPC server started on port {} with {} pipeline steps", actualPort, pipeline.size());
startPromise.complete();
} catch (Throwable throwable) {
startPromise.fail(throwable);
}
} else {
startPromise.fail(handler.cause());
}
});
}
});
}
}
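/*
* Deployment sketch (illustrative, assuming an InferenceConfiguration named
* "config" is in scope): the verticle prefers the KONDUIT_SERVING_PORT
* environment variable over the configured port, and a port of 0 lets the
* OS pick a free one:
*
*   Vertx vertx = Vertx.vertx();
*   vertx.deployVerticle(new InferenceVerticleGrpc(),
*           new DeploymentOptions().setConfig(new JsonObject(config.toJson())));
*/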
|
0
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http/KonduitServingHttpJsonMapping.java
|
package ai.konduit.serving.vertx.protocols.http;
import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;
import java.util.ArrayList;
import java.util.List;
//GENERATED CLASS DO NOT EDIT
public class KonduitServingHttpJsonMapping implements JsonSubTypesMapping {
@Override
public List<JsonSubType> getSubTypesMapping() {
List<JsonSubType> l = new ArrayList<>();
return l;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http/VertxHttpModuleInfo.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.http;
import ai.konduit.serving.annotation.module.ModuleInfo;
@ModuleInfo("konduit-serving-http")
public class VertxHttpModuleInfo {
private VertxHttpModuleInfo(){ }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http/api/ErrorResponse.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.http.api;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
* Class to represent the error response details when the inference HTTP API fails at a certain point.
*/
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Schema(description = "An object specifying error information for anything that doesn't happen according to plan while " +
"sending inference requests to the konduit serving http server.")
public class ErrorResponse implements Serializable {
@Schema(description = "Error code associated with the error object.")
private HttpApiErrorCode errorCode;
@Schema(description = "The message associated with the error.")
private String errorMessage;
public static ErrorResponse fromJson(String jsonString) {
return ObjectMappers.fromJson(jsonString, ErrorResponse.class);
}
}
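/*
* Round-trip sketch (assumes ObjectMappers also exposes a toJson helper as
* the counterpart of the fromJson call above):
*
*   ErrorResponse er = ErrorResponse.builder()
*       .errorCode(HttpApiErrorCode.DATA_PARSING_ERROR)
*       .errorMessage("could not parse request body")
*       .build();
*   String json = ObjectMappers.toJson(er);
*   ErrorResponse back = ErrorResponse.fromJson(json);
*/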
|
0
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http/api/HttpApiErrorCode.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.http.api;
import ai.konduit.serving.pipeline.api.pipeline.Pipeline;
import io.swagger.v3.oas.annotations.media.Schema;
@Schema(description = "An enum specifying the type of error occured on the konduit serving http server. " +
"DATA_PARSING_ERROR -> If the incoming data cannot be parsed " +
"MISSING_OR_EMPTY_CONTENT_TYPE_HEADER -> If the requests has no Content-Type header " +
"INVALID_CONTENT_TYPE_HEADER -> If the Content-Type header has an invalid value. Currently it should be either application/json or application/octet-stream " +
"MISSING_OR_EMPTY_ACCEPT_HEADER -> If the request has no Accept header " +
"INVALID_ACCEPT_HEADER -> If the Accept header has an invalid value. Currently it should be either application/json or application/octet-stream " +
"PIPELINE_PROCESSING_ERROR -> If there's an error while processing the data through the pipeline.")
public enum HttpApiErrorCode {
/**
* If the incoming data cannot be parsed
*/
DATA_PARSING_ERROR,
/**
* If the request has no Content-Type header
*/
MISSING_OR_EMPTY_CONTENT_TYPE_HEADER,
/**
* If the "Content-Type" header has an invalid value. Currently it should be either application/json or application/octet-stream
*/
INVALID_CONTENT_TYPE_HEADER,
/**
* If the request has no "Accept" header
*/
MISSING_OR_EMPTY_ACCEPT_HEADER,
/**
* If the "Accept" header has an invalid value. Currently it should be either application/json or application/octet-stream
*/
INVALID_ACCEPT_HEADER,
/**
* If there's an error while processing the data through the {@link Pipeline}.
*/
PIPELINE_PROCESSING_ERROR
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http/api/InferenceHttpApi.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.http.api;
import ai.konduit.serving.data.nd4j.format.ND4JConverters;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.pipeline.PipelineExecutor;
import ai.konduit.serving.pipeline.impl.format.JavaImageFactory;
import ai.konduit.serving.pipeline.registry.ImageFactoryRegistry;
import ai.konduit.serving.pipeline.registry.NDArrayConverterRegistry;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.vertx.core.buffer.Buffer;
import io.vertx.ext.web.FileUpload;
import io.vertx.ext.web.RoutingContext;
import lombok.AllArgsConstructor;
import lombok.Getter;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.nd4j.shade.guava.base.Strings;
import javax.imageio.ImageIO;
import java.io.File;
import java.nio.charset.StandardCharsets;
import static io.netty.handler.codec.http.HttpHeaderValues.*;
import static io.vertx.core.http.HttpHeaders.ACCEPT;
import static io.vertx.core.http.HttpHeaders.CONTENT_TYPE;
@AllArgsConstructor
@Getter
public class InferenceHttpApi {
private static double requestTime = -1.0;
private static double pipelineTime = -1.0;
protected static Gauge requestTimeGuage = null;
protected static Gauge pipelineTimeGuage = null;
protected static Gauge requestThroughputGuage = null;
protected static Counter requestsHandledCounter = null;
protected final PipelineExecutor pipelineExecutor;
protected static MeterRegistry registry = null;
static {
ImageFactoryRegistry.addFactory(new JavaImageFactory());
NDArrayConverterRegistry.addConverter(new ND4JConverters.Nd4jToSerializedConverter());
NDArrayConverterRegistry.addConverter(new ND4JConverters.SerializedToNd4jArrConverter());
}
public static Data extractData(String contentType, RoutingContext ctx) {
try {
if (contentType.contains(APPLICATION_JSON.toString())) {
return Data.fromJson(ctx.getBodyAsString(StandardCharsets.UTF_8.name()));
} else if (contentType.contains(APPLICATION_OCTET_STREAM.toString())) {
return Data.fromBytes(ctx.getBody().getBytes());
} else if(contentType.contains(MULTIPART_FORM_DATA.toString())) {
StringBuilder stringBuilder = new StringBuilder("{");
ctx.request().formAttributes().forEach(entry -> stringBuilder.append(String.format(",\"%s\":%s", entry.getKey(), entry.getValue().startsWith("[") ? entry.getValue() : String.format("\"%s\"", entry.getValue()))));
stringBuilder.append("}");
Data data = Data.fromJson(stringBuilder.toString().replaceFirst(",",""));
for(FileUpload fileUpload: ctx.fileUploads()) {
if(StringUtils.containsIgnoreCase(fileUpload.contentType(), "image")) {
data.put(fileUpload.name(), Image.create(ImageIO.read(new File(fileUpload.uploadedFileName()))));
} else {
data.put(fileUpload.name(), FileUtils.readFileToString(new File(fileUpload.uploadedFileName()), StandardCharsets.UTF_8));
}
}
return data;
} else {
throw new KonduitServingHttpException(HttpApiErrorCode.INVALID_CONTENT_TYPE_HEADER,
String.format("Invalid Content-Type header %s. Should be one of [application/json, application/octet-stream, multipart/form-data]", contentType));
}
} catch (Exception exception) {
throw new KonduitServingHttpException(HttpApiErrorCode.DATA_PARSING_ERROR,
String.format("%s. More Details: %s",
exception.toString(),
exception.getCause() != null ? exception.getCause().getMessage() : "null"));
}
}
//TODO: add swagger related annotations to this method or update this class for better swagger annotations support
public void predict(RoutingContext ctx) {
double requestTimeStart = (double) System.nanoTime();
String contentType = ctx.request().headers().get(CONTENT_TYPE);
String accept = ctx.request().headers().get(ACCEPT);
if(Strings.isNullOrEmpty(contentType)) {
throw new KonduitServingHttpException(HttpApiErrorCode.MISSING_OR_EMPTY_CONTENT_TYPE_HEADER,
"Content-Type header should not be null. Possible values are: [application/json, application/octet-stream, multipart/form-data]");
}
if(Strings.isNullOrEmpty(accept)) {
throw new KonduitServingHttpException(HttpApiErrorCode.MISSING_OR_EMPTY_ACCEPT_HEADER,
"Accept header should not be null. Possible values are: [application/json, application/octet-stream]");
}
Data input = extractData(contentType, ctx);
Data output;
try {
double pipelineTimeStart = (double) System.nanoTime();
output = pipelineExecutor.exec(input);
double pipelineTimeEnd = (double) System.nanoTime();
pipelineTime = pipelineTimeEnd - pipelineTimeStart;
} catch (Exception exception) {
throw new KonduitServingHttpException(HttpApiErrorCode.PIPELINE_PROCESSING_ERROR, exception);
}
if(accept.contains(APPLICATION_JSON.toString())) {
ctx.response()
.setStatusCode(200)
.putHeader(CONTENT_TYPE, APPLICATION_JSON.toString())
.end(output.toJson(), StandardCharsets.UTF_8.name());
} else if(accept.contains(APPLICATION_OCTET_STREAM.toString())) {
ctx.response()
.setStatusCode(200)
.putHeader(CONTENT_TYPE, APPLICATION_OCTET_STREAM.toString())
.end(Buffer.buffer(output.asBytes()));
} else {
throw new KonduitServingHttpException(HttpApiErrorCode.INVALID_ACCEPT_HEADER,
String.format("Invalid Accept header %s. Should be one of [application/json, application/octet-stream]", accept));
}
if(registry != null) {
requestsHandledCounter.increment();
}
double requestTimeEnd = (double) System.nanoTime();
requestTime = requestTimeEnd - requestTimeStart;
}
public static void setMetrics(MeterRegistry registry, Iterable<Tag> tags) {
if(registry != null) {
InferenceHttpApi.registry = registry;
requestTimeGuage = Gauge.builder("request.time.ms", () -> requestTime / 1e6).tags(tags).register(registry); //nanoseconds -> milliseconds
pipelineTimeGuage = Gauge.builder("pipeline.time.ms", () -> pipelineTime / 1e6).tags(tags).register(registry); //nanoseconds -> milliseconds
requestThroughputGuage = Gauge.builder("request.throughput.reqps", () -> 1e9 / requestTime).tags(tags).register(registry); //requests per second, from nanoseconds
requestsHandledCounter = registry.counter("requests.handled", tags);
}
}
}
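/*
* Metrics wiring sketch: a server calls setMetrics(...) once at startup so
* the timing gauges and the request counter get registered (the registry
* source and the tag values below are illustrative):
*
*   io.micrometer.core.instrument.MeterRegistry registry =
*           io.vertx.micrometer.backends.BackendRegistries.getDefaultNow();
*   InferenceHttpApi.setMetrics(registry, java.util.Collections.singletonList(
*           new io.micrometer.core.instrument.ImmutableTag("servingId", "my-server")));
*/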
|
0
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http/api/KonduitServingHttpException.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.http.api;
/**
* Represents an exception that occurs when the inference HTTP API fails at some point.
*/
public class KonduitServingHttpException extends IllegalStateException {
/**
* The error response object associated with the exception.
*/
private final ErrorResponse errorResponse;
public KonduitServingHttpException(HttpApiErrorCode errorCode, Throwable throwable) {
super(throwable);
errorResponse = ErrorResponse.builder().errorCode(errorCode)
.errorMessage(throwable.getMessage() != null ? throwable.getMessage() : throwable.toString())
.build();
}
public KonduitServingHttpException(HttpApiErrorCode errorCode, String errorMessage) {
super(String.format("Error Code: %s%n Error Message: %s", errorCode.name(), errorMessage));
errorResponse = ErrorResponse.builder().errorCode(errorCode).errorMessage(errorMessage).build();
}
/**
* Get the error response object.
*/
public ErrorResponse getErrorResponse() {
return this.errorResponse;
}
}
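/*
* Handling sketch: a Vert.x failure handler can unwrap the exception and
* reply with its ErrorResponse serialized as JSON (the route wiring and the
* 500 status code are illustrative):
*
*   router.route().failureHandler(ctx -> {
*       Throwable t = ctx.failure();
*       if (t instanceof KonduitServingHttpException) {
*           ctx.response().setStatusCode(500)
*               .putHeader("content-type", "application/json")
*               .end(io.vertx.core.json.Json.encode(
*                       ((KonduitServingHttpException) t).getErrorResponse()));
*       }
*   });
*/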
|
0
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http
|
java-sources/ai/konduit/serving/konduit-serving-http/0.3.0/ai/konduit/serving/vertx/protocols/http/verticle/InferenceVerticleHttp.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.vertx.protocols.http.verticle;
import ai.konduit.serving.endpoint.Endpoint;
import ai.konduit.serving.endpoint.HttpEndpoints;
import ai.konduit.serving.pipeline.api.pipeline.Pipeline;
import ai.konduit.serving.pipeline.api.pipeline.PipelineExecutor;
import ai.konduit.serving.pipeline.impl.metrics.MetricsProvider;
import ai.konduit.serving.pipeline.registry.MicrometerRegistry;
import ai.konduit.serving.pipeline.settings.DirectoryFetcher;
import ai.konduit.serving.pipeline.settings.constants.EnvironmentConstants;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import ai.konduit.serving.vertx.protocols.http.api.ErrorResponse;
import ai.konduit.serving.vertx.protocols.http.api.HttpApiErrorCode;
import ai.konduit.serving.vertx.protocols.http.api.InferenceHttpApi;
import ai.konduit.serving.vertx.protocols.http.api.KonduitServingHttpException;
import ai.konduit.serving.vertx.verticle.InferenceVerticle;
import com.google.common.base.Strings;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.binder.jvm.JvmMemoryMetrics;
import io.micrometer.core.instrument.binder.system.ProcessorMetrics;
import io.vertx.core.Handler;
import io.vertx.core.Promise;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.http.HttpServerOptions;
import io.vertx.core.http.HttpVersion;
import io.vertx.core.impl.ContextInternal;
import io.vertx.core.json.JsonObject;
import io.vertx.core.net.PemKeyCertOptions;
import io.vertx.core.net.SelfSignedCertificate;
import io.vertx.ext.web.Route;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.BodyHandler;
import io.vertx.ext.web.handler.StaticHandler;
import io.vertx.micrometer.MicrometerMetricsOptions;
import io.vertx.micrometer.VertxPrometheusOptions;
import io.vertx.micrometer.backends.BackendRegistries;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
import static ai.konduit.serving.pipeline.settings.KonduitSettings.getServingId;
import static io.netty.handler.codec.http.HttpHeaderValues.*;
import static io.vertx.core.http.HttpHeaders.CONTENT_TYPE;
@Slf4j
public class InferenceVerticleHttp extends InferenceVerticle {
@Override
public void start(Promise<Void> startPromise) {
vertx.executeBlocking(handler -> {
try {
initialize();
handler.complete();
            } catch (Exception exception) {
                //Fail only the blocking handler here - the result handler below
                //fails startPromise, and completing it twice would throw
                handler.fail(exception);
            }
}, resultHandler -> {
if (resultHandler.failed()) {
if (resultHandler.cause() != null)
startPromise.fail(resultHandler.cause());
else {
startPromise.fail("Failed to start. Unknown cause.");
}
} else {
int port;
String portEnvValue = System.getenv(EnvironmentConstants.KONDUIT_SERVING_PORT);
if (portEnvValue != null) {
try {
port = Integer.parseInt(portEnvValue);
} catch (NumberFormatException exception) {
log.error("Environment variable \"{}={}\" isn't a valid port number.",
EnvironmentConstants.KONDUIT_SERVING_PORT, portEnvValue);
startPromise.fail(exception);
return;
}
} else {
port = inferenceConfiguration.port();
}
if (port < 0 || port > 0xFFFF) {
startPromise.fail(new Exception("Valid port range is 0 <= port <= 65535. The given port was " + port));
return;
}
                    vertx.createHttpServer(createOptions(port, //Use the resolved (possibly env-overridden) port
inferenceConfiguration.useSsl(),
inferenceConfiguration.sslKeyPath(),
inferenceConfiguration.sslCertificatePath()))
.requestHandler(createRouter())
.exceptionHandler(throwable -> log.error("Error occurred during http request.", throwable))
.listen(port, inferenceConfiguration.host(), handler -> {
if (handler.failed()) {
startPromise.fail(handler.cause());
} else {
int actualPort = handler.result().actualPort();
inferenceConfiguration.port(actualPort);
try {
((ContextInternal) context).getDeployment()
.deploymentOptions()
.setConfig(new JsonObject(inferenceConfiguration.toJson()));
long pid = getPid();
saveInspectionDataIfRequired(pid);
log.info("Inference HTTP server is listening on host: '{}'", inferenceConfiguration.host());
log.info("Inference HTTP server started on port {} with {} pipeline steps", actualPort, pipeline.size());
startPromise.complete();
} catch (Throwable throwable) {
startPromise.fail(throwable);
}
}
});
}
});
}
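    // Usage sketch (illustrative): the listening port can be overridden at launch
    // time via the environment variable referenced by
    // EnvironmentConstants.KONDUIT_SERVING_PORT. Assuming that constant resolves
    // to the name "KONDUIT_SERVING_PORT", e.g. on Linux:
    //
    //   export KONDUIT_SERVING_PORT=9008   # hypothetical port value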
private HttpServerOptions createOptions(int port, boolean useSsl, String sslKeyPath, String sslCertificatePath) {
HttpServerOptions httpServerOptions = new HttpServerOptions()
.setPort(port)
.setHost("0.0.0.0")
.setSslHandshakeTimeout(0)
.setCompressionSupported(true)
.setTcpKeepAlive(true)
.setTcpNoDelay(true)
.setAlpnVersions(Arrays.asList(HttpVersion.HTTP_1_0, HttpVersion.HTTP_1_1, HttpVersion.HTTP_2))
.setUseAlpn(false)
.setSsl(useSsl);
if (useSsl) {
if (Strings.isNullOrEmpty(sslKeyPath) || Strings.isNullOrEmpty(sslCertificatePath)) {
if (Strings.isNullOrEmpty(sslKeyPath)) {
log.warn("No pem key file specified for SSL.");
}
if (Strings.isNullOrEmpty(sslCertificatePath)) {
log.warn("No pem certificate file specified for SSL.");
}
log.info("Using an auto generated self signed pem key and certificate with SSL.");
httpServerOptions.setKeyCertOptions(SelfSignedCertificate.create().keyCertOptions());
} else {
sslKeyPath = new File(sslKeyPath).getAbsolutePath();
sslCertificatePath = new File(sslCertificatePath).getAbsolutePath();
log.info("Using SSL with PEM Key: {} and certificate {}.", sslKeyPath, sslCertificatePath);
httpServerOptions.setPemKeyCertOptions(new PemKeyCertOptions().setKeyPath(sslKeyPath).setCertPath(sslCertificatePath));
}
}
return httpServerOptions;
}
public Router createRouter() {
Router inferenceRouter = Router.router(vertx);
ServiceLoader<MetricsProvider> sl = ServiceLoader.load(MetricsProvider.class);
Iterator<MetricsProvider> iterator = sl.iterator();
MetricsProvider metricsProvider = null;
if (iterator.hasNext()) {
metricsProvider = iterator.next();
}
Object endpoint = metricsProvider == null ? null : metricsProvider.getEndpoint();
MeterRegistry registry = null;
Iterable<Tag> tags = Collections.singletonList(new ImmutableTag("servingId", getServingId()));
if (endpoint != null) {
registry = MicrometerRegistry.getRegistry();
log.info("MetricsProvider implementation detected, adding endpoint /metrics");
MicrometerMetricsOptions micrometerMetricsOptions = new MicrometerMetricsOptions()
.setMicrometerRegistry(registry)
.setPrometheusOptions(new VertxPrometheusOptions().setEnabled(true));
BackendRegistries.setupBackend(micrometerMetricsOptions);
new JvmMemoryMetrics(tags).bindTo(registry);
new ProcessorMetrics(tags).bindTo(registry);
// For scraping GPU metrics
try {
Class<?> gpuMetricsClass = Class.forName("ai.konduit.serving.gpu.GpuMetrics");
Object instance = gpuMetricsClass.getConstructor(Iterable.class).newInstance(tags);
gpuMetricsClass.getMethod("bindTo", MeterRegistry.class).invoke(instance, registry);
} catch (Exception exception) {
log.info("No GPU binaries found. Selecting and scraping only CPU metrics.");
log.debug("Error while finding GPU binaries (ignore this if not running on a system with GPUs): ", exception);
}
Counter serverUpTimeCounter = registry.counter("server.up.time", tags);
vertx.setPeriodic(5000, l -> serverUpTimeCounter.increment(5.0));
inferenceRouter.get("/metrics").handler((Handler<RoutingContext>) endpoint)
.failureHandler(failureHandler -> {
if (failureHandler.failure() != null) {
log.error("Failed to scrape metrics", failureHandler.failure());
}
                        failureHandler.response()
                                .setStatusCode(500)
                                .end(failureHandler.failure() != null ? failureHandler.failure().toString() : "Failed to scrape metrics");
});
}
inferenceRouter.post().handler(BodyHandler.create()
.setUploadsDirectory(DirectoryFetcher.getFileUploadsDir().getAbsolutePath())
.setDeleteUploadedFilesOnEnd(true)
.setMergeFormAttributes(true))
.failureHandler(failureHandler -> {
Throwable throwable = failureHandler.failure();
int statusCode = failureHandler.statusCode();
if (statusCode == 404) {
log.warn("404 at route {}" + failureHandler.request().path());
} else if (failureHandler.failed()) {
if (throwable != null) {
log.error("Request failed with cause ", throwable);
} else {
log.error("Request failed with unknown cause.");
}
}
if (throwable instanceof KonduitServingHttpException) {
sendErrorResponse(failureHandler, ((KonduitServingHttpException) throwable).getErrorResponse());
} else {
failureHandler.response()
.setStatusCode(500)
.end(throwable != null ? throwable.toString() : "Internal Server Exception");
}
});
InferenceHttpApi.setMetrics(registry, tags);
InferenceHttpApi inferenceHttpApi = new InferenceHttpApi(pipelineExecutor);
inferenceRouter.post("/predict")
.consumes(APPLICATION_JSON.toString())
.consumes(APPLICATION_OCTET_STREAM.toString())
.consumes(MULTIPART_FORM_DATA.toString())
.produces(APPLICATION_JSON.toString())
.produces(APPLICATION_OCTET_STREAM.toString())
.handler(inferenceHttpApi::predict);
File staticContentRoot = new File(inferenceConfiguration.staticContentRoot());
if (staticContentRoot.exists() && staticContentRoot.isDirectory()) {
log.info("Serving static content from {}, on URL: {} with index page: {}",
staticContentRoot.getAbsolutePath(),
inferenceConfiguration.staticContentUrl(),
inferenceConfiguration.staticContentIndexPage());
inferenceRouter
.route(inferenceConfiguration.staticContentUrl())
.method(HttpMethod.GET)
.produces("application/html")
.handler(handler -> {
String rootUrl = inferenceConfiguration.staticContentUrl().replaceFirst("\\*", "").replaceFirst("/", "");
String absoluteFilePath = null;
if(handler.request().path().equals(rootUrl + "/")) {
                    absoluteFilePath = new File(inferenceConfiguration.staticContentRoot(), inferenceConfiguration.staticContentIndexPage()).getAbsolutePath();
} else {
String fileSubPath = handler.request().path().replaceFirst(rootUrl, "");
absoluteFilePath = new File(String.format("%s%s", inferenceConfiguration.staticContentRoot(), fileSubPath)).getAbsolutePath();
}
log.info("Serving file: {}", absoluteFilePath);
handler.response().sendFile(absoluteFilePath);
})
.failureHandler(failureHandler -> {
log.error("Issues while serving static content...", failureHandler.failure());
failureHandler.response().setStatusCode(500).end(String.format("<p>%s</p>", failureHandler.failure().getMessage()));
});
}
//Custom endpoints:
if (inferenceConfiguration.customEndpoints() != null && !inferenceConfiguration.customEndpoints().isEmpty()) {
addCustomEndpoints(inferenceHttpApi, inferenceRouter);
}
return inferenceRouter;
}
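    // Usage sketch (illustrative): the /predict route above accepts JSON,
    // octet-stream and multipart bodies. A hypothetical invocation - host, port
    // and payload are assumptions:
    //
    //   curl -X POST http://localhost:9008/predict \
    //        -H "Content-Type: application/json" \
    //        -d '{"input": "..."}'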
private void sendErrorResponse(RoutingContext ctx, ErrorResponse errorResponse) {
sendErrorResponse(ctx, errorResponse.getErrorCode(), errorResponse.getErrorMessage());
}
private void sendErrorResponse(RoutingContext ctx, HttpApiErrorCode errorCode, String errorMessage) {
ctx.response()
.setStatusCode(500)
.putHeader(CONTENT_TYPE, APPLICATION_JSON.toString())
.end(ObjectMappers.toJson(ErrorResponse.builder()
.errorCode(errorCode)
.errorMessage(errorMessage)
.build()));
}
private void addCustomEndpoints(InferenceHttpApi inferenceHttpApi, Router inferenceRouter) {
List<String> e = inferenceConfiguration.customEndpoints();
PipelineExecutor pe = inferenceHttpApi.getPipelineExecutor();
Pipeline p = pe.getPipeline();
for (String s : e) {
//TODO this won't work for OSGi!
Class<?> c;
try {
c = Class.forName(s);
} catch (ClassNotFoundException ex) {
log.error("Error loading custom endpoint for class {}: class not found. Skipping this endpoint", s, ex);
continue;
}
if (!HttpEndpoints.class.isAssignableFrom(c)) {
log.error("Error loading custom endpoint for class {}: class does not implement ai.konduit.serving.endpoint.HttpEndpoint. Skipping this endpoint", s);
continue;
}
HttpEndpoints h;
try {
h = (HttpEndpoints) c.getConstructor().newInstance();
} catch (NoSuchMethodException | SecurityException ex) {
log.error("Error loading custom endpoint for class {}: no zero-arg contsructor was found/accessible. Skipping this endpoint", s, ex);
continue;
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
log.error("Error loading custom endpoint for class {}: error creating new instance of class. Skipping this endpoint", s, ex);
continue;
}
List<Endpoint> endpoints;
try {
endpoints = h.endpoints(p, pe);
} catch (Throwable t) {
log.error("Error loading custom endpoint for class {}: error getting endpoints via HttpEndpoint.endpoints(Pipeline, PipelineExecutor). Skipping this endpoint", s, t);
continue;
}
if (endpoints != null && !endpoints.isEmpty()) { //May be null/empty if endpoint is pipeline-specific, and not applicable for this pipeline
for (Endpoint ep : endpoints) {
try {
String path = ep.path();
if (!path.startsWith("/"))
path = "/" + path;
Route r = inferenceRouter.route(ep.type(), path);
if (ep.consumes() != null && !ep.consumes().isEmpty()) {
for (String consume : ep.consumes()) {
r.consumes(consume);
}
}
if (ep.produces() != null && !ep.produces().isEmpty()) {
for (String produces : ep.produces()) {
r.produces(produces);
}
}
r.handler(ep.handler());
} catch (Throwable t) {
log.error("Error loading custom endpoint for class {}: error creating route in Vert.x. Skipping this endpoint: {}", s, ep, t);
}
}
}
}
}
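    // Usage sketch (illustrative): a minimal custom endpoint class loadable by
    // addCustomEndpoints(...) above. It must implement
    // ai.konduit.serving.endpoint.HttpEndpoints and expose an accessible zero-arg
    // constructor; the class name and route below are hypothetical.
    //
    //   public class HealthEndpoints implements HttpEndpoints {
    //       @Override
    //       public List<Endpoint> endpoints(Pipeline pipeline, PipelineExecutor executor) {
    //           return Collections.singletonList(/* an Endpoint serving GET /health */);
    //       }
    //   }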
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/ImageModuleInfo.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image;
import ai.konduit.serving.annotation.module.ModuleInfo;
@ModuleInfo("konduit-serving-image")
public class ImageModuleInfo {
private ImageModuleInfo(){}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/KonduitServingImageJsonMapping.java
|
package ai.konduit.serving.data.image;
import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;
import java.util.ArrayList;
import java.util.List;
//GENERATED CLASS DO NOT EDIT
public class KonduitServingImageJsonMapping implements JsonSubTypesMapping {
    @Override
public List<JsonSubType> getSubTypesMapping() {
List<JsonSubType> l = new ArrayList<>();
return l;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/convert/ImageToNDArray.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.convert;
import ai.konduit.serving.data.image.convert.config.AspectRatioHandling;
import ai.konduit.serving.data.image.convert.config.NDChannelLayout;
import ai.konduit.serving.data.image.convert.config.NDFormat;
import ai.konduit.serving.data.image.util.ImageUtils;
import ai.konduit.serving.pipeline.api.data.BoundingBox;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.data.NDArray;
import ai.konduit.serving.pipeline.api.data.NDArrayType;
import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray;
import com.google.common.primitives.Longs;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.javacpp.indexer.*;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Size;
import org.nd4j.common.base.Preconditions;
import org.nd4j.common.primitives.Pair;
import java.nio.*;
/**
 * Utility methods for converting Image objects to NDArrays.
* See {@link ImageToNDArrayConfig} for more details
*
* @author Alex Black
*/
public class ImageToNDArray {
private ImageToNDArray() {
}
/**
* Convert the provided image to a NDArray, according to the specified configuration<br>
* See {@link ImageToNDArrayConfig} for more details
*
* @param image Image to convert
* @param config Configuration to use
* @return The Image converted to an NDArray
*/
public static NDArray convert(Image image, ImageToNDArrayConfig config) {
return convert(image, config, false).getFirst();
}
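    // Usage sketch (a minimal example, assuming the fluent setters on
    // ImageToNDArrayConfig match the accessors used elsewhere in this class -
    // height()/width()/format()/channelLayout()/dataType()):
    //
    //   ImageToNDArrayConfig config = new ImageToNDArrayConfig()
    //           .height(224).width(224)
    //           .format(NDFormat.CHANNELS_FIRST)
    //           .channelLayout(NDChannelLayout.RGB)
    //           .dataType(NDArrayType.FLOAT);
    //   NDArray array = ImageToNDArray.convert(image, config);
    //   //Yields shape {1, 3, 224, 224} when includeMinibatchDim() is true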
public static Pair<NDArray,BoundingBox> convertWithMetadata(Image image, ImageToNDArrayConfig config) {
return convert(image, config, true);
}
public static BoundingBox getCropRegion(Image image, ImageToNDArrayConfig config){
int imgH = image.height();
int imgW = image.width();
return getCropRegion(imgW, imgH, config);
}
public static BoundingBox getCropRegion(int imgW, int imgH, ImageToNDArrayConfig config){
Integer outH = config.height();
Integer outW = config.width();
if (outH == null)
outH = imgH;
if (outW == null)
outW = imgW;
//Resize if necessary
boolean correctSize = outH == imgH && outW == imgW;
if (!correctSize) {
AspectRatioHandling h = config.aspectRatioHandling();
if (h == AspectRatioHandling.CENTER_CROP) {
return centerCropBB(imgH, imgW, outH, outW);
} else if (h == AspectRatioHandling.PAD) {
throw new UnsupportedOperationException("Not yet implemented");
} else if (h == AspectRatioHandling.STRETCH) {
return BoundingBox.createXY(0.0, 1.0, 0.0, 1.0);
} else {
throw new UnsupportedOperationException("Not supported image conversion: " + h);
}
} else {
return BoundingBox.createXY(0.0, 1.0, 0.0, 1.0);
}
}
    public static long[] getOutputShape(ImageToNDArrayConfig config){
        int rank = config.includeMinibatchDim() ? 4 : 3;
        long[] out = new long[rank];
        int i = 0;
        if(config.includeMinibatchDim())
            out[i++] = 1;   //Leading minibatch dimension of size 1
        if(config.format() == NDFormat.CHANNELS_FIRST){
            out[i++] = config.channelLayout().numChannels();
            out[i++] = config.height();
            out[i] = config.width();
        } else {
            out[i++] = config.height();
            out[i++] = config.width();
            out[i] = config.channelLayout().numChannels();
        }
        return out;
    }
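    // Worked example (illustrative): for a 3-channel 224x224 CHANNELS_FIRST config
    // with includeMinibatchDim() == true, the returned shape is {1, 3, 224, 224};
    // with includeMinibatchDim() == false it is {3, 224, 224}.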
protected static Pair<NDArray,BoundingBox> convert(Image image, ImageToNDArrayConfig config, boolean withMeta) {
BoundingBox bbMeta = null;
Integer outH = config.height();
Integer outW = config.width();
if (outH == null)
outH = image.height();
if (outW == null)
outW = image.width();
//Resize if necessary
boolean correctSize = outH == image.height() && outW == image.width();
Mat m = image.getAs(Mat.class);
if (!correctSize) {
AspectRatioHandling h = config.aspectRatioHandling();
if (h == AspectRatioHandling.CENTER_CROP) {
Pair<Mat,BoundingBox> p = centerCrop(m, outH, outW, withMeta); //new Mat(m, crop);;
Mat cropped = p.getFirst();
if (cropped.cols() == outW && cropped.rows() == outH) {
m = cropped;
} else {
Mat resized = new Mat();
org.bytedeco.opencv.global.opencv_imgproc.resize(cropped, resized, new Size(outW, outH));
m = resized;
}
if(withMeta){
bbMeta = p.getSecond();
}
} else if (h == AspectRatioHandling.PAD) {
throw new UnsupportedOperationException("Not yet implemented");
} else if (h == AspectRatioHandling.STRETCH) {
Mat resized = new Mat();
org.bytedeco.opencv.global.opencv_imgproc.resize(m, resized, new Size(outW, outH));
m = resized;
if(withMeta){
bbMeta = BoundingBox.createXY(0.0, 1.0, 0.0, 1.0);
}
} else {
throw new UnsupportedOperationException("Not supported image conversion: " + h);
}
} else {
if(withMeta){
bbMeta = BoundingBox.createXY(0.0, 1.0, 0.0, 1.0);
}
}
m = convertColor(m, config);
Preconditions.checkState(m.channels() <= 3,"Channels must not be greater than 3!");
ByteBuffer bb = toFloatBuffer(m, config);
if (config.dataType() != NDArrayType.FLOAT) //TODO there are likely more efficient ways than this!
bb = ImageUtils.cast(bb, NDArrayType.FLOAT, config.dataType());
int ch = config.channelLayout().numChannels();
long[] shape;
if (config.format() == NDFormat.CHANNELS_FIRST) {
shape = new long[]{ch, outH, outW};
} else {
shape = new long[]{outH, outW, ch};
}
/* INDArray arrCreate = Nd4j.create(ND4JUtil.typeNDArrayTypeToNd4j(config.dataType()),shape, Nd4j.getStrides(shape),'c');
*//**
* Note this logic assumes 3d arrays.
* Postpone reshape till after the data is filled in
* since the new shape is just the minibatch size.
*//*
ImageUtils.fillNDArray(m,true,arrCreate);
ByteBuffer bb = arrCreate.data().asNio();*/
if(config.includeMinibatchDim()) {
shape = Longs.concat(new long[]{1},shape);
}
SerializedNDArray arr = new SerializedNDArray(config.dataType(), shape, bb);
return new Pair<>(NDArray.create(arr), bbMeta);
}
public static Pair<Mat,BoundingBox> centerCrop(Mat image, int outH, int outW, boolean withBB) {
int imgH = image.rows();
int imgW = image.cols();
double aspectIn = image.cols() / (double)image.rows();
double aspectOut = outW / (double)outH;
        int croppedW;
        int croppedH;
        int x0;
        int y0;
if(aspectIn == aspectOut){
//No crop necessary
return new Pair<>(image, BoundingBox.createXY(0.0, 1.0, 0.0, 1.0));
} else if(aspectIn > aspectOut){
//Need to crop from width dimension
croppedW = (int)(aspectOut * image.rows());
croppedH = imgH;
int delta = imgW - croppedW;
x0 = delta / 2;
y0 = 0;
} else {
//Need to crop from the height dimension
croppedW = imgW;
croppedH = (int)(image.cols() / aspectOut);
int delta = imgH - croppedH;
x0 = 0;
y0 = delta / 2;
}
Rect crop = new Rect(x0, y0, croppedW, croppedH);
BoundingBox bb = null;
if(withBB){
bb = centerCropBB(imgH, imgW, outH, outW);
}
Mat out = image.apply(crop);
return new Pair<>(out, bb);
}
protected static BoundingBox centerCropBB(int imgH, int imgW, int outH, int outW){
double aspectIn = imgW / (double)imgH;
double aspectOut = outW / (double)outH;
int croppedW;
int croppedH;
int x0;
int x1;
int y0;
int y1;
if(aspectIn == aspectOut){
//No crop necessary
return BoundingBox.createXY(0.0, 1.0, 0.0, 1.0);
} else if(aspectIn > aspectOut){
//Need to crop from width dimension
croppedW = (int)(aspectOut * imgH);
croppedH = imgH;
int delta = imgW - croppedW;
x0 = delta / 2;
x1 = imgW - (delta/2);
y0 = 0;
y1 = imgH;
} else {
//Need to crop from the height dimension
croppedW = imgW;
croppedH = (int)(imgW / aspectOut);
int delta = imgH - croppedH;
x0 = 0;
x1 = imgW;
y0 = delta / 2;
y1 = imgH - (delta/2);
}
double dx1 = x0 / (double)imgW;
double dx2 = x1 / (double)imgW;
double dy1 = y0 / (double)imgH;
double dy2 = y1 / (double)imgH;
return BoundingBox.createXY(dx1, dx2, dy1, dy2);
}
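    // Worked example (illustrative): cropping a 1920x1080 image to a square output
    // gives aspectIn = 1.778 > aspectOut = 1.0, so croppedW = 1080, delta = 840,
    // x0 = 420, x1 = 1500, and the returned fractional box is
    // (0.21875, 0.78125, 0.0, 1.0).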
protected static Mat convertColor(Mat m, ImageToNDArrayConfig config) {
int ch = config.channelLayout().numChannels();
if (ch != 3 && ch != 1) {
throw new UnsupportedOperationException("Not yet implemented: Channels != 3 support");
}
//TODO - Actually convert color!
return m;
}
protected static ByteBuffer toFloatBuffer(Mat m, ImageToNDArrayConfig config) {
Preconditions.checkState(config.channelLayout() == NDChannelLayout.RGB ||
config.channelLayout() == NDChannelLayout.BGR ||
config.channelLayout() == NDChannelLayout.GRAYSCALE,
"Only GRAYSCALE, RGB and BGR conversion implement so far");
Preconditions.checkState(config.dataType() != NDArrayType.BOOL && config.dataType() != NDArrayType.UTF8,
"%s datatype is not supported for ImageToNDArray", config.dataType());
boolean direct = !Loader.getPlatform().startsWith("android");
//By default, Mat stores values in channels first format - CHW
int h = m.rows();
int w = m.cols();
int ch = m.channels();
int lengthElements = h * w * ch;
int lengthBytes = lengthElements * 4;
ByteBuffer bb = direct ? ByteBuffer.allocateDirect(lengthBytes).order(ByteOrder.LITTLE_ENDIAN) : ByteBuffer.allocate(lengthBytes).order(ByteOrder.LITTLE_ENDIAN);
FloatBuffer fb = bb.asFloatBuffer();
boolean rgb = config.channelLayout() == NDChannelLayout.RGB;
ImageUtils.FloatNormalizer f = ImageUtils.getFloatNormalizer(config, rgb);
Indexer imgIdx = m.createIndexer(direct);
if (imgIdx instanceof UByteIndexer) {
UByteIndexer ubIdx = (UByteIndexer) imgIdx;
if (config.format() == NDFormat.CHANNELS_FIRST) {
if (rgb) {
//Mat is HWC in BGR, we want (N)CHW in RGB format
int[] rgbToBgr = {2, 1, 0};
for (int c = 0; c < rgbToBgr.length; c++) {
for (int y = 0; y < h; y++) {
for (int x = 0; x < w; x++) {
//int idxBGR = (ch * w * y) + (ch * x) + rgbToBgr[c];
int v = ubIdx.get(y,x,rgbToBgr[c]);
float normalized = f.normalize(v, c);
fb.put(normalized);
}
}
}
} else {
UByteRawIndexer uByteRawIndexer = (UByteRawIndexer) ubIdx;
//Mat is HWC in BGR, we want (N)CHW in BGR format
for (int c = 0; c < 3; c++) {
for (int y = 0; y < h; y++) {
for (int x = 0; x < w; x++) {
int idxBGR = (ch * w * y) + (ch * x) + c;
int v = uByteRawIndexer.getRaw(idxBGR);
float normalized = f.normalize(v, c);
fb.put(normalized);
}
}
}
}
} else {
if (rgb) {
//Mat is HWC in BGR, we want (N)HWC in RGB format
fb.position(0);
UByteRawIndexer uByteRawIndexer = (UByteRawIndexer) ubIdx;
for (int i = 0; i < lengthElements; i += 3) {
int b = uByteRawIndexer.getRaw(i);
int g = uByteRawIndexer.getRaw(i + 1);
int r = uByteRawIndexer.getRaw(i + 2);
fb.put(f.normalize(r, 0));
fb.put(f.normalize(g, 1));
fb.put(f.normalize(b, 2));
}
} else {
//Mat is HWC in BGR, we want (N)HWC in BGR format
UByteRawIndexer uByteRawIndexer = (UByteRawIndexer) ubIdx;
for (int i = 0; i < lengthElements; i++) {
float normalized = f.normalize(uByteRawIndexer.getRaw(i), i % 3);
fb.put(normalized);
}
}
}
} else {
throw new RuntimeException("Not yet implemented: " + imgIdx.getClass());
}
return bb;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/data/FrameImage.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.data;
import ai.konduit.serving.pipeline.impl.data.image.base.BaseImage;
import org.bytedeco.javacv.Frame;
public class FrameImage extends BaseImage<Frame> {
public FrameImage(Frame image) {
super(image);
}
@Override
public int height() {
return image.imageHeight;
}
@Override
public int width() {
return image.imageWidth;
}
@Override
public int channels() {
return image.imageChannels;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/data/MatImage.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.data;
import ai.konduit.serving.pipeline.impl.data.image.base.BaseImage;
import org.bytedeco.opencv.opencv_core.Mat;
public class MatImage extends BaseImage<Mat> {
public MatImage(Mat image) {
super(image);
}
@Override
public int height() {
return image.rows();
}
@Override
public int width() {
return image.cols();
}
@Override
public int channels() {
return image.channels();
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/format/JavaCVImageConverters.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.format;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.impl.data.image.Bmp;
import ai.konduit.serving.pipeline.impl.data.image.Jpeg;
import ai.konduit.serving.pipeline.impl.data.image.Png;
import ai.konduit.serving.pipeline.impl.data.image.base.BaseImageFile;
import ai.konduit.serving.pipeline.impl.format.JavaImageConverters;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgcodecs;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.CvArr;
import org.bytedeco.opencv.opencv_core.IplImage;
import org.bytedeco.opencv.opencv_core.Mat;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
import static org.bytedeco.opencv.helper.opencv_imgcodecs.cvLoadImage;
public class JavaCVImageConverters {
private JavaCVImageConverters(){ }
public static class FrameToMatConverter extends JavaImageConverters.BaseConverter {
protected OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
public FrameToMatConverter() {
super(Frame.class, Mat.class);
}
@Override
protected <T> T doConversion(Image from, Class<T> to) {
Frame f = (Frame) from.get();
Mat m = converter.convert(f);
return (T)m;
}
}
public static abstract class OpenCVMatToAnyConverter extends JavaImageConverters.BaseConverter {
final String ext;
public OpenCVMatToAnyConverter(Class<?> other, String ext) {
super(Mat.class, other);
this.ext = ext;
}
@Override
protected <T> T doConversion(Image from, Class<T> to) {
Mat m = (Mat) from.get();
BytePointer out = new BytePointer();
org.bytedeco.opencv.global.opencv_imgcodecs.imencode(ext, m, out);
out.position(0);
return fromByteBuffer(out.asByteBuffer());
}
protected abstract <T> T fromByteBuffer(ByteBuffer byteBuffer);
}
public static class OpenCVAnyToMatConverter extends JavaImageConverters.BaseConverter {
public OpenCVAnyToMatConverter(Class<?> other) {
super(other, Mat.class);
}
@Override
protected <T> T doConversion(Image from, Class<T> to) {
BaseImageFile p = (BaseImageFile) from.get();
ByteBuffer fileBytes = p.getFileBytes();
Buffer bufferCast = (Buffer) fileBytes;
bufferCast.position(0);
Mat m = new Mat(new BytePointer(fileBytes), false);
Mat out = org.bytedeco.opencv.global.opencv_imgcodecs.imdecode(m, opencv_imgcodecs.IMREAD_UNCHANGED);
Mat ret = out.clone();
//strip alpha channel if exists
if(out.channels() > 3) {
cvtColor(out,ret,CV_BGRA2BGR);
}
return (T) ret;
}
}
public static class MatToFrameConverter extends JavaImageConverters.BaseConverter {
protected OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
public MatToFrameConverter() {
super(Mat.class, Frame.class);
}
@Override
protected <T> T doConversion(Image from, Class<T> to) {
Mat m = (Mat) from.get();
Frame f = converter.convert(m);
return (T)f;
}
}
public static class MatToPng extends OpenCVMatToAnyConverter {
public MatToPng() {
super(Png.class, ".png");
}
@Override
protected <T> T fromByteBuffer(ByteBuffer byteBuffer) {
return (T) new Png(byteBuffer);
}
}
public static class MatToJpeg extends OpenCVMatToAnyConverter {
public MatToJpeg() {
super(Jpeg.class, ".jpg");
}
@Override
protected <T> T fromByteBuffer(ByteBuffer byteBuffer) {
return (T) new Jpeg(byteBuffer);
}
}
public static class MatToBmp extends OpenCVMatToAnyConverter {
public MatToBmp() {
super(Bmp.class, ".bmp");
}
@Override
protected <T> T fromByteBuffer(ByteBuffer byteBuffer) {
return (T) new Bmp(byteBuffer);
}
}
    public static class PngToMat extends OpenCVAnyToMatConverter {
        public PngToMat() {
            super(Png.class);
        }
    }
    public static class JpegToMat extends OpenCVAnyToMatConverter {
        public JpegToMat() {
            super(Jpeg.class);
        }
    }
    public static class BmpToMat extends OpenCVAnyToMatConverter {
        public BmpToMat() {
            super(Bmp.class);
        }
    }
public static class FrameToPng extends JavaImageConverters.BaseConverter {
public FrameToPng() {
super(Frame.class, Png.class);
}
@Override
protected <T> T doConversion(Image from, Class<T> to) {
//Frame -> Mat -> Png. Is there a more efficient way?
Frame f = (Frame) from.get();
Mat m = Image.create(f).getAs(Mat.class);
return (T) Image.create(m).getAs(Png.class);
}
}
public static class PngToFrame extends JavaImageConverters.BaseConverter {
public PngToFrame() {
super(Png.class, Frame.class);
}
@Override
protected <T> T doConversion(Image from, Class<T> to) {
//Png -> Mat -> Frame. Is there a more efficient way?
Png p = (Png)from.get();
Mat m = Image.create(p).getAs(Mat.class);
return (T) Image.create(m).getAs(Frame.class);
}
}
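    // Usage sketch (illustrative): conversions are normally reached through the
    // Image API, which dispatches to the converters above; the variable names are
    // assumptions.
    //
    //   Image img = Image.create(mat);          // wrap an OpenCV Mat
    //   Png png = img.getAs(Png.class);         // Mat -> Png via MatToPng
    //   Frame frame = img.getAs(Frame.class);   // Mat -> Frame via MatToFrameConverter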
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/format/JavaCVImageFactory.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.format;
import ai.konduit.serving.data.image.data.FrameImage;
import ai.konduit.serving.data.image.data.MatImage;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.format.ImageFactory;
import org.bytedeco.javacv.Frame;
import org.bytedeco.opencv.opencv_core.Mat;
import org.nd4j.common.base.Preconditions;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
public class JavaCVImageFactory implements ImageFactory {
    private static final Set<Class<?>> SUPPORTED_TYPES = new HashSet<>();
static {
SUPPORTED_TYPES.add(Frame.class);
SUPPORTED_TYPES.add(Mat.class);
}
@Override
public Set<Class<?>> supportedTypes() {
return Collections.unmodifiableSet(SUPPORTED_TYPES);
}
@Override
public boolean canCreateFrom(Object o) {
return SUPPORTED_TYPES.contains(o.getClass());
}
@Override
public Image create(Object o) {
Preconditions.checkState(canCreateFrom(o), "Unable to create Image from object of type %s", o.getClass());
if(o instanceof Frame){
return new FrameImage((Frame) o);
} else if(o instanceof Mat){
return new MatImage((Mat) o);
} else {
throw new IllegalStateException("Unable to create image from format " + o.getClass());
}
}
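    // Usage sketch (illustrative):
    //   ImageFactory factory = new JavaCVImageFactory();
    //   if (factory.canCreateFrom(mat)) {
    //       Image image = factory.create(mat);  // wraps the Mat in a MatImage
    //   }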
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb/draw/DrawBoundingBoxRunner.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.step.bb.draw;
import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.data.image.convert.ImageToNDArray;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
import ai.konduit.serving.data.image.util.ColorConstants;
import ai.konduit.serving.data.image.util.ColorUtil;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.BoundingBox;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.util.DataUtils;
import lombok.NonNull;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Scalar;
import org.bytedeco.opencv.opencv_core.Size;
import org.nd4j.common.base.Preconditions;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@CanRun(DrawBoundingBoxStep.class)
public class DrawBoundingBoxRunner implements PipelineStepRunner {
protected final DrawBoundingBoxStep step;
public DrawBoundingBoxRunner(@NonNull DrawBoundingBoxStep step){
this.step = step;
}
@Override
public void close() {
}
@Override
public PipelineStep getPipelineStep() {
return step;
}
@Override
public Data exec(Context ctx, Data data) {
String imgName = step.imageName();
String bboxName = step.bboxName();
if(imgName == null){
String errMultipleKeys = "Image field name was not provided and could not be inferred: multiple image fields exist: %s and %s";
String errNoKeys = "Image field name was not provided and could not be inferred: no image fields exist";
imgName = DataUtils.inferField(data, ValueType.IMAGE, false, errMultipleKeys, errNoKeys);
}
if(bboxName == null){
String errMultipleKeys = "Bounding box field name was not provided and could not be inferred: multiple bounding box fields exist: %s and %s";
String errNoKeys = "Bounding box field name was not provided and could not be inferred: no bounding box fields exist";
bboxName = DataUtils.inferField(data, ValueType.BOUNDING_BOX, false, errMultipleKeys, errNoKeys);
}
Image i = data.getImage(imgName);
ValueType vt = data.type(bboxName);
List<BoundingBox> list;
if(vt == ValueType.BOUNDING_BOX){
list = Collections.singletonList(data.getBoundingBox(bboxName));
} else if(vt == ValueType.LIST){
if(data.listType(bboxName) == ValueType.BOUNDING_BOX) {
list = data.getListBoundingBox(bboxName);
} else {
throw new IllegalStateException("Data[" + bboxName + "] is List<" + data.listType(bboxName) + "> not List<BoundingBox>");
}
} else {
throw new IllegalStateException("Data[" + bboxName + "] is neither a BoundingBox or List<BoundingBox> - is " + vt);
}
        Mat m = i.getAs(Mat.class).clone(); //Make a copy to avoid modifying input data that might be used elsewhere (if in graph pipeline)
Map<String,String> cc = step.classColors();
String dc = step.color();
Mat scaled = scaleIfRequired(m);
int thickness = Math.max(1, step.lineThickness());
ImageToNDArrayConfig im2ndConf = step.imageToNDArrayConfig();
for(BoundingBox bb : list) {
bb = accountForCrop(i, bb, im2ndConf);
Scalar color;
if (step.classColors() == null && step.color() == null) {
//No color specified - use default color
color = Scalar.GREEN;
} else {
if (cc != null && bb.label() != null && cc.containsKey(bb.label())){
String s = cc.get(bb.label());
color = ColorUtil.stringToColor(s);
} else if(dc != null){
color = ColorUtil.stringToColor(dc);
} else {
color = Scalar.GREEN;
}
}
double x1 = Math.min(bb.x1(), bb.x2());
double y1 = Math.min(bb.y1(), bb.y2());
int x = (int)(x1 * scaled.cols());
int y = (int)(y1 * scaled.rows());
int h = (int) Math.round(bb.height() * scaled.rows());
int w = (int)Math.round(bb.width() * scaled.cols());
Rect r = new Rect(x, y, w, h);
org.bytedeco.opencv.global.opencv_imgproc.rectangle(scaled, r, color, thickness, 8, 0);
}
if(im2ndConf != null && step.drawCropRegion()){
BoundingBox bb = ImageToNDArray.getCropRegion(i, im2ndConf);
Scalar color;
if (step.cropRegionColor() == null) {
//No color specified - use default color
color = Scalar.BLUE;
} else {
color = ColorUtil.stringToColor(step.cropRegionColor());
}
int x = (int)(bb.x1() * scaled.cols());
int y = (int)(bb.y1() * scaled.rows());
int h = (int)(bb.height() * scaled.rows());
int w = (int)(bb.width() * scaled.cols());
Rect r = new Rect(x, y, w, h);
org.bytedeco.opencv.global.opencv_imgproc.rectangle(scaled, r, color, thickness, 8, 0);
}
return Data.singleton(imgName, Image.create(scaled));
}
protected Mat scaleIfRequired(Mat m){
if(step.scale() != null && step.scale() != DrawBoundingBoxStep.Scale.NONE){
boolean scaleRequired = false;
int newH = 0;
int newW = 0;
            if(step.scale() == DrawBoundingBoxStep.Scale.AT_LEAST){
                if(m.rows() < step.resizeH() || m.cols() < step.resizeW()){
                    scaleRequired = true;
                    double ar = m.cols() / (double)m.rows();
                    if(m.rows() < step.resizeH() && m.cols() >= step.resizeW()){
                        //Height is below the minimum - pin height to resizeH
                        newH = step.resizeH();
                        newW = (int) (ar * newH);
                    } else if(m.rows() > step.resizeH() && m.cols() < step.resizeW()){
                        //Width is below the minimum - pin width to resizeW
                        newW = step.resizeW();
                        newH = (int)(newW / ar);
                    } else {
                        //Both dims below minimum - pin whichever needs the larger upscale
                        if((int)(step.resizeW() / ar) < step.resizeH()){
                            //Pinning width would leave height too small - pin height
                            newH = step.resizeH();
                            newW = (int) (ar * newH);
                        } else {
                            //Pin width
                            newW = step.resizeW();
                            newH = (int)(newW / ar);
                        }
                    }
                }
            } else if(step.scale() == DrawBoundingBoxStep.Scale.AT_MOST){
                Preconditions.checkState(step.resizeH() > 0 && step.resizeW() > 0, "Invalid resize: resizeH=%s, resizeW=%s", step.resizeH(), step.resizeW());
                if(m.rows() > step.resizeH() || m.cols() > step.resizeW()){
                    scaleRequired = true;
                    double ar = m.cols() / (double)m.rows();
                    if(m.rows() > step.resizeH() && m.cols() <= step.resizeW()){
                        //Height exceeds the maximum - pin height to resizeH
                        newH = step.resizeH();
                        newW = (int) (ar * newH);
                    } else if(m.rows() < step.resizeH() && m.cols() > step.resizeW()){
                        //Width exceeds the maximum - pin width to resizeW
                        newW = step.resizeW();
                        newH = (int)(newW / ar);
                    } else {
                        //Both dims exceed maximum - pin whichever needs the larger downscale
                        if((int)(step.resizeW() / ar) > step.resizeH()){
                            //Pinning width would leave height too large - pin height
                            newH = step.resizeH();
                            newW = (int) (ar * newH);
                        } else {
                            //Pin width
                            newW = step.resizeW();
                            newH = (int)(newW / ar);
                        }
                    }
                }
            }
if(scaleRequired){
Mat resized = new Mat();
            org.bytedeco.opencv.global.opencv_imgproc.resize(m, resized, new Size(newW, newH)); //OpenCV Size is (width, height)
return resized;
} else {
return m;
}
} else {
return m;
}
}
protected BoundingBox accountForCrop(Image image, BoundingBox bbox, ImageToNDArrayConfig config){
if(config == null)
return bbox;
BoundingBox cropRegion = ImageToNDArray.getCropRegion(image, config);
double cropWidth = cropRegion.width();
double cropHeight = cropRegion.height();
double x1 = cropRegion.x1() + cropWidth * bbox.x1();
double x2 = cropRegion.x1() + cropWidth * bbox.x2();
double y1 = cropRegion.y1() + cropHeight * bbox.y1();
double y2 = cropRegion.y1() + cropHeight * bbox.y2();
return BoundingBox.createXY(x1, x2, y1, y2, bbox.label(), bbox.probability());
}
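    // Worked example (illustrative): with a crop region spanning x in [0.25, 0.75]
    // and y in [0.0, 1.0], a box with x1 = 0.5 maps to 0.25 + 0.5 * 0.5 = 0.5 in
    // the original image's coordinates.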
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb/draw/DrawBoundingBoxStepRunnerFactory.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.step.bb.draw;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;
public class DrawBoundingBoxStepRunnerFactory implements PipelineStepRunnerFactory {
@Override
public boolean canRun(PipelineStep pipelineStep) {
return pipelineStep instanceof DrawBoundingBoxStep;
}
@Override
public PipelineStepRunner create(PipelineStep pipelineStep) {
Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep);
return new DrawBoundingBoxRunner((DrawBoundingBoxStep) pipelineStep);
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb/extract/ExtractBoundingBoxRunner.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.step.bb.extract;
import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.data.image.convert.ImageToNDArray;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
import ai.konduit.serving.data.image.util.ImageUtils;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.BoundingBox;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.util.DataUtils;
import lombok.NonNull;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@CanRun(ExtractBoundingBoxStep.class)
public class ExtractBoundingBoxRunner implements PipelineStepRunner {
protected final ExtractBoundingBoxStep step;
public ExtractBoundingBoxRunner(@NonNull ExtractBoundingBoxStep step){
this.step = step;
}
@Override
public void close() {
}
@Override
public PipelineStep getPipelineStep() {
return step;
}
@Override
public Data exec(Context ctx, Data data) {
String imgName = step.imageName(); //TODO find if null
String bboxName = step.bboxName(); //TODO find if null
if(imgName == null){
String errMultipleKeys = "Image field name was not provided and could not be inferred: multiple image fields exist: %s and %s";
String errNoKeys = "Image field name was not provided and could not be inferred: no image fields exist";
imgName = DataUtils.inferField(data, ValueType.IMAGE, false, errMultipleKeys, errNoKeys);
}
if(bboxName == null){
String errMultipleKeys = "Bounding box field name was not provided and could not be inferred: multiple BoundingBox (or List<BoundingBox>) fields exist: %s and %s";
String errNoKeys = "Bounding box field name was not provided and could not be inferred: no BoundingBox (or List<BoundingBox>) fields exist";
bboxName = DataUtils.inferField(data, ValueType.BOUNDING_BOX, true, errMultipleKeys, errNoKeys);
}
Image i = data.getImage(imgName);
ValueType vt = data.type(bboxName);
List<BoundingBox> list;
boolean singleValue;
if(vt == ValueType.BOUNDING_BOX){
list = Collections.singletonList(data.getBoundingBox(bboxName));
singleValue = true;
} else if(vt == ValueType.LIST){
if(data.listType(bboxName) == ValueType.BOUNDING_BOX) {
list = data.getListBoundingBox(bboxName);
} else {
throw new IllegalStateException("Data[" + bboxName + "] is List<" + data.listType(bboxName) + "> not List<BoundingBox>");
}
singleValue = false;
} else {
throw new IllegalStateException("Data[" + bboxName + "] is neither a BoundingBox or List<BoundingBox> - is " + vt);
}
ImageToNDArrayConfig im2ndConf = step.imageToNDArrayConfig();
Mat img = i.getAs(Mat.class);
List<Image> out = new ArrayList<>();
for(BoundingBox bb : list) {
bb = ImageUtils.accountForCrop(i, bb, im2ndConf);
BoundingBox bbPx = BoundingBox.create(bb.cx() * img.cols(), bb.cy() * img.rows(),
bb.height() * img.rows(), bb.width() * img.cols());
if(step.aspectRatio() != null){
double desiredAR = step.aspectRatio();
double actualAR = bbPx.width() / bbPx.height();
if(desiredAR < actualAR){
//Increase height dimension to give desired AR
double newH = bbPx.width() / desiredAR;
bbPx = BoundingBox.create(bbPx.cx(), bbPx.cy(), newH, bbPx.width());
} else if(desiredAR > actualAR){
//Increase width dimension to give desired AR
double newW = bbPx.height() * desiredAR;
bbPx = BoundingBox.create(bbPx.cx(), bbPx.cy(), bbPx.height(), newW);
}
}
double x1 = Math.min(bbPx.x1(), bbPx.x2());
double y1 = Math.min(bbPx.y1(), bbPx.y2());
int x = (int) Math.round(x1);
int y = (int)Math.round(y1);
int h = (int)Math.round(bbPx.height());
int w = (int)Math.round(bbPx.width());
Rect r = new Rect(x, y, w, h);
Mat m = img.apply(r);
if(step.resizeH() != null && step.resizeW() != null){
int rH = step.resizeH();
int rW = step.resizeW();
Mat resized = new Mat();
                org.bytedeco.opencv.global.opencv_imgproc.resize(m, resized, new Size(rW, rH)); //OpenCV Size is (width, height)
m = resized;
}
out.add(Image.create(m));
}
String outName = step.outputName() == null ? imgName : step.outputName();
//TODO keep all other values (optionally)
Data d;
if(singleValue){
d = Data.singleton(outName, out.get(0));
} else {
d = Data.singletonList(outName, out, ValueType.IMAGE);
}
if(step.keepOtherFields()){
for(String s : data.keys()){
if(!imgName.equals(s) && !bboxName.equals(s)){
d.copyFrom(s, data);
}
}
}
return d;
}
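    // Worked example (illustrative): with aspectRatio = 1.0 and a detected box of
    // 200x100 px (width x height), actualAR = 2.0 > desiredAR, so the height is
    // grown to newH = 200 / 1.0 = 200 px around the same center before extraction.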
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb
|
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/bb/extract/ExtractBoundingBoxStepRunnerFactory.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.data.image.step.bb.extract;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;
public class ExtractBoundingBoxStepRunnerFactory implements PipelineStepRunnerFactory {
@Override
public boolean canRun(PipelineStep pipelineStep) {
return pipelineStep instanceof ExtractBoundingBoxStep;
}
@Override
public PipelineStepRunner create(PipelineStep pipelineStep) {
Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep);
return new ExtractBoundingBoxRunner((ExtractBoundingBoxStep) pipelineStep);
}
}
|