index
int64
repo_id
string
file_path
string
content
string
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/AnyStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.annotation.json.JsonName; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.EqualsAndHashCode; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.List; /** * AnyStep forwards the first available input to the output. * Usually used in conjunction with a Switch step - i.e., input -> Switch -> (left branch, right branch) -> Any<br> * If more than one of the inputs is available, the output is undefined (could be any of the inputs) * * @author Alex Black */ @Data @EqualsAndHashCode(callSuper = true) @JsonName(GraphConstants.GRAPH_ANY_JSON_KEY) @Schema(description = "A graph pipeline step that forwards the first available input to the output. Usually used in " + "conjunction with an earlier Switch step - i.e., input -> Switch -> (left branch, right branch) -> Any." + " If more than one of the inputs is available to AnyStep, the output is undefined (it could return any of the inputs)") public class AnyStep extends BaseMergeStep implements GraphStep { public AnyStep(@JsonProperty("GraphBuilder") GraphBuilder b, @JsonProperty("steps") List<String> steps, @JsonProperty("name") String name) { super(b, steps, name); } @Override public String toString() { return "Any(\"" + String.join("\",\"", inputs()) + "\")"; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/BaseGraphStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import lombok.*; import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties; @AllArgsConstructor @Data @NoArgsConstructor @JsonIgnoreProperties("builder") @EqualsAndHashCode(exclude = {"builder"}) public abstract class BaseGraphStep implements GraphStep { protected GraphBuilder builder; protected String name; @Override public String name() { return name; } @Override public void name(@NonNull String name){ this.name = name; } @Override public GraphBuilder builder() { return builder; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/BaseMergeStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.pipeline.api.step.PipelineStep; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; import java.util.List; import java.util.stream.Collectors; @AllArgsConstructor @NoArgsConstructor @EqualsAndHashCode(callSuper = true) public abstract class BaseMergeStep extends BaseGraphStep { protected List<String> steps; public BaseMergeStep(GraphBuilder b, List<String> steps, String name){ super(b, name); this.steps = steps; } @Override public int numInputs() { return steps.size(); } @Override public String input() { throw new UnsupportedOperationException("Multiple inputs for MergeStep"); } @Override public List<String> inputs() { return steps; } @Override public boolean hasStep() { return false; } @Override public PipelineStep getStep() { throw new UnsupportedOperationException("MergeStep does not have a PipelineStep associated with it"); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/GraphBuilder.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.pipeline.impl.pipeline.GraphPipeline; import org.nd4j.common.base.Preconditions; import java.util.*; /** * A utility class for building {@link GraphPipeline} instances.<br> * Usage: * <pre> * {@code * GraphBuilder b = new GraphBuilder(); * GraphStep input = b.input(); * GraphStep output = input.then("myStep", ...); * Pipeline p = b.build(output); * } * </pre> * * @author Alex Black */ public class GraphBuilder { private List<GraphStep> steps = new ArrayList<>(); private final GraphStep input = new Input(this); private String id; public GraphStep input() { return input; } public GraphBuilder id(String id){ this.id = id; return this; } public GraphStep[] switchOp(String name, SwitchFn fn, GraphStep step) { int nOut = fn.numOutputs(); SwitchStep swStep = new SwitchStep(this, name, step.name(), fn); add(swStep); GraphStep[] out = new GraphStep[nOut]; for (int i = 0; i < nOut; i++) { String oName = name + "_" + i; out[i] = new SwitchOutput(this, oName, name, i); add(out[i]); } return out; } public GraphStep any(String name, GraphStep... steps) { List<String> l = new ArrayList<>(); for (GraphStep g : steps) { l.add(g.name()); } GraphStep g = new AnyStep(this, l, name); add(g); return g; } //Package private void add(GraphStep step) { Preconditions.checkState(!hasStep(step.name()), "Graph pipeline already has a step with name \"%s\"", step.name()); steps.add(step); } //Package private boolean hasStep(String name) { for (GraphStep g : steps) { if (name.equals(g.name())) return true; } return false; } public GraphPipeline build(GraphStep outputStep) { Map<String, GraphStep> m = new HashMap<>(); for (GraphStep g : steps) { m.put(g.name(), g); } m.put(outputStep.name(), outputStep); return new GraphPipeline(m, outputStep.name(), id); } }
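For orientation, here is a hedged usage sketch of the branching API above, combining switchOp and any. The step names, and the PipelineStep instances stepA/stepB, are hypothetical placeholders; DataIntSwitchFn is the built-in switch function defined later in this package.

// A minimal sketch, assuming two hypothetical PipelineStep instances stepA/stepB
// and imports from ai.konduit.serving.pipeline.impl.pipeline.graph[.switchfn].
GraphBuilder b = new GraphBuilder();
GraphStep input = b.input();

// Route each input Data instance to one of two branches, selected by the
// integer field "branch" (DataIntSwitchFn(numOutputs, fieldName)).
GraphStep[] branches = b.switchOp("switch", new DataIntSwitchFn(2, "branch"), input);
GraphStep left  = branches[0].then("modelA", stepA);   // taken when branch == 0
GraphStep right = branches[1].then("modelB", stepB);   // taken when branch == 1

// Only one branch produces output per execution; Any forwards whichever arrives.
GraphStep out = b.any("any", left, right);
GraphPipeline p = b.build(out);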
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/GraphConstants.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; /** * Constants for serializing GraphPipeline instances * * @author Alex Black */ public class GraphConstants { private GraphConstants() { } public static final String INPUT_KEY = "@input"; public static final String TYPE_KEY = "@type"; public static final String GRAPH_MERGE_JSON_KEY = "MERGE"; public static final String GRAPH_ANY_JSON_KEY = "ANY"; public static final String GRAPH_SWITCH_JSON_KEY = "SWITCH"; public static final String GRAPH_SWITCH_OUTPUT_JSON_KEY = "SWITCH_OUTPUT"; }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/GraphStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.pipeline.api.TextConfig; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.impl.pipeline.serde.GraphStepDeserializer; import ai.konduit.serving.pipeline.impl.pipeline.serde.GraphStepSerializer; import io.swagger.v3.oas.annotations.media.Schema; import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize; import org.nd4j.shade.jackson.databind.annotation.JsonSerialize; import java.util.ArrayList; import java.util.List; /** * GraphStep represents one step within a GraphPipeline.<br> * Other than SwitchStep, all GraphSteps are either (single input, single output), or (multiple input, single output) * * @author Alex Black */ @JsonSerialize(using = GraphStepSerializer.class) @JsonDeserialize(using = GraphStepDeserializer.class) @Schema(description = "A step in a graph pipeline.") public interface GraphStep extends TextConfig { /** * @return Name of the graph step */ String name(); /** * Set the name of the GraphStep. Should not be used after the graph has been created * * @param name Name to set */ void name(String name); /** * @return The builder for this GraphStep. May be null after graph creation */ GraphBuilder builder(); /** * @return The number of inputs to this GraphStep */ int numInputs(); /** * @return The name of the input step, if one exists. If multiple inputs feed into this GraphStep, use {@link #inputs()} instead */ String input(); /** * @return Names of the inputs */ List<String> inputs(); /** * @return True if the GraphStep has a {@link PipelineStep} internally */ boolean hasStep(); @Schema(description = "Determines if the graph step has a pipeline step associated with it.", accessMode = Schema.AccessMode.READ_ONLY) default boolean getHasStep() { return hasStep(); } @Schema(description = "Gets the list of inputs to the graph step.") default List<String> getInputs() { return inputs(); } @Schema(description = "Gets the number of inputs coming into the pipeline step.") default int getNumInputs() { return numInputs(); } @Schema(description = "Name of the graph step.") default String getName() { return name(); } /** * @return The {@link PipelineStep}, if one exists (according to {@link #hasStep()}) */ @Schema(description = "Pipeline step that's associated with the graph step.") PipelineStep getStep(); /** * Add a new GraphStep to the GraphBuilder/GraphPipeline, with the specified name, with data fed in from this step * * @param name Name of the new step * @param step New step to add * @return The added step as a GraphStep */ default GraphStep then(String name, PipelineStep step) { GraphStep s = new PipelineGraphStep(builder(), step, name, this.name()); builder().add(s); return s; } /** * Merge the output of this GraphStep with the specified GraphSteps<br> * This means that during execution, the output Data instance of this step (and the Data instances of other steps) * are combined together into a single Data instance * * @param name Name for the new output step * @param steps Steps, the output of which should be merged with the output of this step * @return The GraphStep of the merged data */ default GraphStep mergeWith(String name, GraphStep... steps) { List<String> allSteps = new ArrayList<>(); allSteps.add(this.name()); for (GraphStep g : steps) { allSteps.add(g.name()); } MergeStep out = new MergeStep(builder(), allSteps, name); builder().add(out); return out; } }
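The then/mergeWith defaults above compose as in this hedged sketch; the step names are hypothetical, and b/input/stepA/stepB are the placeholders from the GraphBuilder example earlier.

// Fan out from the input, then merge both outputs back into a single
// Data instance ("featuresA"/"featuresB" are hypothetical names).
GraphStep a = input.then("featuresA", stepA);
GraphStep c = input.then("featuresB", stepB);
GraphStep merged = a.mergeWith("merge", c);   // Data from both steps combined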
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/Input.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.impl.pipeline.GraphPipeline; import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.Collections; import java.util.List; /** * An Input node for {@link GraphBuilder} * @author Alex Black */ @Data @EqualsAndHashCode(exclude = {"builder"}) public class Input implements GraphStep { private final GraphBuilder builder; public Input(@JsonProperty("builder") GraphBuilder builder ){ this.builder = builder; } @Override public String name() { return GraphPipeline.INPUT_KEY; } @Override public void name(String name) { throw new UnsupportedOperationException("Setting name not supported for Input GraphStep"); } @Override public GraphBuilder builder() { return builder; } @Override public int numInputs() { return 0; } @Override public String input() { return null; } @Override public List<String> inputs() { return Collections.emptyList(); } @Override public boolean hasStep() { return false; } @Override public PipelineStep getStep() { throw new UnsupportedOperationException("Input does not have a PipelineStep associated with it"); } @Override public String toString(){ return "Input()"; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/MergeStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.annotation.json.JsonName; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.List; /** * Merge the output of the input GraphSteps together<br> * This means that during execution, the output Data instances of all the steps are combined together into a single Data * instance. * * @author Alex Black */ @AllArgsConstructor @Data @EqualsAndHashCode(callSuper = true) @JsonName(GraphConstants.GRAPH_MERGE_JSON_KEY) @Schema(description = "A graph pipeline step that configures how to merge the output Data instances of multiple graph " + "steps into a single Data instance.") public class MergeStep extends BaseMergeStep implements GraphStep { public MergeStep(@JsonProperty("GraphBuilder") GraphBuilder b, @JsonProperty("steps") List<String> steps, @JsonProperty("name") String name) { super(b, steps, name); } @Override public String toString() { return "Merge(\"" + String.join("\",\"", inputs()) + "\")"; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/PipelineGraphStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.pipeline.api.step.PipelineStep; import lombok.*; import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties; import org.nd4j.shade.jackson.annotation.JsonProperty; import org.nd4j.shade.jackson.annotation.JsonUnwrapped; import java.util.Collections; import java.util.List; /** * A {@link GraphStep} that contains a {@link PipelineStep} * * @author Alex Black */ @Data @NoArgsConstructor @EqualsAndHashCode(callSuper = true) @JsonIgnoreProperties("builder") public class PipelineGraphStep extends BaseGraphStep { //Note: builder and name are inherited from BaseGraphStep; redeclaring them here would shadow the superclass fields @JsonUnwrapped private PipelineStep step; private String input; public PipelineGraphStep(@JsonProperty("GraphBuilder") GraphBuilder builder, @JsonProperty("step") PipelineStep step, @JsonProperty("name") String name, @JsonProperty("input") String input) { super(builder, name); this.step = step; this.input = input; } @Override public int numInputs() { return 1; } @Override public String input() { return input; } @Override public List<String> inputs() { return Collections.singletonList(input); } @Override public boolean hasStep() { return true; } @Override public String toString() { return step.toString(); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/SwitchFn.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.pipeline.api.TextConfig; import ai.konduit.serving.pipeline.api.data.Data; import org.nd4j.shade.jackson.annotation.JsonTypeInfo; import static org.nd4j.shade.jackson.annotation.JsonTypeInfo.Id.NAME; /** * SwitchFn is used with {@link SwitchStep} in order to determine which of {@code numOutputs()} outputs the input * Data instance will be forwarded on to.<br> * Note that the number of possible outputs is fixed at graph construction time * * @author Alex Black */ @JsonTypeInfo(use = NAME, property = "@type") public interface SwitchFn extends TextConfig { /** * @return Number of outputs */ int numOutputs(); /** * Select the index of the output that the specified Data instance should be forwarded on to * * @param data Input data * @return Index of the output to forward this Data on to. Must be in range 0 to numOutputs()-1 inclusive */ int selectOutput(Data data); }
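As a hedged illustration of this contract, a custom SwitchFn might look like the sketch below. The class and its "count" field are hypothetical; the only library API assumed is the Data.getLong accessor used by DataIntSwitchFn later in this package. A production implementation would also want a @JsonName mapping so it can round-trip through JSON like the built-in functions.

import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchFn;

// Hypothetical two-way switch: forward to output 1 when the "count" field
// exceeds a threshold, otherwise to output 0.
public class ThresholdSwitchFn implements SwitchFn {
    private final long threshold;

    public ThresholdSwitchFn(long threshold) { this.threshold = threshold; }

    @Override
    public int numOutputs() { return 2; }   // fixed at graph construction time

    @Override
    public int selectOutput(Data data) {
        return data.getLong("count") > threshold ? 1 : 0;
    }
}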
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/SwitchOutput.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.EqualsAndHashCode; import lombok.experimental.Accessors; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.Collections; import java.util.List; /** * SwitchOutput simply represents one output branch of a {@link SwitchStep} * * @author Alex Black */ @Data @EqualsAndHashCode(callSuper = true) @Accessors(fluent = true) @JsonName(GraphConstants.GRAPH_SWITCH_OUTPUT_JSON_KEY) @Schema(description = "A graph pipeline step that is a single item in a list of multiple switch step outputs.") public class SwitchOutput extends BaseGraphStep implements GraphStep { @Schema(description = "Index of the switch step output that this step represents.") private final int outputNum; @Schema(description = "Name of the switch step that this output belongs to.") private final String switchName; /** * @param b GraphBuilder * @param name Name of the output node * @param switchName Name of the Switch node that this SwitchOutput represents the output for * @param outputNum Output number of the SwitchStep that this is for */ public SwitchOutput(@JsonProperty("GraphBuilder") GraphBuilder b, @JsonProperty("name") String name, @JsonProperty("switchName") String switchName, @JsonProperty("outputNum") int outputNum){ super(b, name); this.switchName = switchName; this.outputNum = outputNum; } @Override public int numInputs() { return 1; } @Override public String input() { return switchName; } @Override public List<String> inputs() { return Collections.singletonList(input()); } @Override public boolean hasStep() { return false; } @Override public PipelineStep getStep() { return null; } @Override public String toString(){ return "SwitchOutput(\"" + input() + "\"," + outputNum + ")"; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/SwitchStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import lombok.*; import lombok.experimental.Accessors; import java.util.Collections; import java.util.List; /** * The SwitchStep forwards the input Data instance to exactly one of its N outputs, using a provided {@link SwitchFn}. * This can be used to implement conditional operations. * Usually this is used in conjunction with an AnyStep: i.e., input -> Switch -> (left branch, right branch) -> Any<br> * * @author Alex Black */ @Data @EqualsAndHashCode(callSuper = true) @AllArgsConstructor @NoArgsConstructor @Accessors(fluent = true) @JsonName(GraphConstants.GRAPH_SWITCH_JSON_KEY) public class SwitchStep extends BaseGraphStep implements GraphStep { protected String inStep; protected SwitchFn switchFn; /** * @param b GraphBuilder * @param name Name of this node * @param inStep Name of the input node * @param switchFn Switch function to use to decide which output to forward the input to */ public SwitchStep(GraphBuilder b, String name, String inStep, SwitchFn switchFn) { super(b, name); this.inStep = inStep; this.switchFn = switchFn; } @Override public int numInputs() { return 1; } @Override public String input() { return inStep; } @Override public List<String> inputs() { return Collections.singletonList(inStep); } @Override public boolean hasStep() { return false; } @Override public PipelineStep getStep() { return null; } @Override public String toString() { return "Switch(fn=" + switchFn + ",inputs=" + inputs() + ")"; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/switchfn/DataIntSwitchFn.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph.switchfn; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchFn; import io.swagger.v3.oas.annotations.media.Schema; import lombok.experimental.Accessors; import org.nd4j.common.base.Preconditions; import org.nd4j.shade.jackson.annotation.JsonProperty; /** * A {@link SwitchFn} that selects the output based on an integer value from the Data instance. * The value of the specified field must be an integer between 0 and numOutputs-1 inclusive * * @author Alex Black */ @lombok.Data @Accessors(fluent = true) @JsonName("INT_SWITCH") @Schema(description = "A switch function that selects the output based on an integer value from the data instance. " + "The specified field name must be an integer value between 0 and numOutputs-1 inclusive") public class DataIntSwitchFn implements SwitchFn { @Schema(description = "The number of outputs from a switch step.") private final int numOutputs; @Schema(description = "Field name key from a data instance whose value will channel the graph flow to a specific switch branch. " + "In this case, it will be the nth index from a list of switch outputs.") private final String fieldName; public DataIntSwitchFn(@JsonProperty("numOutputs") int numOutputs, @JsonProperty("fieldName") String fieldName){ Preconditions.checkState(numOutputs > 0, "Number of outputs must be positive, got %s", numOutputs); this.numOutputs = numOutputs; this.fieldName = fieldName; } @Override public int numOutputs() { return numOutputs; } @Override public int selectOutput(Data data) { Preconditions.checkState(data.has(fieldName), "Input data does not have an integer field of name \"%s\"", fieldName); Preconditions.checkState(data.type(fieldName) == ValueType.INT64, "Input data field \"%s\" has type \"%s\", " + "must be INT64 (long)", fieldName, data.type(fieldName)); long l = data.getLong(fieldName); return (int)l; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/graph/switchfn/DataStringSwitchFn.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.graph.switchfn; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchFn; import io.swagger.v3.oas.annotations.media.Schema; import lombok.experimental.Accessors; import org.nd4j.common.base.Preconditions; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.Map; /** * A {@link SwitchFn} that selects the output based on a String value from the Data instance.<br> * The specified field name must be a String value, and must be present in the provided map.<br> * For example, if the map has values {("x", 0), ("y", 1)} then if Data.getString(fieldName) is "x", the input * is forwarded to output 0. If it is "y" the input is forwarded to output 1. If it is any other value, an exception * is thrown. * * @author Alex Black */ @lombok.Data @Accessors(fluent = true) @JsonName("STRING_SWITCH") @Schema(description = "A switch function that selects the output based on a string value from the data instance. " + "The specified field name must be a string value, and must be present in the provided selection map. " + "For example, if the map has values {x: 0, y: 1} then if data[fieldName] is x, the input " + "is forwarded to output 0. If it is y the input is forwarded to output 1.") public class DataStringSwitchFn implements SwitchFn { @Schema(description = "The number of outputs from a switch step. Must be equal to the size of the selection map.") private final int numOutputs; @Schema(description = "Field name key from a data instance whose value should be one of the keys in the selection map.") private final String fieldName; @Schema(description = "Selection map that determines where the output from the switch will be channelled to.") private final Map<String, Integer> map; public DataStringSwitchFn(@JsonProperty("numOutputs") int numOutputs, @JsonProperty("fieldName") String fieldName, @JsonProperty("map") Map<String, Integer> map) { Preconditions.checkState(numOutputs > 0, "Number of outputs must be positive, got %s", numOutputs); this.numOutputs = numOutputs; this.fieldName = fieldName; this.map = map; } @Override public int numOutputs() { return numOutputs; } @Override public int selectOutput(Data data) { Preconditions.checkState(data.has(fieldName), "Input data does not have a String field of name \"%s\"", fieldName); Preconditions.checkState(data.type(fieldName) == ValueType.STRING, "Input data field \"%s\" has type \"%s\", " + "must be String", fieldName, data.type(fieldName)); String s = data.getString(fieldName); Preconditions.checkState(map.containsKey(s), "String->Integer map does not contain key for value \"%s\": " + "Map has only keys %s", s, map.keySet()); return map.get(s); } }
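A hedged construction sketch for DataStringSwitchFn; the "requestType" field and its route values are hypothetical, and b/input are the placeholders from the GraphBuilder example earlier.

import java.util.HashMap;
import java.util.Map;

// Route "image" to switch output 0 and "text" to output 1; any other value
// of the "requestType" field causes selectOutput to throw.
Map<String, Integer> routes = new HashMap<>();
routes.put("image", 0);
routes.put("text", 1);
SwitchFn fn = new DataStringSwitchFn(2, "requestType", routes);
GraphStep[] outs = b.switchOp("route", fn, input);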
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/loop/SimpleLoopTrigger.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.loop; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.pipeline.Trigger; import io.swagger.v3.oas.annotations.media.Schema; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.ToString; import lombok.extern.slf4j.Slf4j; import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; /** * A simple looping trigger to be used with an {@link ai.konduit.serving.pipeline.impl.pipeline.AsyncPipeline}.<br> * It has two modes of operation:<br> * (a) Loop continuously with no delay - if frequencyMs is not set, or<br> * (b) Loop every frequencyMs milliseconds, if this is set<br> * <br> * Optionally, a fixed input Data instance may be provided that is fed into the pipeline at each call of the underlying * pipeline (when executed in an async manner) * * @author Alex Black */ @Schema(description = "A simple looping trigger to be used with an AsyncPipeline. It has two modes of operation:<br>" + "(a) Loop continuously with no delay - if frequencyMs is not set, or<br>" + "(b) Loop every frequencyMs milliseconds, if this is set<br>" + "Optionally, a fixed input Data instance may be provided that is fed into the pipeline at each call of the underlying " + "pipeline (when executed in an async manner). If this is not provided, execution is performed using Data.empty() as input.") @lombok.Data @Slf4j @JsonIgnoreProperties({"stop", "thread", "exception", "first", "current", "callbackFn"}) @JsonName("SIMPLE_LOOP_TRIGGER") @EqualsAndHashCode(onlyExplicitlyIncluded = true) @ToString(onlyExplicitlyIncluded = true) public class SimpleLoopTrigger implements Trigger { @EqualsAndHashCode.Include @ToString.Include protected final Long frequencyMs; @EqualsAndHashCode.Include protected final Data data; protected AtomicBoolean stop = new AtomicBoolean(); protected Thread thread; protected Throwable exception; protected CountDownLatch first = new CountDownLatch(1); protected volatile Data current; protected Function<Data,Data> callbackFn; public SimpleLoopTrigger(){ this((Long)null); } public SimpleLoopTrigger(Integer frequencyMs){ this(frequencyMs == null ? null : frequencyMs.longValue()); } public SimpleLoopTrigger(Long frequencyMs) { this(frequencyMs, null); } public SimpleLoopTrigger(@JsonProperty("frequencyMs") Long frequencyMs, @JsonProperty("data") Data inputData){ this.frequencyMs = frequencyMs; this.data = inputData; } @Override public Data query(Data data) { if(stop.get()) throw new IllegalStateException("Unable to get output after trigger has been stopped"); if(current == null){ if(exception != null){ throw new RuntimeException("Error in Async execution thread", exception); } else { try { first.await(); } catch (InterruptedException e){ log.error("Error while waiting for first async result", e); } } //Latch was counted down. We need to check again for an exception, as an exception could have occurred // after the last exception check if(current != null){ return current; } else if(exception != null){ throw new RuntimeException("Error in Async execution thread", exception); } else { throw new RuntimeException("Unknown error occurred: current Data is null but no exception was thrown by async executor"); } } return current; } @Override public void setCallback(@NonNull Function<Data, Data> callbackFn) { this.callbackFn = callbackFn; if(thread != null){ stop.set(true); thread.interrupt(); } stop = new AtomicBoolean(); current = null; first = new CountDownLatch(1); //Start up a new thread for performing inference thread = new Thread(new InferenceRunner(stop, first)); thread.setDaemon(true); //TODO should this be a daemon thread or not? thread.start(); } @Override public void stop() { stop.set(true); if(thread != null){ thread.interrupt(); } } protected long firstRunDelay(){ return 0; } protected long nextStart(long lastStart){ return lastStart + frequencyMs; } private class InferenceRunner implements Runnable { private final AtomicBoolean stop; private final CountDownLatch first; protected InferenceRunner(AtomicBoolean stop, CountDownLatch first){ this.stop = stop; this.first = first; } @Override public void run() { try{ runHelper(); } catch (Throwable t){ log.error("Uncaught exception in SimpleLoopTrigger.InferenceRunner", t); exception = t; current = null; } finally { //Count down in case the external thread is waiting at query, and we have an exception at the first iteration if(current == null){ first.countDown(); } } } public void runHelper(){ boolean delay = frequencyMs != null; Data in = data == null ? Data.empty() : data; boolean firstExec = true; long firstRunDelay = firstRunDelay(); while (!stop.get()){ if(firstExec && firstRunDelay > 0){ //For TimeLoopTrigger, which has an offset try { Thread.sleep(firstRunDelay); } catch (InterruptedException e){ log.error("Received InterruptedException in " + getClass().getName() + " - stopping thread", e); break; } } long start = delay ? System.currentTimeMillis() : 0L; current = callbackFn.apply(in); if(firstExec) { first.countDown(); firstExec = false; } if(delay && !stop.get()) { long nextStart = nextStart(start); long now = System.currentTimeMillis(); if(nextStart > now){ long sleep = nextStart - now; try { Thread.sleep(sleep); } catch (InterruptedException e){ if(!stop.get()) { log.error("Received InterruptedException in SimpleLoopTrigger - stopping thread", e); } break; } } } } } } }
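A hedged wiring sketch for SimpleLoopTrigger; the AsyncPipeline(Pipeline, Trigger) constructor is the one used by PipelineDeserializer later in this section, while pipeline and fixedInput are hypothetical placeholders.

// Re-run the underlying pipeline roughly every 500 ms with a fixed input;
// query(...) then returns the most recent async result.
Trigger trigger = new SimpleLoopTrigger(500L, fixedInput);
AsyncPipeline async = new AsyncPipeline(pipeline, trigger);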
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/loop/TimeLoopTrigger.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.loop; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.data.Data; import io.swagger.v3.oas.annotations.media.Schema; import lombok.EqualsAndHashCode; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.concurrent.TimeUnit; /** * A trigger to be used with an {@link ai.konduit.serving.pipeline.impl.pipeline.AsyncPipeline}.<br> * TimeLoopTrigger performs execution of the underlying executor every time unit (every minute, every 2 hours, etc) * at the start of the time unit (start of the minute, hour, etc), or at the start + an optional offset. * <br> * Optionally, a fixed input Data instance may be provided that is fed into the pipeline at each call of the underlying * pipeline (when executed in an async manner). If this is not provided, execution is performed using Data.empty() as input. * @author Alex Black */ @Schema(description = "A trigger to be used with an AsyncPipeline.<br>" + "TimeLoopTrigger performs execution of the underlying executor every time unit (every minute, every 2 hours, etc) " + "at the start of the time unit (start of the minute, hour, etc), or at the start + an optional offset.<br>" + "Optionally, a fixed input Data instance may be provided that is fed into the pipeline at each call of the underlying " + "pipeline (when executed in an async manner). If this is not provided, execution is performed using Data.empty() as input.") @lombok.Data @EqualsAndHashCode(callSuper = true) @JsonName("TIME_LOOP_TRIGGER") public class TimeLoopTrigger extends SimpleLoopTrigger { protected final long offset; protected TimeLoopTrigger(@JsonProperty("frequencyMs") Long frequencyMs, @JsonProperty("offset") long offset, @JsonProperty("data") Data data) { super(frequencyMs, data); this.offset = offset; } public TimeLoopTrigger(long duration, TimeUnit unit) { this(duration, unit, 0); } public TimeLoopTrigger(long duration, TimeUnit unit, long offset) { super(unit.toMillis(duration)); this.offset = offset; } @Override protected long firstRunDelay() { long now = System.currentTimeMillis(); long next = now + frequencyMs - (now % frequencyMs) + offset; return next - now; } @Override protected long nextStart(long lastStart) { long now = System.currentTimeMillis(); return now + frequencyMs - (now % frequencyMs) + offset; } }
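Given the firstRunDelay/nextStart arithmetic above, a trigger built as in this hedged sketch first fires at the next minute boundary plus the offset, then once per minute thereafter; the constructor is TimeLoopTrigger(duration, unit, offsetMs) from the source above.

// Run at the start of every minute, 5 seconds into the minute.
Trigger everyMinute = new TimeLoopTrigger(1, TimeUnit.MINUTES, 5_000L);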
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/serde/AsyncPipelineSerializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.serde; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.pipeline.Pipeline; import ai.konduit.serving.pipeline.api.pipeline.Trigger; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.impl.pipeline.AsyncPipeline; import lombok.AllArgsConstructor; import org.nd4j.shade.jackson.annotation.JsonProperty; import org.nd4j.shade.jackson.annotation.JsonPropertyOrder; import org.nd4j.shade.jackson.annotation.JsonUnwrapped; import org.nd4j.shade.jackson.core.JsonGenerator; import org.nd4j.shade.jackson.databind.JsonSerializer; import org.nd4j.shade.jackson.databind.SerializerProvider; import java.io.IOException; public class AsyncPipelineSerializer extends JsonSerializer<AsyncPipeline> { @Override public void serialize(AsyncPipeline ap, JsonGenerator jg, SerializerProvider sp) throws IOException { Pipeline p = ap.underlying(); AsyncPipelineSerializationHelper h = new AsyncPipelineSerializationHelper(ap.trigger(), p); jg.writeObject(h); } //Wrapper/helper class to inject "@AsyncTrigger" into the Pipeline JSON @lombok.Data @AllArgsConstructor @JsonPropertyOrder({"@type", "@input"}) protected static class AsyncPipelineSerializationHelper { @JsonProperty(Data.RESERVED_KEY_ASYNC_TRIGGER) private Trigger _triggerAliasField_; @JsonUnwrapped private Pipeline pipeline; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/serde/BoundingBoxDeserializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.serde; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.impl.serde.DataJsonDeserializer; import org.nd4j.shade.jackson.core.JsonParser; import org.nd4j.shade.jackson.core.JsonProcessingException; import org.nd4j.shade.jackson.databind.DeserializationContext; import org.nd4j.shade.jackson.databind.JsonDeserializer; import org.nd4j.shade.jackson.databind.JsonNode; import java.io.IOException; public class BoundingBoxDeserializer extends JsonDeserializer<BoundingBox> { @Override public BoundingBox deserialize(JsonParser jp, DeserializationContext dc) throws IOException, JsonProcessingException { JsonNode n = jp.getCodec().readTree(jp); return DataJsonDeserializer.deserializeBB(n); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/serde/BoundingBoxSerializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.serde; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.impl.serde.DataJsonSerializer; import org.nd4j.shade.jackson.core.JsonGenerator; import org.nd4j.shade.jackson.databind.JsonSerializer; import org.nd4j.shade.jackson.databind.SerializerProvider; import java.io.IOException; public class BoundingBoxSerializer extends JsonSerializer<BoundingBox> { @Override public void serialize(BoundingBox bb, JsonGenerator jg, SerializerProvider sp) throws IOException { DataJsonSerializer.writeBB(jg, bb); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/serde/GraphStepDeserializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.serde; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.impl.pipeline.graph.*; import ai.konduit.serving.pipeline.util.ObjectMappers; import org.nd4j.common.base.Preconditions; import org.nd4j.shade.jackson.core.JsonParser; import org.nd4j.shade.jackson.core.JsonProcessingException; import org.nd4j.shade.jackson.core.TreeNode; import org.nd4j.shade.jackson.databind.DeserializationContext; import org.nd4j.shade.jackson.databind.ObjectMapper; import org.nd4j.shade.jackson.databind.deser.std.StdDeserializer; import org.nd4j.shade.jackson.databind.node.NumericNode; import org.nd4j.shade.jackson.databind.node.TextNode; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * A custom JSON deserializer for deserializing {@link GraphStep}s * * @author Alex Black */ public class GraphStepDeserializer extends StdDeserializer<GraphStep> { protected GraphStepDeserializer() { super(GraphStep.class); } @Override public GraphStep deserialize(JsonParser jp, DeserializationContext dc) throws IOException, JsonProcessingException { TreeNode tn = jp.readValueAsTree(); TreeNode typeNode = tn.get(GraphConstants.TYPE_KEY); TreeNode inputNode = tn.get(GraphConstants.INPUT_KEY); String type = ((TextNode) typeNode).asText(); JsonSubType st = ObjectMappers.findSubtypeByName(type); Preconditions.checkState(st != null, "No class found for mapping PipelineStep/GraphStep with type name \"%s\": " + "required module may not be on the classpath. All types include: %s", type, ObjectMappers.getAllSubtypes()); if(PipelineStep.class.isAssignableFrom(st.getConfigInterface())){ //Deserialize as PipelineStep, then wrap in a PipelineGraphStep PipelineStep ps = jp.getCodec().treeToValue(tn, PipelineStep.class); String input = ((TextNode)inputNode).asText(); return new PipelineGraphStep(null, ps, null, input); } else if(GraphStep.class.isAssignableFrom(st.getConfigInterface())){ //Deserialize as GraphStep String input = null; List<String> inputs = null; if(inputNode.isArray()){ int size = inputNode.size(); inputs = new ArrayList<>(size); for( int i=0; i<size; i++ ){ inputs.add(((TextNode)inputNode.get(i)).asText()); } } else { input = ((TextNode)inputNode).asText(); } switch (type){ case GraphConstants.GRAPH_MERGE_JSON_KEY: return new MergeStep(null, inputs, null); //TODO names case GraphConstants.GRAPH_ANY_JSON_KEY: return new AnyStep(null, inputs, null); //TODO names case GraphConstants.GRAPH_SWITCH_JSON_KEY: TreeNode switchFnNode = tn.get("switchFn"); SwitchFn fn = jp.getCodec().treeToValue(switchFnNode, SwitchFn.class); return new SwitchStep(null, null, input, fn); case GraphConstants.GRAPH_SWITCH_OUTPUT_JSON_KEY: int idx = ((NumericNode)tn.get("outputNum")).intValue(); return new SwitchOutput(null, null, input, idx ); //TODO name default: throw new UnsupportedOperationException("Unknown graph type JSON key: " + type); } } else { //Bad JSON? throw new IllegalStateException("Subtype \"" + type + "\" is neither a PipelineStep nor a GraphStep"); } } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/serde/GraphStepSerializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.pipeline.serde; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.impl.pipeline.graph.*; import ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchOutput; import ai.konduit.serving.pipeline.util.ObjectMappers; import lombok.AllArgsConstructor; import lombok.Data; import org.nd4j.common.base.Preconditions; import org.nd4j.shade.jackson.annotation.JsonProperty; import org.nd4j.shade.jackson.annotation.JsonPropertyOrder; import org.nd4j.shade.jackson.annotation.JsonUnwrapped; import org.nd4j.shade.jackson.core.JsonGenerator; import org.nd4j.shade.jackson.databind.JsonSerializer; import org.nd4j.shade.jackson.databind.SerializerProvider; import java.io.IOException; import java.util.List; import java.util.Map; /** * A custom serializer for GraphSteps * * @author Alex Black */ public class GraphStepSerializer extends JsonSerializer<GraphStep> { @Override public void serialize(GraphStep gs, JsonGenerator jg, SerializerProvider sp) throws IOException { Map<Class<?>,String> names = ObjectMappers.getSubtypeNames(); String stepJsonType = names.get(gs.getClass()); Preconditions.checkState(gs instanceof PipelineGraphStep || stepJsonType != null, "No JSON name is known for GraphStep of type %s", gs); String name = gs.name(); if(gs.hasStep()){ //PipelineStep (StandardGraphStep) only PipelineStep s = gs.getStep(); String type = names.get(s.getClass()); String input = gs.input(); StepSerializationHelper w = new StepSerializationHelper(type, input, s); jg.writeObject(w); } else { jg.writeStartObject(name); jg.writeFieldName(GraphConstants.TYPE_KEY); jg.writeString(stepJsonType); List<String> inputs = gs.inputs(); jg.writeFieldName(GraphConstants.INPUT_KEY); if(inputs.size() == 1){ jg.writeString(inputs.get(0)); } else { jg.writeStartArray(inputs.size()); for(String s : inputs){ jg.writeString(s); } jg.writeEndArray(); } //Write all other fields //TODO maybe there's a better way... But GraphSteps don't really need to be user extensible or anything if(gs instanceof SwitchStep){ SwitchStep ss = (SwitchStep)gs; SwitchFn fn = ss.switchFn(); jg.writeFieldName("switchFn"); jg.writeObject(fn); } else if(gs instanceof SwitchOutput){ SwitchOutput so = (SwitchOutput)gs; jg.writeFieldName("outputNum"); jg.writeNumber(so.outputNum()); } //For AnyStep and MergeStep: No other fields to write (just need type and name) jg.writeEndObject(); } } //Wrapper/helper class to inject "@type" and "@input" fields into the PipelineStep json @Data @AllArgsConstructor @JsonPropertyOrder({"@type", "@input"}) protected static class StepSerializationHelper { @JsonProperty("@type") private String _typeAliasField_; @JsonProperty("@input") private String _inputAliasField_; @JsonUnwrapped private PipelineStep step; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/pipeline/serde/PipelineDeserializer.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.impl.pipeline.serde; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.pipeline.Pipeline; import ai.konduit.serving.pipeline.api.pipeline.Trigger; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.impl.pipeline.AsyncPipeline; import ai.konduit.serving.pipeline.impl.pipeline.GraphPipeline; import ai.konduit.serving.pipeline.impl.pipeline.SequencePipeline; import ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep; import org.nd4j.shade.jackson.core.JsonParseException; import org.nd4j.shade.jackson.core.JsonParser; import org.nd4j.shade.jackson.core.JsonProcessingException; import org.nd4j.shade.jackson.core.TreeNode; import org.nd4j.shade.jackson.databind.DeserializationContext; import org.nd4j.shade.jackson.databind.deser.std.StdDeserializer; import org.nd4j.shade.jackson.databind.node.TextNode; import java.io.IOException; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; public class PipelineDeserializer extends StdDeserializer<Pipeline> { protected PipelineDeserializer() { super(Pipeline.class); } @Override public Pipeline deserialize(JsonParser jp, DeserializationContext dc) throws IOException, JsonProcessingException { TreeNode tn = jp.readValueAsTree(); TreeNode n = tn.get("steps"); String id = null; if(tn.get("id") != null){ id = ((TextNode)tn.get("id")).asText(); } Trigger asyncTrigger = null; //If present: it's an async pipeline if(tn.get(Data.RESERVED_KEY_ASYNC_TRIGGER) != null){ TreeNode triggerNode = tn.get(Data.RESERVED_KEY_ASYNC_TRIGGER); asyncTrigger = jp.getCodec().treeToValue(triggerNode, Trigger.class); } Pipeline p; if(n.isArray()){ PipelineStep[] steps = jp.getCodec().treeToValue(n, PipelineStep[].class); p = new SequencePipeline(Arrays.asList(steps), id); } else if(n.isObject()){ Map<String, GraphStep> map = new LinkedHashMap<>(); Iterator<String> f = n.fieldNames(); while(f.hasNext()) { String s = f.next(); TreeNode pn = n.get(s); GraphStep step = jp.getCodec().treeToValue(pn, GraphStep.class); step.name(s); map.put(s, step); } String outputStep = ((TextNode)tn.get("outputStep")).asText(); p = new GraphPipeline(map, outputStep, id); } else { throw new JsonParseException(jp, "Unable to deserialize Pipeline: Invalid JSON/YAML? Pipeline is neither a SequencePipeline or a GraphPipeline"); } if(asyncTrigger != null ){ return new AsyncPipeline(p, asyncTrigger); } return p; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/serde/DataJsonDeserializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.serde; import ai.konduit.serving.pipeline.api.data.*; import ai.konduit.serving.pipeline.impl.data.JData; import ai.konduit.serving.pipeline.impl.data.image.Png; import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray; import lombok.SneakyThrows; import org.nd4j.common.base.Preconditions; import org.nd4j.common.primitives.Pair; import org.nd4j.shade.jackson.core.JsonParser; import org.nd4j.shade.jackson.core.JsonProcessingException; import org.nd4j.shade.jackson.databind.DeserializationContext; import org.nd4j.shade.jackson.databind.JsonDeserializer; import org.nd4j.shade.jackson.databind.JsonNode; import org.nd4j.shade.jackson.databind.node.ArrayNode; import org.nd4j.shade.jackson.databind.node.TextNode; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Base64; import java.util.Iterator; import java.util.List; /** * Custom JSON deserializer for Data instances<br> * See {@link DataJsonSerializer} for further details * * @author Alex Black */ public class DataJsonDeserializer extends JsonDeserializer<Data> { @Override public Data deserialize(JsonParser jp, DeserializationContext dc) throws IOException, JsonProcessingException { JsonNode n = jp.getCodec().readTree(jp); return deserialize(jp, n); } public Data deserialize(JsonParser jp, JsonNode n) { JData d = new JData(); Iterator<String> names = n.fieldNames(); while (names.hasNext()) { String s = names.next(); JsonNode n2 = n.get(s); if (Data.RESERVED_KEY_METADATA.equalsIgnoreCase(s)) { Data meta = deserialize(jp, n2); d.setMetaData(meta); } else { if (n2.isTextual()) { //String String str = n2.textValue(); d.put(s, str); } else if (n2.isDouble()) { //Double double dVal = n2.doubleValue(); d.put(s, dVal); } else if (n2.isInt() || n2.isLong()) { //Long long lVal = n2.longValue(); d.put(s, lVal); } else if (n2.isBoolean()) { //Boolean boolean b = n2.booleanValue(); d.put(s, b); } else if (n2.isArray()){ Pair<List<Object>, ValueType> p = deserializeList(jp, n2); d.putList(s, p.getFirst(), p.getSecond()); } else if (n2.isObject()) { //Could be: Bytes, image, NDArray,ByteBuffer BoundingBox, Point or Data if (n2.has(Data.RESERVED_KEY_BYTES_BASE64) || n2.has(Data.RESERVED_KEY_BYTES_ARRAY)) { //byte[] stored in base64 or byte[] as JSON array byte[] bytes = deserializeBytes(n2); d.put(s, bytes); } else if (n2.has(Data.RESERVED_KEY_BYTEBUFFER) || n2.has(Data.RESERVED_KEY_BYTEBUFFER_BASE64)) { //byte[] stored in base64 or byte[] as JSON array byte[] bytes = deserializeBytes(n2); ByteBuffer byteBuffer = ByteBuffer.wrap(bytes); d.put(s, byteBuffer); } else if (n2.has(Data.RESERVED_KEY_NDARRAY_TYPE)) { //NDArray d.put(s, deserializeNDArray(n2)); } else if 
(n2.has(Data.RESERVED_KEY_IMAGE_DATA)) { //Image d.put(s, deserializeImage(n2)); } else if(n2.has(Data.RESERVED_KEY_BB_CY) || n2.has(Data.RESERVED_KEY_BB_X1)){ d.put(s, deserializeBB(n2)); } else if(n2.has(Data.RESERVED_KEY_POINT_COORDS)){ d.put(s, deserializePoint(n2)); } else { //Must be data Data dInner = deserialize(jp, n2); d.put(s, dInner); } } else { throw new UnsupportedOperationException("Type not yet implemented"); } } } return d; } protected NDArray deserializeNDArray(JsonNode n){ NDArrayType type = NDArrayType.valueOf(n.get(Data.RESERVED_KEY_NDARRAY_TYPE).textValue()); ArrayNode shapeNode = (ArrayNode) n.get(Data.RESERVED_KEY_NDARRAY_SHAPE); long[] shape = new long[shapeNode.size()]; for (int i = 0; i < shape.length; i++) shape[i] = shapeNode.get(i).asLong(); String base64 = n.get(Data.RESERVED_KEY_NDARRAY_DATA_BASE64).textValue(); byte[] bytes = Base64.getDecoder().decode(base64); ByteBuffer bb = ByteBuffer.wrap(bytes); SerializedNDArray ndArray = new SerializedNDArray(type, shape, bb); return NDArray.create(ndArray); } protected Image deserializeImage(JsonNode n2){ String format = n2.get(Data.RESERVED_KEY_IMAGE_FORMAT).textValue(); if(!"PNG".equalsIgnoreCase(format)){ throw new UnsupportedOperationException("Deserialization of formats other than PNG not yet implemented"); } String base64Data = n2.get(Data.RESERVED_KEY_IMAGE_DATA).textValue(); byte[] bytes = Base64.getDecoder().decode(base64Data); Png png = new Png(bytes); return Image.create(png); } public static BoundingBox deserializeBB(JsonNode n2){ String label = null; Double prob = null; if(n2.has("label") ){ label = n2.get("label").textValue(); } else if(n2.has("@label")){ label = n2.get("@label").textValue(); } if(n2.has("probability")){ prob = n2.get("probability").doubleValue(); } else if(n2.has("@probability")){ prob = n2.get("@probability").doubleValue(); } if(n2.has(Data.RESERVED_KEY_BB_CX)){ double cx = n2.get(Data.RESERVED_KEY_BB_CX).doubleValue(); double cy = n2.get(Data.RESERVED_KEY_BB_CY).doubleValue(); double h = n2.get(Data.RESERVED_KEY_BB_H).doubleValue(); double w = n2.get(Data.RESERVED_KEY_BB_W).doubleValue(); return BoundingBox.create(cx, cy, h, w, label, prob); } else { double x1 = n2.get(Data.RESERVED_KEY_BB_X1).doubleValue(); double x2 = n2.get(Data.RESERVED_KEY_BB_X2).doubleValue(); double y1 = n2.get(Data.RESERVED_KEY_BB_Y1).doubleValue(); double y2 = n2.get(Data.RESERVED_KEY_BB_Y2).doubleValue(); return BoundingBox.createXY(x1, x2, y1, y2, label, prob); } } protected Point deserializePoint(JsonNode n2){ String label = null; Double prob = null; if(n2.has("label") ){ label = n2.get("label").textValue(); } else if(n2.has("@label")){ label = n2.get("@label").textValue(); } if(n2.has("probability")){ prob = n2.get("probability").doubleValue(); } else if(n2.has("@probability")){ prob = n2.get("@probability").doubleValue(); } ArrayNode n3 = (ArrayNode) n2.get(Data.RESERVED_KEY_POINT_COORDS); double[] coords = new double[n3.size()]; for (int i = 0; i < n3.size(); i++) { coords[i] = n3.get(i).asDouble(); } return Point.create(coords, label, prob); } @SneakyThrows protected byte[] deserializeBytes(JsonNode n2) { if (n2.has(Data.RESERVED_KEY_BYTES_BASE64)) { //byte[] stored in base64 JsonNode n3 = n2.get(Data.RESERVED_KEY_BYTES_BASE64); String base64Str = n3.textValue(); byte[] bytes = Base64.getDecoder().decode(base64Str); return bytes; } else if (n2.has(Data.RESERVED_KEY_BYTES_ARRAY)) { //byte[] as JSON array ArrayNode n3 = (ArrayNode) n2.get(Data.RESERVED_KEY_BYTES_ARRAY); int size = n3.size(); byte[] b = 
new byte[size]; for (int i = 0; i < size; i++) { int bVal = n3.get(i).asInt(); if (bVal < Byte.MIN_VALUE || bVal > Byte.MAX_VALUE) { throw new IllegalStateException("Unable to deserialize Data from JSON: JSON contains byte[] with value outside" + " of valid range [-128, 127] - value: " + bVal + " at index " + i); } b[i] = (byte) bVal; } return b; } else if(n2.has(Data.RESERVED_KEY_BYTEBUFFER_BASE64)) { TextNode n3 = (TextNode) n2.get(Data.RESERVED_KEY_BYTEBUFFER_BASE64); byte[] bytes = n3.binaryValue(); return bytes; } else { throw new UnsupportedOperationException("JSON node is not a bytes node"); } } protected Pair<List<Object>, ValueType> deserializeList(JsonParser jp, JsonNode n){ ArrayNode an = (ArrayNode)n; int size = an.size(); //TODO PROBLEM: empty list type is ambiguous! Preconditions.checkState(size > 0, "Unable to deserialize empty lists (not yet implemented)"); JsonNode n3 = n.get(0); ValueType listType = nodeType(n3); List<Object> list = new ArrayList<>(); switch (listType){ case BYTEBUFFER: for( int i = 0; i < size; i++) { list.add(ByteBuffer.wrap(deserializeBytes(n.get(i)))); } break; case NDARRAY: for( int i = 0; i < size; i++) { list.add(deserializeNDArray(n.get(i))); } break; case STRING: for( int i = 0; i < size; i++) { list.add(n.get(i).textValue()); } break; case BYTES: for( int i = 0; i<size; i++) { list.add(deserializeBytes(n.get(i))); } break; case IMAGE: for( int i = 0; i < size; i++) { list.add(deserializeImage(n.get(i))); } break; case DOUBLE: for( int i=0; i<size; i++ ){ list.add(n.get(i).doubleValue()); } break; case INT64: for( int i=0; i<size; i++ ){ list.add(n.get(i).longValue()); } break; case BOOLEAN: for( int i=0; i<size; i++ ){ list.add(n.get(i).booleanValue()); } break; case DATA: for( int i=0; i<size; i++ ){ list.add(deserialize(jp, n.get(i))); } break; case LIST: for( int i=0; i<size; i++ ){ list.add(deserializeList(jp, n.get(i))); } break; case BOUNDING_BOX: for( int i=0; i<size; i++ ){ list.add(deserializeBB(n.get(i))); } break; case POINT: for( int i=0; i<size; i++ ){ list.add(deserializePoint(n.get(i))); } break; default: throw new IllegalStateException("Unable to deserialize list with values of type: " + listType); } return new Pair<>(list, listType); } protected ValueType nodeType(JsonNode n){ if (n.isTextual()) { //String return ValueType.STRING; } else if (n.isDouble()) { //Double return ValueType.DOUBLE; } else if (n.isInt() || n.isLong()) { //Long return ValueType.INT64; } else if (n.isBoolean()) { //Boolean return ValueType.BOOLEAN; } else if (n.isArray()){ return ValueType.LIST; } else if (n.isObject()) { //Could be: Bytes, image, NDArray, BoundingBox, Point or Data if (n.has(Data.RESERVED_KEY_BYTES_BASE64)) { return ValueType.BYTES; } else if (n.has(Data.RESERVED_KEY_BYTES_ARRAY)) { return ValueType.BYTES; } else if (n.has(Data.RESERVED_KEY_NDARRAY_TYPE)) { //NDArray return ValueType.NDARRAY; } else if (n.has(Data.RESERVED_KEY_IMAGE_DATA)) { //Image return ValueType.IMAGE; } else if(n.has(Data.RESERVED_KEY_BB_CX) || n.has(Data.RESERVED_KEY_BB_X1)){ return ValueType.BOUNDING_BOX; } else if(n.has(Data.RESERVED_KEY_POINT_COORDS)){ return ValueType.POINT; } else if(n.has(Data.RESERVED_KEY_BYTEBUFFER_BASE64)) { return ValueType.BYTEBUFFER; } else { //Must be data return ValueType.DATA; } } else { throw new UnsupportedOperationException("Type not yet implemented"); } } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/serde/DataJsonSerializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.serde; import ai.konduit.serving.pipeline.api.data.*; import ai.konduit.serving.pipeline.impl.data.box.BBoxCHW; import ai.konduit.serving.pipeline.impl.data.image.Png; import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray; import ai.konduit.serving.pipeline.impl.format.JavaNDArrayFormats; import org.nd4j.shade.jackson.core.JsonGenerator; import org.nd4j.shade.jackson.databind.JsonSerializer; import org.nd4j.shade.jackson.databind.ObjectMapper; import org.nd4j.shade.jackson.databind.SerializerProvider; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Base64; import java.util.List; /** * Custom JSON serialization for Data instances. * <p> * This is used for ALL Data implementations - and encodes the canonical JSON format that Konduit Serving will use everywhere * <p> * Note that the JSON (and YAML) format is considered part of the public API, hence for optimal usability we are using * a custom JSON serializer to precisely control the format.<br> * Other JSON options exist (using standard Jackson serializers/deserializers, or Protobuf's JSON format) but a manual approach * provides full control over the exact format * * @author Alex Black */ public class DataJsonSerializer extends JsonSerializer<Data> { @Override public void serialize(Data data, JsonGenerator jg, SerializerProvider sp) throws IOException { //TODO do we serialize in any particular order? 
jg.writeStartObject(); List<String> l = data.keys(); for (String s : l) { ValueType vt = data.type(s); jg.writeFieldName(s); switch (vt) { case NDARRAY: NDArray n = data.getNDArray(s); writeNDArray(jg, n); break; case STRING: String str = data.getString(s); jg.writeString(str); break; case BYTES: writeBytes(jg, data.getBytes(s)); break; case IMAGE: writeImage(jg, data.getImage(s)); break; case DOUBLE: writeDouble(jg, data.getDouble(s)); break; case INT64: writeLong(jg, data.getLong(s)); break; case BOOLEAN: boolean b = data.getBoolean(s); jg.writeBoolean(b); break; case DATA: Data d = data.getData(s); writeNestedData(jg, d); break; case LIST: /* Format: "myList" : ["x", "y", "z"] */ ValueType listVt = data.listType(s); List<?> list = data.getList(s, listVt); writeList(jg, list, listVt); break; case BOUNDING_BOX: BoundingBox bb = data.getBoundingBox(s); writeBB(jg, bb); break; case POINT: Point p = data.getPoint(s); writePoint(jg, p); break; case BYTEBUFFER: ByteBuffer byteBuffer = data.getByteBuffer(s); writeBytes(jg,byteBuffer); break; default: throw new IllegalStateException("Value type not yet supported/implemented: " + vt); } } if (data.getMetaData() != null) { Data md = data.getMetaData(); jg.writeFieldName(Data.RESERVED_KEY_METADATA); writeNestedData(jg, md); } jg.writeEndObject(); } private void writeNestedData(JsonGenerator jg, Data data) throws IOException { ObjectMapper om = (ObjectMapper) jg.getCodec(); String dataStr = om.writeValueAsString(data); jg.writeRawValue(dataStr); } private void writeBytes(JsonGenerator jg, ByteBuffer bytes) throws IOException { //TODO add option to do raw bytes array - [0, 1, 2, ...] style jg.writeStartObject(); jg.writeFieldName(Data.RESERVED_KEY_BYTEBUFFER_BASE64); if(bytes.hasArray()) { String base64 = Base64.getEncoder().encodeToString(bytes.array()); jg.writeString(base64); jg.writeEndObject(); } else { byte[] bytesArr = new byte[bytes.capacity()]; bytes.get(bytesArr.length); String base64 = Base64.getEncoder().encodeToString(bytesArr); jg.writeString(base64); jg.writeEndObject(); } } private void writeBytes(JsonGenerator jg, byte[] bytes) throws IOException { //TODO add option to do raw bytes array - [0, 1, 2, ...] 
style jg.writeStartObject(); jg.writeFieldName(Data.RESERVED_KEY_BYTES_BASE64); String base64 = Base64.getEncoder().encodeToString(bytes); jg.writeString(base64); jg.writeEndObject(); } private void writeDouble(JsonGenerator jg, double d) throws IOException { jg.writeNumber(d); } private void writeLong(JsonGenerator jg, long l) throws IOException { jg.writeNumber(l); } private void writeImage(JsonGenerator jg, Image i) throws IOException { Png png = i.getAs(Png.class); byte[] imgData = png.getBytes(); jg.writeStartObject(); jg.writeFieldName(Data.RESERVED_KEY_IMAGE_FORMAT); jg.writeString("PNG"); //TODO No magic constant jg.writeFieldName(Data.RESERVED_KEY_IMAGE_DATA); String base64 = Base64.getEncoder().encodeToString(imgData); jg.writeString(base64); jg.writeEndObject(); } private void writeNDArray(JsonGenerator jg, NDArray n) throws IOException { jg.writeStartObject(); SerializedNDArray sn = n.getAs(SerializedNDArray.class); NDArrayType type = sn.getType(); long[] shape = sn.getShape(); jg.writeFieldName(Data.RESERVED_KEY_NDARRAY_TYPE); jg.writeString(type.toString()); jg.writeFieldName(Data.RESERVED_KEY_NDARRAY_SHAPE); jg.writeArray(shape, 0, shape.length); ByteBuffer bb = sn.getBuffer(); bb.rewind(); byte[] array; if (bb.hasArray()) { array = bb.array(); } else { int size = bb.remaining(); array = new byte[size]; for (int i = 0; i < size; i++) { array[i] = bb.get(i); } } String base64 = Base64.getEncoder().encodeToString(array); jg.writeFieldName(Data.RESERVED_KEY_NDARRAY_DATA_ARRAY); jg.writeString(base64); jg.writeEndObject(); } public static void writeBB(JsonGenerator jg, BoundingBox bb) throws IOException { //We'll keep it in the original format, if possible - but encode it as a X/Y format otherwise jg.writeStartObject(); if(bb instanceof BBoxCHW){ BBoxCHW b = (BBoxCHW)bb; jg.writeFieldName(Data.RESERVED_KEY_BB_CX); jg.writeNumber(b.cx()); jg.writeFieldName(Data.RESERVED_KEY_BB_CY); jg.writeNumber(b.cy()); jg.writeFieldName(Data.RESERVED_KEY_BB_H); jg.writeNumber(b.h()); jg.writeFieldName(Data.RESERVED_KEY_BB_W); jg.writeNumber(b.w()); } else { jg.writeFieldName(Data.RESERVED_KEY_BB_X1); jg.writeNumber(bb.x1()); jg.writeFieldName(Data.RESERVED_KEY_BB_X2); jg.writeNumber(bb.x2()); jg.writeFieldName(Data.RESERVED_KEY_BB_Y1); jg.writeNumber(bb.y1()); jg.writeFieldName(Data.RESERVED_KEY_BB_Y2); jg.writeNumber(bb.y2()); } if(bb.label() != null){ jg.writeFieldName("label"); jg.writeString(bb.label()); } if(bb.probability() != null){ jg.writeFieldName("probability"); jg.writeNumber(bb.probability()); } jg.writeEndObject(); } private void writePoint(JsonGenerator jg, Point p) throws IOException { jg.writeStartObject(); jg.writeFieldName(Data.RESERVED_KEY_POINT_COORDS); jg.writeStartArray(p.dimensions()); for (int i = 0; i < p.dimensions(); i++) { writeDouble(jg, p.get(i)); } jg.writeEndArray(); if(p.label() != null){ jg.writeFieldName("label"); jg.writeString(p.label()); } if(p.probability() != null){ jg.writeFieldName("probability"); jg.writeNumber(p.probability()); } jg.writeEndObject(); } private void writeList(JsonGenerator jg, List<?> list, ValueType listType) throws IOException { int n = list.size(); jg.writeStartArray(n); switch (listType) { case NDARRAY: for(NDArray arr : (List<NDArray>) list){ writeNDArray(jg, arr); } break; case STRING: for (String s : (List<String>) list) { //TODO avoid unsafe cast? 
jg.writeString(s); } break; case BYTES: for (byte[] bytes : (List<byte[]>) list) { writeBytes(jg, bytes); } break; case BYTEBUFFER: for(ByteBuffer b: (List<ByteBuffer>) list) { writeBytes(jg,b); } break; case IMAGE: for (Image img : (List<Image>) list) { writeImage(jg, img); } break; case DOUBLE: List<Double> dList = (List<Double>) list; //TODO checks for unsafe cast? for(Double d : dList){ writeDouble(jg, d); } break; case INT64: List<Long> lList = (List<Long>) list; for(Long l : lList){ writeLong(jg, l); } break; case BOOLEAN: List<Boolean> bList = (List<Boolean>) list; for (Boolean b : bList) { jg.writeBoolean(b); } break; case DATA: List<Data> dataList = (List<Data>) list; for (Data d : dataList) { writeNestedData(jg, d); } break; case BOUNDING_BOX: List<BoundingBox> bbList = (List<BoundingBox>)list; for(BoundingBox bb : bbList){ writeBB(jg, bb); } break; case POINT: List<Point> pList = (List<Point>)list; for(Point p : pList){ writePoint(jg, p); } break; case LIST: //List of lists... throw new IllegalStateException("Not yet implemented: Nested lists JSON serialization"); // List<List<?>> listList = (List<List<?>>)list; // jg.writeStartArray(listList.size()); // for(List<?> l : listList){ // ValueType vt = null; //TODO design problem... //// writeList(); // } default: throw new IllegalStateException("Not yet implemented: list type serialization for values " + listType); } jg.writeEndArray(); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/serde/PointDeserializer.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.serde; import ai.konduit.serving.pipeline.api.data.Point; import ai.konduit.serving.pipeline.impl.data.point.NDPoint; import org.nd4j.shade.jackson.core.JsonParser; import org.nd4j.shade.jackson.core.JsonProcessingException; import org.nd4j.shade.jackson.databind.DeserializationContext; import org.nd4j.shade.jackson.databind.JsonDeserializer; import org.nd4j.shade.jackson.databind.JsonNode; import org.nd4j.shade.jackson.databind.node.ArrayNode; import java.io.IOException; public class PointDeserializer extends JsonDeserializer<Point> { @Override public Point deserialize(JsonParser jp, DeserializationContext dc) throws IOException, JsonProcessingException { JsonNode n = jp.getCodec().readTree(jp); String lbl = n.has("label") ? n.get("label").textValue() : null; Double prob = n.has("probability") ? n.get("probability").doubleValue() : null; ArrayNode cn = (ArrayNode)n.get("coords"); double[] pts = new double[cn.size()]; for( int i=0; i<pts.length; i++ ){ pts[i] = cn.get(i).doubleValue(); } return new NDPoint(pts, lbl, prob); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/filter/BoundingBoxFilterStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.filter; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("BOUNDING_BOX_FILTER") public class BoundingBoxFilterStep implements PipelineStep { public static final String DEFAULT_OUTPUT_NAME = "bounding_boxes"; @Schema(description = "If true, other data key and values from the previous step are kept and passed on to the next step as well.", defaultValue = "true") private boolean keepOtherValues = true; @Schema(description = "A list of class labels for which bounding boxes will be drawn") protected List<String> classesToKeep; @Schema(description = "Input name where the all bounding box are be contained in", defaultValue = "input") protected String inputName = "input"; @Schema(description = "Output key name where the bounding box will be contained in.", defaultValue = DEFAULT_OUTPUT_NAME) protected String outputName = DEFAULT_OUTPUT_NAME; @Tolerate public BoundingBoxFilterStep classesToKeep(String... classesToKeep) { return this.classesToKeep(Arrays.asList(classesToKeep)); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/filter/BoundingBoxFilterStepRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.filter; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.util.DataUtils; import org.nd4j.shade.guava.base.Preconditions; import java.util.List; import java.util.stream.Collectors; public class BoundingBoxFilterStepRunner implements PipelineStepRunner { protected final BoundingBoxFilterStep step; public BoundingBoxFilterStepRunner(BoundingBoxFilterStep step) { this.step = step; Preconditions.checkArgument(!this.step.classesToKeep.isEmpty(),"Seems you forget to set the classes to keep."); }; @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { String key = "detection_boxes"; //TODO String prob = "detection_scores"; String labels = "detection_classes"; String inputName = step.inputName(); if(inputName == null){ String err = "No input name was set in the BoundingBoxFilterStep configuration and input name could not be guessed based on type"; DataUtils.inferField(data, ValueType.BOUNDING_BOX, true, err + " (multiple keys)", err + " (no List<BoundingBox> values)"); } List<String> classesToKeep = step.classesToKeep; List<BoundingBox> boundingBoxes = data .getListBoundingBox(inputName) .stream() .filter(i -> classesToKeep.contains(i.label())) .collect(Collectors.toList()); String outName = step.outputName(); if (outName == null) outName = BoundingBoxFilterStep.DEFAULT_OUTPUT_NAME; Data d = Data.singletonList(outName, boundingBoxes, ValueType.BOUNDING_BOX); if (step.keepOtherValues()) { for (String s : data.keys()) { if (!key.equals(s) && !prob.equals(s) &&!labels.equals(s) && !inputName.equals(s)) { d.copyFrom(s, data); } } } return d; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/filter/BoundingBoxFilterStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.filter; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class BoundingBoxFilterStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof BoundingBoxFilterStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step: %s", step); return new BoundingBoxFilterStepRunner((BoundingBoxFilterStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/point/BoundingBoxToPointStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.point; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; /** * BoundingBoxToPointStep: Given one or more bounding boxes, create a point representation of them.<br> * You can choose from the following methods of converting the bounding box to a point: TOP_LEFT, TOP_RIGHT, BOTTOM_LEFT, * BOTTOM_RIGHT, CENTER.<br> * Set to CENTER by default.<br> * Note: supports both {@code BoundingBox} and {@code List<BoundingBox>} fields. If the input is as single value, * the output will be a single value; if the input is a list, the output will be a list.<br> */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("BOUNDING_BOX_TO_POINT") public class BoundingBoxToPointStep implements PipelineStep { public enum ConversionMethod {TOP_LEFT, TOP_RIGHT, BOTTOM_LEFT, BOTTOM_RIGHT, CENTER} @Schema(description = "Name of the bounding boxes key from the previous step. If set to null, it will try to find any bounding box in the incoming data instance.") private String bboxName; @Schema(description = "Name of the point extracted from the input bounding box. If null, the input field name is used.") private String outputName; @Schema(description = "If true, other data key and values from the previous step are kept and passed on to the next step as well.", defaultValue = "true") private boolean keepOtherFields = true; @Schema(description = "You can choose from the following methods of converting the bounding box to a point: TOP_LEFT, TOP_RIGHT, BOTTOM_LEFT,\n" + "BOTTOM_RIGHT, CENTER. Set to CENTER by default", defaultValue = "CENTER") private ConversionMethod method = ConversionMethod.CENTER; }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/point/BoundingBoxToPointStepRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.point; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.Point; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.util.DataUtils; import lombok.NonNull; import java.util.ArrayList; import java.util.Collections; import java.util.List; @CanRun(BoundingBoxToPointStep.class) public class BoundingBoxToPointStepRunner implements PipelineStepRunner { protected final BoundingBoxToPointStep step; public BoundingBoxToPointStepRunner(@NonNull BoundingBoxToPointStep step){ this.step = step; } @Override public void close() { //No op } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { String bboxName = step.bboxName(); if(bboxName == null){ String errMultipleKeys = "Bounding box field name was not provided and could not be inferred: multiple BoundingBox (or List<BoundingBox>) fields exist: %s and %s"; String errNoKeys = "Bounding box field name was not provided and could not be inferred: no BoundingBox (or List<BoundingBox>) fields exist"; bboxName = DataUtils.inferField(data, ValueType.BOUNDING_BOX, true, errMultipleKeys, errNoKeys); } ValueType vt = data.type(bboxName); List<BoundingBox> list; boolean singleValue; if(vt == ValueType.BOUNDING_BOX){ list = Collections.singletonList(data.getBoundingBox(bboxName)); singleValue = true; } else if(vt == ValueType.LIST){ if(data.listType(bboxName) == ValueType.BOUNDING_BOX) { list = data.getListBoundingBox(bboxName); } else { throw new IllegalStateException("Data[" + bboxName + "] is List<" + data.listType(bboxName) + "> not List<BoundingBox>"); } singleValue = false; } else { throw new IllegalStateException("Data[" + bboxName + "] is neither a BoundingBox or List<BoundingBox> - is " + vt); } List<Point> out = new ArrayList<>(); for(BoundingBox bb : list) { switch (step.method()){ case TOP_LEFT: out.add(Point.create(bb.x1(), bb.y1(), bb.label(), bb.probability())); break; case TOP_RIGHT: out.add(Point.create(bb.x2(), bb.y1(), bb.label(), bb.probability())); break; case BOTTOM_LEFT: out.add(Point.create(bb.x1(), bb.y2(), bb.label(), bb.probability())); break; case BOTTOM_RIGHT: out.add(Point.create(bb.x2(), bb.y2(), bb.label(), bb.probability())); break; case CENTER: out.add(Point.create(bb.cx(), bb.cy(), bb.label(), bb.probability())); break; } } String outName = step.outputName() == null ? 
bboxName : step.outputName(); Data d; if(singleValue){ d = Data.singleton(outName, out.get(0)); } else { d = Data.singletonList(outName, out, ValueType.POINT); } if(step.keepOtherFields()){ for(String s : data.keys()){ if(!bboxName.equals(s)){ d.copyFrom(s, data); } } } return d; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/point/BoundingBoxToPointStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.point; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class BoundingBoxToPointStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof BoundingBoxToPointStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep); return new BoundingBoxToPointStepRunner((BoundingBoxToPointStep) pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/yolo/YoloToBoundingBoxRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.yolo; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray; import ai.konduit.serving.pipeline.util.DataUtils; import ai.konduit.serving.pipeline.util.NDArrayUtils; import lombok.AllArgsConstructor; import org.nd4j.common.base.Preconditions; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; @CanRun(YoloToBoundingBoxStep.class) @AllArgsConstructor public class YoloToBoundingBoxRunner implements PipelineStepRunner { private final YoloToBoundingBoxStep step; @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { String input = step.input(); if(input == null){ String errMultipleKeys = "YoloToBoundingBoxStep input array field \"input\" was not provided in the config and " + "could not be inferred: multiple NDArray fields exist: %s and %s"; String errNoKeys = "YoloToBoundingBoxStep input array field \"input\" was not provided and could not be inferred: no NDArray fields exist"; input = DataUtils.inferField(data, ValueType.NDARRAY, false, errMultipleKeys, errNoKeys); } else { Preconditions.checkState(data.has(input), "YoloToBoundingBoxStep: Data does not have an input field with name \"%s\"", input); Preconditions.checkState(data.type(input) == ValueType.NDARRAY, "YoloToBoundingBoxStep: Data input field \"%s\" has type %s but expected NDARRAY type", input, data.type(input)); } NDArray arr = data.getNDArray(input); Preconditions.checkState(arr.rank() == 4, "YoloToBoundingBoxStep: Data field \"%s\" is NDArray but must be rank 4. Got array with rank %s,shape %s", arr.rank(), arr.shape()); Preconditions.checkState(step.numClasses() != null || step.classLabels() != null, "YoloToBoundingBoxStep: either numClasses" + " field or classLabels field must be set"); int numClasses = step.numClasses() != null ? 
step.numClasses() : step.classLabels().size(); Preconditions.checkState(numClasses > 0, "YoloToBoundingboxStep: Number of classes must be > 0"); if(!step.nchw()){ arr = NDArray.create(NDArrayUtils.nhwcToNchw(arr.getAs(float[][][][].class))); } //Activation have format: [mb, B*(5+C), H, W] long n = arr.size(0); long b5c = arr.size(1); long h = arr.size(2); long w = arr.size(3); int b = (int) (b5c / (numClasses + 5)); int c = (int) (b5c / b - 5); //Reshape to [mb, B, 5+C, H, W] SerializedNDArray sa = arr.getAs(SerializedNDArray.class); ByteBuffer bb = sa.getBuffer(); SerializedNDArray sa5 = new SerializedNDArray(sa.getType(), new long[]{n, b, 5 + c, h, w}, bb); float[][][][][] f5 = NDArray.create(sa5).getAs(float[][][][][].class); List<String> classLabels = step.classLabels(); List<BoundingBox> out = new ArrayList<>(); for( int i = 0; i < n; i++ ) { for (int x = 0; x < w; x++) { for (int y = 0; y < h; y++) { for (int box = 0; box < b; box++) { float conf = f5[i][box][4][y][x]; if(conf < step.threshold()) continue; float px = f5[i][box][0][y][x]; //Originally: in 0 to 1 in grid cell float py = f5[i][box][1][y][x]; //Originally: in 0 to 1 in grid cell float pw = f5[i][box][2][y][x]; //In grid units (for example, 0 to 13) float ph = f5[i][box][3][y][x]; //In grid units (for example, 0 to 13) //Convert the "position in grid cell" to "position in image (in grid cell units)" px += x; py += y; //Probabilities float prob = 0.0f; int pIdx = 0; for( int cl=0; cl < c; cl++) { float f = f5[i][box][5+cl][y][x]; if(f > prob){ prob = f; pIdx = cl; } } String lbl; if(classLabels == null || pIdx >= classLabels.size()){ lbl = String.valueOf(pIdx); } else { lbl = classLabels.get(pIdx); } out.add(BoundingBox.create(px/w, py/h, ph/h, pw/w, lbl, (double)prob)); } } } } Data dOut; if(step.keepOtherValues()){ dOut = data.clone(); } else { dOut = Data.empty(); } String outName = step.output() == null ? YoloToBoundingBoxStep.DEFAULT_OUTPUT_NAME : step.output(); dOut.putListBoundingBox(outName, out); return dOut; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/yolo/YoloToBoundingBoxStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.yolo; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; /** * Convert an NDArray for the predictions of a YOLO model to {@code List<BoundingBox>}.<br> * The NDArray is assumed to be in "standard" YOLO output format, after activations (sigmoid/softmax) have been applied.<br> * Input must be a float/double NDArray with shape [minibatch, B*(5+C), H, W] (if nchw=true) or [minibatch, H, W, B*(5+C)] (if nchw=false)<br> * B = number of bounding box priors<br> * C = number of classes<br> * H = output/label height<br> * W = output/label width<br> * Along the channel dimension (for each box prior), we have the following values: * 0: px = predicted x location within grid cell, 0.0 to 1.0<br> * 1: py = predicted y location within grid cell, 0.0 to 1.0<br> * 2: pw = predicted width, in grid cell, for example 0.0 to H (for example, pw = 2.0 -> 2.0/W fraction of image)<br> * 3: ph = predicted height, in grid cell, for example 0.0 to H (for example, ph = 2.0 -> 2.0/H fraction of image)<br> * 4: c = object confidence - i.e., probability an object is present or not, 0.0 to 1.0<br> * 5 to 4+C = probability of class (given an object is present), 0.0 to 1.0, with values summing to 1.0<br> * <br> * Note that the height/width dimensions are grid cell units - for example, with 416x416 input, 32x downsampling by the network * we have 13x13 grid cells (each corresponding to 32 pixels in the input image). Thus, a centerX of 5.5 would be xPixels=5.5x32 * = 176 pixels from left.<br> * Widths and heights are similar: in this example, a with of 13 would be the entire image (416 pixels), and a height of * 6.5 would be 6.5/13 = 0.5 of the image (208 pixels). 
* */ @Schema(description = "Convert an NDArray for the predictions of a YOLO model to {@code List<BoundingBox>}.<br>" + "The NDArray is assumed to be in \"standard\" YOLO output format, after activations (sigmoid/softmax) have been applied.<br>" + "Input must be a float/double NDArray with shape [minibatch, B*(5+C), H, W] (if nchw=true) or [minibatch, H, W, B*(5+C)] (if nchw=false)<br> " + "B = number of bounding box priors<br>" + "C = number of classes<br>" + "H = output/label height<br>" + "W = output/label width<br>" + "Along the channel dimension (for each box prior), we have the following values:" + "0: px = predicted x location within grid cell, 0.0 to 1.0<br>" + "1: py = predicted y location within grid cell, 0.0 to 1.0<br>" + "2: pw = predicted width, in grid cell, for example 0.0 to H (for example, pw = 2.0 -> 2.0/W fraction of image)<br>" + "3: ph = predicted height, in grid cell, for example 0.0 to H (for example, ph = 2.0 -> 2.0/H fraction of image)<br>" + "4: c = object confidence - i.e., probability an object is present or not, 0.0 to 1.0<br>" + "5 to 4+C = probability of class (given an object is present), 0.0 to 1.0, with values summing to 1.0<br>" + "<br>" + "Note that the height/width dimensions are grid cell units - for example, with 416x416 input, 32x downsampling by the network" + "we have 13x13 grid cells (each corresponding to 32 pixels in the input image). Thus, a centerX of 5.5 would be xPixels=5.5x32" + "= 176 pixels from left.<br>" + "Widths and heights are similar: in this example, a with of 13 would be the entire image (416 pixels), and a height of" + "6.5 would be 6.5/13 = 0.5 of the image (208 pixels).") @Data @Accessors(fluent = true) @JsonName("YOLO_BBOX") public class YoloToBoundingBoxStep implements PipelineStep { public static final String DEFAULT_OUTPUT_NAME = "bounding_boxes"; @Schema(description = "Name of the input - optional. If not set, the input is inferred (assuming a single NDArray exists in the input)") protected String input; @Schema(description = "Name of the input - optional. If not set, \"bounding_boxes\" is used") protected String output; @Schema(description = "The data format - NCHW (true) or NHWC (false) aka 'channels first' (true) or 'channels last' (false)") protected boolean nchw = true; @Schema(description = "The threshold, in range 0.0 to 1.0. Any boxes with object confidence less than this will be ignored") protected double threshold = 0.5; @Schema(description = "Non-max suppression threshold to use, to filter closely overlapping objects") protected double nmsThreshold = 0.5; @Schema(description = "Number of classes. Not required if classLabels are provided") protected Integer numClasses; @Schema(description = "Optional - names of the object classes") protected List<String> classLabels; @Schema(description = "If true: keep all other input fields in the Data instance. False: only return the List<BoundingBox>") protected boolean keepOtherValues = true; @Tolerate public YoloToBoundingBoxStep classLabels(String... labels){ this.classLabels = Arrays.asList(labels); return this; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/bbox/yolo/YoloToBoundingBoxStepFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.bbox.yolo; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class YoloToBoundingBoxStepFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof YoloToBoundingBoxStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step of type: %s", step.getClass()); return new YoloToBoundingBoxRunner((YoloToBoundingBoxStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/logging/LoggingPipelineStepRunnerFactory.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.impl.step.logging; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import lombok.NonNull; import org.nd4j.common.base.Preconditions; public class LoggingPipelineStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep.getClass() == LoggingStep.class; } @Override public PipelineStepRunner create(@NonNull PipelineStep pipelineStep) { Preconditions.checkArgument(canRun(pipelineStep), "Unable to execute pipeline step of type: {}", pipelineStep.getClass()); return new LoggingRunner((LoggingStep) pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/logging/LoggingRunner.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package ai.konduit.serving.pipeline.impl.step.logging;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.event.Level;

import java.util.List;
import java.util.regex.Pattern;

@Slf4j
@CanRun(LoggingStep.class)
public class LoggingRunner implements PipelineStepRunner {

    private final LoggingStep step;
    private final Pattern pattern;

    public LoggingRunner(@NonNull LoggingStep step) {
        this.step = step;
        if (step.keyFilterRegex() != null) {
            pattern = Pattern.compile(step.keyFilterRegex());
        } else {
            pattern = null;
        }
    }

    @Override
    public void close() {
        //No-op
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        Level logLevel = step.logLevel();
        LoggingStep.Log toLog = step.log();
        boolean keysOnly = toLog == LoggingStep.Log.KEYS;

        //TODO does SLF4J have utility methods for this?
        boolean skip = logLevel == Level.ERROR && !log.isErrorEnabled() ||
                logLevel == Level.WARN && !log.isWarnEnabled() ||
                logLevel == Level.INFO && !log.isInfoEnabled() ||
                logLevel == Level.DEBUG && !log.isDebugEnabled() ||
                logLevel == Level.TRACE && !log.isTraceEnabled();

        if (skip)
            return data;

        List<String> keys = data.keys();
        StringBuilder sb = new StringBuilder();
        for (String s : keys) {
            //Per the keyFilterRegex documentation, only keys that match the regex are logged
            if (pattern != null && !pattern.matcher(s).matches())
                continue;
            if (keysOnly) {
                if (sb.length() > 0) {
                    sb.append(", ");
                }
                sb.append("\"").append(s).append("\"");
            } else {
                if (sb.length() > 0)
                    sb.append("\n");
                sb.append("\"").append(s).append("\": ").append(data.get(s));
            }
        }

        String s = sb.toString();
        //TODO Is there a cleaner way to do this?
        switch (logLevel) {
            case ERROR:
                log.error(s);
                break;
            case WARN:
                log.warn(s);
                break;
            case INFO:
                log.info(s);
                break;
            case DEBUG:
                log.debug(s);
                break;
            case TRACE:
                log.trace(s);
                break;
        }

        return data;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/logging/LoggingStep.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package ai.konduit.serving.pipeline.impl.step.logging;

import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import org.slf4j.event.Level;

/**
 * LoggingStep simply logs the input Data keys (and optionally values) and returns the input data unchanged
 *
 * @author Alex Black
 */
@Data
@Accessors(fluent = true)
@NoArgsConstructor
@JsonName("LOGGING")
@Schema(description = "A pipeline step that simply logs the input Data keys (and optionally values) and returns " +
        "the input data unchanged.")
public class LoggingStep implements PipelineStep {

    @Schema(description = "An enum specifying what part of a data instance should be logged. <br><br>" +
            "KEYS -> only output data keys, <br>" +
            "KEYS_AND_VALUES -> output both data keys and values.")
    public enum Log {
        KEYS,
        KEYS_AND_VALUES
    }

    @Schema(description = "Log level. This is similar to how standard logging frameworks define logging categories.",
            defaultValue = "INFO")
    private Level logLevel = Level.INFO;

    @Schema(description = "An enum specifying what part of a data instance should be logged.",
            defaultValue = "KEYS")
    private Log log = Log.KEYS;

    @Schema(description = "A regular expression that allows filtering of keys - i.e., only those that match the regex will be logged.")
    private String keyFilterRegex = null;

    public LoggingStep(@JsonProperty("logLevel") Level logLevel, @JsonProperty("log") Log log,
                       @JsonProperty("keyFilterRegex") String keyFilterRegex) {
        this.logLevel = logLevel;
        this.log = log;
        this.keyFilterRegex = keyFilterRegex;
    }
}
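A brief usage sketch for the step above; the filter regex shown is a hypothetical example:

// Hedged sketch, not from the source: log matching keys and their values at DEBUG level.
LoggingStep logging = new LoggingStep()
        .logLevel(Level.DEBUG)
        .log(LoggingStep.Log.KEYS_AND_VALUES)
        .keyFilterRegex("prediction.*"); // hypothetical filter - only matching keys are logged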
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/classifier/ClassifierOutputRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.impl.step.ml.classifier;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.NDArray;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.registry.MicrometerRegistry;
import ai.konduit.serving.pipeline.settings.KonduitSettings;
import ai.konduit.serving.pipeline.util.DataUtils;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static ai.konduit.serving.pipeline.util.NDArrayUtils.*;

@RequiredArgsConstructor
@CanRun(ClassifierOutputStep.class)
public class ClassifierOutputRunner implements PipelineStepRunner {

    @NonNull
    protected final ClassifierOutputStep step;
    private boolean metricsSetup = false;
    private MeterRegistry registry = null;
    private List<Counter> classificationMetricsCounters = new ArrayList<>();

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        String inputName = step.inputName();

        if (inputName == null) {
            String errMultipleKeys = "NDArray field name was not provided and could not be inferred: multiple NDArray fields exist: %s and %s";
            String errNoKeys = "NDArray field name was not provided and could not be inferred: no NDArray fields exist";
            inputName = DataUtils.inferField(data, ValueType.NDARRAY, false, errMultipleKeys, errNoKeys);
        }

        String probName = step.probName() == null ? ClassifierOutputStep.DEFAULT_PROB_NAME : step.probName();
        String indexName = step.indexName() == null ? ClassifierOutputStep.DEFAULT_INDEX_NAME : step.indexName();
        String labelName = step.labelName() == null ? ClassifierOutputStep.DEFAULT_LABEL_NAME : step.labelName();

        NDArray classifierOutput = data.getNDArray(inputName);
        if (classifierOutput.shape().length > 2) {
            throw new UnsupportedOperationException("Invalid input to ClassifierOutputStep: only rank 1 or 2 inputs are supported, got array with shape " + Arrays.toString(classifierOutput.shape()));
        }
        classifierOutput = FloatNDArrayToDouble(classifierOutput);

        boolean batch = false;
        if (classifierOutput.shape().length == 2 && classifierOutput.shape()[0] > 1) {
            batch = true;
        }

        // If not specified, the predicted class index as a string is used - i.e., "0", "1"
        List<String> labels = step.labels();
        if (labels == null) {
            labels = new ArrayList<>();
        }
        if (labels.isEmpty()) {
            //Number of classes = size of the last dimension (supports both rank 1 and rank 2 input)
            long numClasses = classifierOutput.shape()[classifierOutput.shape().length - 1];
            for (int i = 0; i < numClasses; i++) {
                labels.add(Integer.toString(i));
            }
        }

        if (!metricsSetup) {
            registry = MicrometerRegistry.getRegistry();
            if (registry != null) {
                for (String label : labels) {
                    classificationMetricsCounters.add(Counter.builder(label)
                            .description("Classification counts seen so far for class label: " + label)
                            .tag("servingId", KonduitSettings.getServingId())
                            .baseUnit("classification.outcome")
                            .register(registry));
                }
            }
            metricsSetup = true;
        }

        if (!batch) {
            double[] classifierOutputArr = squeeze(classifierOutput);
            double[] maxValueWithIdx = getMaxValueAndIndex(classifierOutputArr);
            double prob = maxValueWithIdx[0];
            long index = (long) maxValueWithIdx[1];
            String label = labels.get((int) index);

            if (registry != null && index < classificationMetricsCounters.size()) {
                classificationMetricsCounters.get((int) index).increment();
            }

            if (step.topN() != null && step.topN() > 1) {
                if (step.returnProb()) {
                    data.putListDouble(probName, Collections.singletonList(prob));
                }
                if (step.returnIndex()) {
                    data.putListInt64(indexName, Collections.singletonList(index));
                }
                if (step.returnLabel()) {
                    data.putListString(labelName, Collections.singletonList(label));
                }
            } else {
                if (step.returnProb()) {
                    data.put(probName, prob);
                }
                if (step.returnIndex()) {
                    data.put(indexName, index);
                }
                if (step.returnLabel()) {
                    data.put(labelName, label);
                }
            }

            if (step.allProbabilities()) {
                data.put("allProbabilities", NDArray.create(classifierOutputArr));
            }
        }

        if (batch) {
            //Minibatch size is the first dimension of the [minibatch, numClasses] array
            int bS = (int) classifierOutput.shape()[0];
            double[][] y = classifierOutput.getAs(double[][].class);
            List<Double> probs = new ArrayList<>();
            List<Long> indices = new ArrayList<>();
            List<String> labelsList = new ArrayList<>();
            List<NDArray> allProbabilities = new ArrayList<>();
            for (int i = 0; i < bS; i++) {
                double[] sample = y[i];
                double[] maxValueWithIdx = getMaxValueAndIndex(sample);
                double prob = maxValueWithIdx[0];
                long index = (long) maxValueWithIdx[1];
                String label = labels.get((int) index);
                if (registry != null && index < classificationMetricsCounters.size()) {
                    classificationMetricsCounters.get((int) index).increment();
                }
                probs.add(prob);
                indices.add(index);
                labelsList.add(label);
                allProbabilities.add(NDArray.create(sample));
            }

            if (step.returnProb()) {
                data.putListDouble(probName, probs);
            }
            if (step.returnIndex()) {
                data.putListInt64(indexName, indices);
            }
            if (step.returnLabel()) {
                data.putListString(labelName, labelsList);
            }
            if (step.allProbabilities()) {
                data.putListNDArray("allProbabilities", allProbabilities);
            }
        }

        return data;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/classifier/ClassifierOutputRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.ml.classifier; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class ClassifierOutputRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof ClassifierOutputStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step: %s", step); return new ClassifierOutputRunner((ClassifierOutputStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/classifier/ClassifierOutputStep.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.impl.step.ml.classifier;

import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import io.swagger.v3.oas.annotations.media.Schema;

import java.util.List;

/**
 * ClassifierOutputStep takes as input a numerical 2d NDArray (i.e., float/double etc type) with shape [minibatch, numClasses]
 * which represents the probability distribution (softmax) predictions for a standard classifier and returns, based on this array:<br>
 * - The predicted class label - as a String<br>
 * - The predicted class index - as an integer (long)<br>
 * - The predicted class probability - as a Double
 */
@Data
@Accessors(fluent = true)
@AllArgsConstructor
@NoArgsConstructor
@JsonName("CLASSIFIER_OUTPUT")
@Schema(description = "ClassifierOutputStep takes as input a numerical 2d NDArray (i.e., float/double etc type) with shape [minibatch, numClasses]" +
        " which represents the softmax predictions for a standard classifier and returns, based on this array:<br>" +
        "- The predicted class label - as a String<br>" +
        "- The predicted class index - as an integer (long)<br>" +
        "- The predicted class probability - as a Double")
public class ClassifierOutputStep implements PipelineStep {

    public static final String DEFAULT_PROB_NAME = "prob";
    public static final String DEFAULT_INDEX_NAME = "index";
    public static final String DEFAULT_LABEL_NAME = "label";

    @Schema(description = "inputName - optional. If set: this represents the NDArray. If not set: use DataUtils.inferField to find an NDArray field", defaultValue = "null")
    private String inputName = null;

    @Schema(description = "returnLabel, default is true; if false, don't return the label", defaultValue = "true")
    private boolean returnLabel = true;

    @Schema(description = "returnIndex, default is true", defaultValue = "true")
    private boolean returnIndex = true;

    @Schema(description = "returnProb, default is true", defaultValue = "true")
    private boolean returnProb = true;

    @Schema(description = "Output name for the label", defaultValue = DEFAULT_LABEL_NAME)
    private String labelName = DEFAULT_LABEL_NAME;

    @Schema(description = "Output name for the index", defaultValue = DEFAULT_INDEX_NAME)
    private String indexName = DEFAULT_INDEX_NAME;

    @Schema(description = "Output name for the label probabilities", defaultValue = DEFAULT_PROB_NAME)
    private String probName = DEFAULT_PROB_NAME;

    @Schema(description = "Class labels, as a List<String>. Optional. If not specified, the predicted class index as a string is used - i.e., \"0\", \"1\", etc", defaultValue = "null")
    private List<String> labels = null;

    @Schema(description = "Integer, null by default. If non-null and > 1, we return List<String>, List<Long>, List<Double> for the predicted class/index/probability instead of String/Long/Double.", defaultValue = "null")
    private Integer topN = null;

    @Schema(description = "If true, also returns a List<List<Double>> of all probabilities (basically, convert the NDArray to a list). False by default.", defaultValue = "false")
    private boolean allProbabilities = false;
}
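A minimal usage sketch for the step above; the input key and label names are hypothetical examples:

// Hedged sketch, not from the source: map raw softmax output to label/index/probability fields.
ClassifierOutputStep classifier = new ClassifierOutputStep()
        .inputName("output")                          // optional; inferred if a single NDArray is present
        .labels(Arrays.asList("cat", "dog", "bird"))  // hypothetical class labels
        .returnLabel(true)
        .returnIndex(true)
        .returnProb(true)
        .allProbabilities(false);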
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/regression/RegressionOutputRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.impl.step.ml.regression;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.NDArray;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.util.DataUtils;
import lombok.AllArgsConstructor;

import java.util.*;

import static ai.konduit.serving.pipeline.util.NDArrayUtils.FloatNDArrayToDouble;
import static ai.konduit.serving.pipeline.util.NDArrayUtils.squeeze;

@AllArgsConstructor
@CanRun(RegressionOutputStep.class)
public class RegressionOutputRunner implements PipelineStepRunner {

    protected final RegressionOutputStep step;

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        String inputName = step.inputName();

        if (inputName == null) {
            String errMultipleKeys = "NDArray field name was not provided and could not be inferred: multiple NDArray fields exist: %s and %s";
            String errNoKeys = "NDArray field name was not provided and could not be inferred: no NDArray fields exist";
            inputName = DataUtils.inferField(data, ValueType.NDARRAY, false, errMultipleKeys, errNoKeys);
        }

        NDArray regressionOutput = data.getNDArray(inputName);
        if (regressionOutput.shape().length > 2) {
            throw new UnsupportedOperationException("Invalid input to RegressionOutputStep: only rank 1 or 2 inputs are supported, got array with shape " + Arrays.toString(regressionOutput.shape()));
        }
        regressionOutput = FloatNDArrayToDouble(regressionOutput);

        boolean batch = false;
        if (regressionOutput.shape().length == 2 && regressionOutput.shape()[0] > 1) {
            batch = true;
        }

        Map<String, Integer> outputNames = step.names();
        if (outputNames == null || outputNames.isEmpty()) {
            throw new UnsupportedOperationException("RegressionOutputStep names field was not provided or is empty");
        }

        if (!batch) {
            double[] regressionOutputArr = squeeze(regressionOutput);
            for (Map.Entry<String, Integer> entry : outputNames.entrySet())
                data.put(entry.getKey(), regressionOutputArr[entry.getValue()]);
        }

        if (batch) {
            int bS = (int) regressionOutput.shape()[0];
            double[][] y = regressionOutput.getAs(double[][].class);
            for (Map.Entry<String, Integer> entry : outputNames.entrySet()) {
                List<Double> list = new ArrayList<>();
                for (int i = 0; i < bS; i++) {
                    list.add(y[i][entry.getValue()]);
                }
                data.putListDouble(entry.getKey(), list);
            }
        }

        return data;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/regression/RegressionOutputRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.ml.regression; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class RegressionOutputRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof RegressionOutputStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step: %s", step); return new RegressionOutputRunner((RegressionOutputStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/regression/RegressionOutputStep.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.impl.step.ml.regression;

import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;

import java.util.Map;

/**
 * RegressionOutput adapter - extracts values from an NDArray to double values in the output Data instance,
 * with names as specified.<br>
 * For example input=Data{"myArray"=<ndarray>}, output=Data{"x"=ndarray[0], "y"=ndarray[7]}
 */
@Data
@Accessors(fluent = true)
@AllArgsConstructor
@NoArgsConstructor
@JsonName("REGRESSION_OUTPUT")
@Schema(description = "RegressionOutput adapter - extracts values from an NDArray to double values in the output Data instance," +
        " with names as specified.<br>For example input=Data{\"myArray\"=<ndarray>}, output=Data{\"x\"=ndarray[0], \"y\"=ndarray[7]}")
public class RegressionOutputStep implements PipelineStep {

    @Schema(description = "inputName - optional. If set: this represents the NDArray. If not set: use DataUtils.inferField to find an NDArray field")
    private String inputName;

    @Schema(description = "Map<String,Integer> where the key is the output name, and the value is the index in the array.", defaultValue = "null")
    private Map<String, Integer> names;
}
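A minimal usage sketch for the step above, matching the Javadoc example; the key names and indices are hypothetical:

// Hedged sketch, not from the source: extract index 0 as "x" and index 7 as "y".
Map<String, Integer> names = new HashMap<>();
names.put("x", 0);
names.put("y", 7);
RegressionOutputStep regression = new RegressionOutputStep()
        .inputName("myArray") // optional; inferred if a single NDArray is present
        .names(names);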
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/ssd/SSDToBoundingBoxRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.impl.step.ml.ssd;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.BoundingBox;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.NDArray;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import lombok.AllArgsConstructor;

import java.util.ArrayList;
import java.util.List;

@AllArgsConstructor
@CanRun(SSDToBoundingBoxStep.class)
public class SSDToBoundingBoxRunner implements PipelineStepRunner {

    protected final SSDToBoundingBoxStep step;

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        double threshold = step.threshold();

        String key = "detection_boxes"; //TODO
        String prob = "detection_scores";
        NDArray bND = data.getNDArray(key);
        NDArray pND = data.getNDArray(prob);
        float[][][] bArr = bND.getAs(float[][][].class); //Batch, num, xy
        float[][] pArr = pND.getAs(float[][].class); //Batch, num

        //Class predictions are only extracted if class labels were configured
        float[][] lArr = null;
        if (step.classLabels != null) {
            String labels = "detection_classes";
            NDArray lND = data.getNDArray(labels);
            lArr = lND.getAs(float[][].class);
        }

        List<BoundingBox> l = new ArrayList<>();
        for (int i = 0; i < bArr[0].length; i++) {
            //SSD order usually: [y1, x1, y2, x2]
            double y1 = bArr[0][i][0];
            double x1 = bArr[0][i][1];
            double y2 = bArr[0][i][2];
            double x2 = bArr[0][i][3];
            double p = pArr[0][i];
            if (p < threshold)
                continue;

            if (step.aspectRatio() != null) {
                double[] d = adjustAspect(x1, x2, y1, y2, step.aspectRatio());
                x1 = d[0];
                x2 = d[1];
                y1 = d[2];
                y2 = d[3];
            }

            if (step.scale() != null && step.scale() != 1.0) {
                double s = step.scale();
                double cx = (x1 + x2) / 2.0;
                double cy = (y1 + y2) / 2.0;
                x1 = cx - (cx - x1) * s;
                x2 = cx + (x2 - cx) * s;
                y1 = cy - (cy - y1) * s;
                y2 = cy + (y2 - cy) * s;
            }

            String labelStr = null;
            if (lArr != null) {
                //SSD class predictions are 1-indexed
                float label = lArr[0][i];
                if (step.classLabels.size() > 0 && (int) label > step.classLabels.size()) {
                    throw new ArrayIndexOutOfBoundsException(String.format("Predicted label index was %s but only %s labels were provided",
                            (int) label, step.classLabels.size()));
                }
                labelStr = !step.classLabels.isEmpty() ? step.classLabels.get((int) label - 1) : "no label provided";
            }

            l.add(BoundingBox.createXY(x1, x2, y1, y2, labelStr, p));
        }

        //TODO copy other data to output
        String outName = step.outputName();
        if (outName == null)
            outName = SSDToBoundingBoxStep.DEFAULT_OUTPUT_NAME;
        Data d = Data.singletonList(outName, l, ValueType.BOUNDING_BOX);

        if (step.keepOtherValues()) {
            for (String s : data.keys()) {
                if (!key.equals(s) && !prob.equals(s)) {
                    d.copyFrom(s, data);
                }
            }
        }

        return d;
    }

    protected double[] adjustAspect(double x1, double x2, double y1, double y2, double aspect) {
        double w = (x2 - x1);
        double h = (y2 - y1);
        double currAspect = w / h;
        if (currAspect == aspect) {
            return new double[]{x1, x2, y1, y2};
        } else if (currAspect < aspect) {
            //Increase width
            double newW = aspect * h;
            double cx = (x1 + x2) / 2.0;
            double x1a = cx - newW / 2.0;
            double x2a = cx + newW / 2.0;
            return new double[]{x1a, x2a, y1, y2};
        } else if (currAspect > aspect) {
            //Increase height
            double newH = w / aspect;
            double cy = (y1 + y2) / 2.0;
            double y1a = cy - newH / 2.0;
            double y2a = cy + newH / 2.0;
            return new double[]{x1, x2, y1a, y2a};
        } else {
            throw new RuntimeException("Invalid aspect ratios: current = " + currAspect + ", required = " + aspect);
        }
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/ssd/SSDToBoundingBoxRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.impl.step.ml.ssd; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class SSDToBoundingBoxRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof SSDToBoundingBoxStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step: %s", step); return new SSDToBoundingBoxRunner((SSDToBoundingBoxStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/impl/step/ml/ssd/SSDToBoundingBoxStep.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.impl.step.ml.ssd;

import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import lombok.experimental.Tolerate;

import java.util.Arrays;
import java.util.List;

/**
 * A pipeline step that configures extraction of bounding boxes from an SSD model output.
 * <ul>
 * <li><b>scale</b>: An optional way to increase the size of the bounding boxes by some fraction. If specified, a value
 * of 1.0 is equivalent to no scaling. A scale of 2.0 means the center is unchanged, but the width and height are now
 * 2.0x larger than they would otherwise be</li>
 * <li><b>aspectRatio</b>: An optional way to control the output shape (aspect ratio) of the bounding boxes. Defined in
 * terms of "width / height" - if specified, an aspect ratio of 1.0 gives a square output; an aspect ratio of 2.0 gives
 * an output twice as wide as it is high. Note that to make the output the correct aspect ratio, one of the height or width
 * will be increased; the other dimension will not change. That is, the pre-aspect-ratio-corrected box will be contained
 * fully within the output box</li>
 * </ul>
 */
@Data
@Accessors(fluent = true)
@AllArgsConstructor
@NoArgsConstructor
@JsonName("SSD_TO_BBOX")
@Schema(description = "A pipeline step that configures extraction of bounding boxes from an SSD model output.")
public class SSDToBoundingBoxStep implements PipelineStep {
    public static final String DEFAULT_OUTPUT_NAME = "bounding_boxes";

    // You can do new SSDToBoundingBoxStep().classLabels(SSDToBoundingBoxStep.COCO_LABELS)
    public static final String[] COCO_LABELS = new String[]{"person", "bicycle", "car", "motorcycle", "airplane", "bus",
            "train", "truck", "boat", "traffic light", "fire hydrant", "street sign", "stop sign", "parking meter",
            "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "hat",
            "backpack", "umbrella", "shoe", "eye glasses", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard",
            "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", "tennis racket",
            "bottle", "plate", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich",
            "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted plant",
            "bed", "mirror", "dining table", "window", "desk", "toilet", "door", "tv", "laptop", "mouse", "remote",
            "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", "blender", "book",
            "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush", "hair brush"};

    //TODO config
    @Schema(description = "A list of class labels.")
    protected List<String> classLabels;

    @Schema(description = "If true, other data key and values from the previous step are kept and passed on to the next step as well.", defaultValue = "true")
    protected boolean keepOtherValues = true;

    @Schema(description = "Threshold applied to the output of the SSD model - bounding boxes with a score below this value are discarded.", defaultValue = "0.5")
    protected double threshold = 0.5;

    @Schema(description = "An optional way to increase the size of the bounding boxes by some fraction. If specified, a value\n" +
            "of 1.0 is equivalent to no scaling. A scale of 2.0 means the center is unchanged, but the width and height are now\n" +
            "2.0x larger than they would otherwise be")
    protected Double scale = null;

    @Schema(description = "An optional way to control the output shape (aspect ratio) of the bounding boxes. Defined in\n" +
            "terms of \"width / height\" - if specified, an aspect ratio of 1.0 gives a square output; an aspect ratio of 2.0 gives\n" +
            "an output twice as wide as it is high. Note that to make the output the correct aspect ratio, one of the height or width\n" +
            "will be increased; the other dimension will not change. That is, the pre-aspect-ratio-corrected box will be contained\n" +
            "fully within the output box")
    protected Double aspectRatio = null;

    @Schema(description = "Output key name that the bounding boxes will be stored under.", defaultValue = DEFAULT_OUTPUT_NAME)
    protected String outputName = DEFAULT_OUTPUT_NAME;

    @Tolerate
    public SSDToBoundingBoxStep classLabels(String... classLabels) {
        return this.classLabels(Arrays.asList(classLabels));
    }
}
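A usage sketch following the COCO_LABELS hint in the source above; the scale and aspect ratio values are hypothetical examples:

// Hedged sketch, not from the source: SSD post-processing with the built-in COCO label set,
// scaling each box up 10% and forcing square output boxes.
SSDToBoundingBoxStep ssd = new SSDToBoundingBoxStep()
        .classLabels(SSDToBoundingBoxStep.COCO_LABELS)
        .threshold(0.5)
        .scale(1.1)        // 1.0 = no scaling
        .aspectRatio(1.0)  // width / height
        .outputName(SSDToBoundingBoxStep.DEFAULT_OUTPUT_NAME);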
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/AbstractRegistry.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.registry;

import lombok.NonNull;

import java.util.*;

public abstract class AbstractRegistry<T> {

    protected Class<T> factoryClass;
    protected List<T> factories;
    protected Map<Class<?>, List<T>> factoriesMap;

    //Intentionally package private (no/default access modifier)
    AbstractRegistry(Class<T> factoryClass) {
        this.factoryClass = factoryClass;
    }

    public int registryNumFactories() {
        if (factories == null)
            init();
        return factories.size();
    }

    public List<T> registryGetFactories() {
        if (factories == null)
            init();
        return Collections.unmodifiableList(factories);
    }

    public T registryGetFactoryFor(@NonNull Object o) {
        if (factories == null)
            init();
        List<T> l = factoriesMap.get(o.getClass());
        if (l != null && !l.isEmpty())
            return l.get(0); //TODO what if there are multiple factories that can create an NDArray from this? Which should we use?

        //Otherwise: iterate through (for example: in case of interface)
        for (T f : factories) {
            if (acceptFactory(f, o))
                return f;
        }
        return null;
    }

    public abstract boolean acceptFactory(T factory, Object o);

    public abstract Set<Class<?>> supportedForFactory(T factory);

    protected synchronized void init() {
        if (factories != null)
            return;
        List<T> l = new ArrayList<>();
        Map<Class<?>, List<T>> m = new HashMap<>();
        ServiceLoader<T> sl = ServiceLoader.load(factoryClass);
        for (T f : sl) {
            l.add(f);
            Set<Class<?>> s = supportedForFactory(f);
            for (Class<?> c : s) {
                m.computeIfAbsent(c, x -> new ArrayList<>()).add(f);
            }
        }

        factoriesMap = m;
        factories = l;
    }

    public void addFactoryInstance(T factory) {
        if (factories == null)
            init();
        this.factories.add(factory);
    }
}
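A sketch of the two ways a factory can become visible to a registry built on the class above; "com.example.MyNDArrayFactory" is a hypothetical implementation class:

// Hedged sketch, not from the source.
// 1. ServiceLoader discovery, picked up by init() at first use - ship a provider
//    configuration file in the factory's jar:
//      META-INF/services/ai.konduit.serving.pipeline.api.format.NDArrayFactory
//    containing one fully-qualified implementation class name per line:
//      com.example.MyNDArrayFactory
// 2. Programmatic registration at runtime, bypassing ServiceLoader:
NDArrayFactoryRegistry.addFactory(new com.example.MyNDArrayFactory());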
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/BaseFactoryRegistry.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.registry; import ai.konduit.serving.pipeline.api.format.FormatFactory; import ai.konduit.serving.pipeline.api.format.ImageFactory; import lombok.NonNull; import java.util.*; public abstract class BaseFactoryRegistry<T extends FormatFactory> extends AbstractRegistry<T> { //Intentionally package private (no/default access modifier) BaseFactoryRegistry(Class<T> factoryClass){ super(factoryClass); } public boolean acceptFactory(T factory, Object o){ return factory.canCreateFrom(o); } public Set<Class<?>> supportedForFactory(T factory){ return factory.supportedTypes(); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/ImageConverterRegistry.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.registry;

import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.format.*;
import ai.konduit.serving.pipeline.impl.data.image.Png;
import lombok.AllArgsConstructor;
import lombok.NonNull;
import org.nd4j.common.primitives.Pair;

import java.util.Collections;
import java.util.List;
import java.util.Set;

public class ImageConverterRegistry extends AbstractRegistry<ImageConverter> {

    private static final ImageConverterRegistry INSTANCE = new ImageConverterRegistry();

    protected ImageConverterRegistry() {
        super(ImageConverter.class);
    }

    public static int numFactories() {
        return INSTANCE.registryNumFactories();
    }

    public static List<ImageConverter> getFactories() {
        return INSTANCE.registryGetFactories();
    }

    public static ImageConverter getFactoryFor(@NonNull Object o) {
        return INSTANCE.registryGetFactoryFor(o);
    }

    @Override
    public boolean acceptFactory(ImageConverter factory, Object o) {
        Pair<Image, ImageFormat> p = (Pair<Image, ImageFormat>) o;
        return factory.canConvert(p.getFirst(), p.getSecond());
    }

    @Override
    public Set<Class<?>> supportedForFactory(ImageConverter factory) {
        return Collections.emptySet();
    }

    public static ImageConverter getConverterFor(Image img, Class<?> type) {
        return INSTANCE.getConverterForClass(img, type);
    }

    public static ImageConverter getConverterFor(Image img, ImageFormat<?> type) {
        return INSTANCE.getConverterForType(img, type);
    }

    public ImageConverter getConverterForClass(Image img, Class<?> type) {
        if (factories == null)
            init();

        if (factoriesMap.containsKey(type)) {
            return factoriesMap.get(type).get(0); //TODO multiple converters available
        }

        for (ImageConverter c : factories) {
            if (c.canConvert(img, type)) {
                return c;
            }
        }

        //No converter is available. Try to fall back on X -> PNG -> Y
        if (type != Png.class && !(img.get() instanceof Png)) {
            ImageConverter c1 = getConverterForClass(img, Png.class);
            if (c1 != null) {
                Image i2 = Image.create(c1.convert(img, Png.class)); //TODO this is ugly - we throw this result away!
                ImageConverter c2 = getConverterForClass(i2, type);
                return new TwoStepImageConverter(img.get().getClass(), type, c1, c2);
            }
        }

        return null;
    }

    public ImageConverter getConverterForType(Image img, ImageFormat<?> type) {
        if (factories == null)
            init();
        for (ImageConverter c : factories) {
            if (c.canConvert(img, type)) {
                return c;
            }
        }
        return null;
    }

    public static void addConverter(ImageConverter f) {
        INSTANCE.addFactoryInstance(f);
    }

    @AllArgsConstructor
    private static class TwoStepImageConverter implements ImageConverter {
        private Class<?> cFrom;
        private Class<?> cTo;
        private ImageConverter c1;
        private ImageConverter c2;

        @Override
        public boolean canConvert(Image from, ImageFormat<?> to) {
            return false;
        }

        @Override
        public boolean canConvert(Image from, Class<?> to) {
            return cFrom.isAssignableFrom(from.get().getClass()) && to.isAssignableFrom(cTo);
        }

        @Override
        public <T> T convert(Image from, ImageFormat<T> to) {
            throw new UnsupportedOperationException("Not supported");
        }

        @Override
        public <T> T convert(Image from, Class<T> to) {
            Image png = Image.create(c1.convert(from, Png.class));
            return (T) c2.convert(png, cTo);
        }
    }
}
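A usage sketch for the registry above; `image` is assumed to be an existing Image instance, and the target type Png comes from the source:

// Hedged sketch, not from the source: look up and apply a converter. If no direct
// converter exists, the registry may return the two-step X -> PNG -> Y fallback.
ImageConverter converter = ImageConverterRegistry.getConverterFor(image, Png.class);
Png png = converter.convert(image, Png.class);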
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/ImageFactoryRegistry.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.registry; import ai.konduit.serving.pipeline.api.format.ImageFactory; import lombok.NonNull; import java.util.List; public class ImageFactoryRegistry extends BaseFactoryRegistry<ImageFactory> { private static final ImageFactoryRegistry INSTANCE = new ImageFactoryRegistry(); protected ImageFactoryRegistry(){ super(ImageFactory.class); } public static int numFactories(){ return INSTANCE.registryNumFactories(); } public static List<ImageFactory> getFactories(){ return INSTANCE.registryGetFactories(); } public static ImageFactory getFactoryFor(@NonNull Object o){ return INSTANCE.registryGetFactoryFor(o); } public static void addFactory(ImageFactory f){ INSTANCE.addFactoryInstance(f); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/MicrometerRegistry.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.registry; import ai.konduit.serving.pipeline.impl.metrics.MetricsProvider; import io.micrometer.core.instrument.MeterRegistry; import io.micrometer.core.instrument.simple.SimpleMeterRegistry; import lombok.extern.slf4j.Slf4j; import org.nd4j.common.io.CollectionUtils; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.ServiceLoader; @Slf4j public class MicrometerRegistry { private static List<io.micrometer.core.instrument.MeterRegistry> registries; static { initRegistries(); } public static io.micrometer.core.instrument.MeterRegistry getRegistry() { if (CollectionUtils.isEmpty(registries)) { initRegistries(); } if (registries.size() > 1) { log.info("Loaded {} MeterRegistry instances. Loading the first one.", registries.size()); } return registries.get(0); } public static synchronized void initRegistries() { if(registries == null) { registries = new ArrayList<>(); } else { registries.clear(); } ServiceLoader<MetricsProvider> sl = ServiceLoader.load(MetricsProvider.class); Iterator<MetricsProvider> iterator = sl.iterator(); while(iterator.hasNext()){ MetricsProvider r = iterator.next(); MeterRegistry reg = r.getRegistry(); registries.add(reg); io.micrometer.core.instrument.Metrics.globalRegistry.add(reg); } if(registries.isEmpty()){ //Nothing found via ServiceLoader registries.add(new SimpleMeterRegistry()); } } }
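A brief sketch of consuming the registry above with Micrometer's standard Counter API; the metric name is a hypothetical example:

// Hedged sketch, not from the source: record a custom metric against the shared registry.
MeterRegistry registry = MicrometerRegistry.getRegistry();
Counter requests = Counter.builder("custom.requests") // hypothetical metric name
        .description("Requests seen so far")
        .register(registry);
requests.increment();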
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/NDArrayConverterRegistry.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.registry;

import ai.konduit.serving.pipeline.api.data.NDArray;
import ai.konduit.serving.pipeline.api.format.*;
import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray;
import lombok.AllArgsConstructor;
import lombok.NonNull;
import org.nd4j.common.primitives.Pair;

import java.util.Collections;
import java.util.List;
import java.util.Set;

public class NDArrayConverterRegistry extends AbstractRegistry<NDArrayConverter> {

    private static final NDArrayConverterRegistry INSTANCE = new NDArrayConverterRegistry();

    protected NDArrayConverterRegistry() {
        super(NDArrayConverter.class);
    }

    public static int numFactories() {
        return INSTANCE.registryNumFactories();
    }

    public static List<NDArrayConverter> getFactories() {
        return INSTANCE.registryGetFactories();
    }

    public static NDArrayConverter getFactoryFor(@NonNull Object o) {
        return INSTANCE.registryGetFactoryFor(o);
    }

    @Override
    public boolean acceptFactory(NDArrayConverter factory, Object o) {
        Pair<NDArray, NDArrayFormat> p = (Pair<NDArray, NDArrayFormat>) o;
        return factory.canConvert(p.getFirst(), p.getSecond());
    }

    @Override
    public Set<Class<?>> supportedForFactory(NDArrayConverter factory) {
        return Collections.emptySet();
    }

    public static NDArrayConverter getConverterFor(NDArray arr, Class<?> type) {
        return INSTANCE.getConverterForClass(arr, type);
    }

    public static NDArrayConverter getConverterFor(NDArray arr, NDArrayFormat<?> type) {
        return INSTANCE.getConverterForType(arr, type);
    }

    public NDArrayConverter getConverterForClass(NDArray arr, Class<?> type) {
        if (factories == null)
            init();

        if (factoriesMap.containsKey(type)) {
            return factoriesMap.get(type).get(0); //TODO multiple converters
        }

        for (NDArrayConverter c : factories) {
            if (c.canConvert(arr, type)) {
                return c;
            }
        }

        //No factory is available. Try to fall back on X -> SerializedNDArray -> Y
        if (type != SerializedNDArray.class && !(arr.get() instanceof SerializedNDArray)) {
            NDArrayConverter c1 = getConverterForClass(arr, SerializedNDArray.class);
            if (c1 != null) {
                NDArray arr2 = NDArray.create(c1.convert(arr, SerializedNDArray.class)); //TODO this is ugly - we throw this result away!
                NDArrayConverter c2 = getConverterForClass(arr2, type);
                return new TwoStepNDArrayConverter(arr.get().getClass(), type, c1, c2);
            }
        }

        return null;
    }

    public NDArrayConverter getConverterForType(NDArray arr, NDArrayFormat<?> type) {
        if (factories == null)
            init();
        for (NDArrayConverter c : factories) {
            if (c.canConvert(arr, type)) {
                return c;
            }
        }
        return null;
    }

    public static void addConverter(NDArrayConverter f) {
        INSTANCE.addFactoryInstance(f);
    }

    @AllArgsConstructor
    private static class TwoStepNDArrayConverter implements NDArrayConverter {
        private Class<?> cFrom;
        private Class<?> cTo;
        private NDArrayConverter c1;
        private NDArrayConverter c2;

        @Override
        public boolean canConvert(NDArray from, NDArrayFormat<?> to) {
            return false;
        }

        @Override
        public boolean canConvert(NDArray from, Class<?> to) {
            return cFrom.isAssignableFrom(from.get().getClass()) && to.isAssignableFrom(cTo);
        }

        @Override
        public <T> T convert(NDArray from, NDArrayFormat<T> to) {
            throw new UnsupportedOperationException("Not supported");
        }

        @Override
        public <T> T convert(NDArray from, Class<T> to) {
            NDArray sArr = NDArray.create(c1.convert(from, SerializedNDArray.class));
            return (T) c2.convert(sArr, cTo);
        }
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/NDArrayFactoryRegistry.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.registry; import ai.konduit.serving.pipeline.api.format.ImageFactory; import ai.konduit.serving.pipeline.api.format.NDArrayFactory; import lombok.NonNull; import java.util.List; public class NDArrayFactoryRegistry extends BaseFactoryRegistry<NDArrayFactory> { private static final NDArrayFactoryRegistry INSTANCE = new NDArrayFactoryRegistry(); protected NDArrayFactoryRegistry(){ super(NDArrayFactory.class); } public static int numFactories(){ return INSTANCE.registryNumFactories(); } public static List<NDArrayFactory> getFactories(){ return INSTANCE.registryGetFactories(); } public static NDArrayFactory getFactoryFor(@NonNull Object o){ return INSTANCE.registryGetFactoryFor(o); } public static void addFactory(NDArrayFactory f){ INSTANCE.addFactoryInstance(f); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/registry/PipelineRegistry.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.registry; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.ServiceLoader; @Slf4j public class PipelineRegistry { private static List<PipelineStepRunnerFactory> stepRunnerFactories; public static List<PipelineStepRunnerFactory> getStepRunnerFactories(){ if(stepRunnerFactories == null) initStepRunnerFactories(); return stepRunnerFactories; } private static void initStepRunnerFactories(){ ServiceLoader<PipelineStepRunnerFactory> sl = ServiceLoader.load(PipelineStepRunnerFactory.class); Iterator<PipelineStepRunnerFactory> iterator = sl.iterator(); List<PipelineStepRunnerFactory> f = new ArrayList<>(); while(iterator.hasNext()){ f.add(iterator.next()); } stepRunnerFactories = f; log.info("Loaded {} PipelineStepRunnerFactory instances", f.size()); } public static void registerStepRunnerFactory(@NonNull PipelineStepRunnerFactory f){ if(stepRunnerFactories == null) initStepRunnerFactories(); stepRunnerFactories.add(f); } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings/DirectoryFetcher.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.settings; import ai.konduit.serving.pipeline.settings.constants.Constants; import ai.konduit.serving.pipeline.settings.constants.EnvironmentConstants; import ai.konduit.serving.pipeline.settings.constants.PropertiesConstants; import lombok.AccessLevel; import lombok.NoArgsConstructor; import java.io.File; import java.nio.file.Paths; /** * This class is responsible for fetching different directories for konduit-serving */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public class DirectoryFetcher { /** * Creates the working directory if missing and fetches it. * @return konduit-serving working directory. */ public static File getWorkingDir() { return createAndValidateDirectory( KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.WORKING_DIR), System.getProperty(PropertiesConstants.WORKING_DIR), getDefaultWorkingDir() ) ); } /** * Creates the vertx runtime and cache data directory, if missing and fetches it. * @return konduit-serving vertx cache and runtime data directory. */ public static File getVertxDir() { return createAndValidateDirectory( KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.VERTX_DIR), System.getProperty(PropertiesConstants.VERTX_DIR), getDefaultVertxDir() ) ); } /** * Creates the build directory if missing and fetches it. * @return konduit-serving build directory. */ public static File getBuildDir() { return createAndValidateDirectory( KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.BUILD_DIR), System.getProperty(PropertiesConstants.BUILD_DIR), getDefaultBuildDir() ) ); } /** * Creates the profiles directory if missing and fetches it. * @return konduit-serving profiles directory. */ public static File getProfilesDir() { return createAndValidateDirectory( KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.PROFILES_DIR), System.getProperty(PropertiesConstants.PROFILES_DIR), getDefaultProfilesDir() ) ); } /** * Creates the server data directory if missing and fetches it. * @return konduit-serving server data directory */ public static File getServersDataDir() { return createAndValidateDirectory(new File(getWorkingDir(), Constants.DEFAULT_SERVERS_DATA_DIR_NAME)); } /** * Creates the logs endpoint logs data directory if missing and fetches it. * @return konduit-serving logs endpoint logs data directory */ public static File getEndpointLogsDir() { return createAndValidateDirectory( KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.ENDPOINT_LOGS_DIR), System.getProperty(PropertiesConstants.ENDPOINT_LOGS_DIR), getDefaultEndpointLogsDir() ) ); } /** * Creates the command logs directory if missing and fetches it. 
* @return konduit-serving command logs directory */ public static File getCommandLogsDir() { return createAndValidateDirectory( KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.COMMAND_LOGS_DIR), System.getProperty(PropertiesConstants.COMMAND_LOGS_DIR), getDefaultCommandLogsDir() ) ); } /** * Creates the file uploads directory if missing and fetches it. * @return konduit-serving file upldads directory */ public static File getFileUploadsDir() { return createAndValidateDirectory( KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.FILE_UPLOADS_DIR), System.getProperty(PropertiesConstants.FILE_UPLOADS_DIR), getDefaultFileUploadsDir() ) ); } /** * Returns user home directory * @return user home directory */ public static String getUserHomeDir() { return System.getProperty("user.home"); } /** * Creates the default working directory if missing and fetches it. * @return konduit-serving default working directory absolute path. */ public static String getDefaultWorkingDir() { return createAndValidateDirectory(Paths.get(getUserHomeDir(), Constants.DEFAULT_WORKING_BASE_DIR_NAME).toFile()).getAbsolutePath(); } /** * Creates the default vertx runtime and cache data directory, if missing and fetches it. * @return konduit-serving default vertx runtime and cache data directory absolute path. */ public static String getDefaultVertxDir() { return createAndValidateDirectory(new File(getWorkingDir(), Constants.DEFAULT_VERTX_DIR_NAME)).getAbsolutePath(); } /** * Creates the default build directory if missing and fetches it. * @return konduit-serving default build directory absolute path. */ public static String getDefaultBuildDir() { return createAndValidateDirectory(new File(getWorkingDir(), Constants.DEFAULT_BUILD_DIR_NAME)).getAbsolutePath(); } /** * Creates the default profiles directory if missing and fetches it. * @return konduit-serving default profiles directory absolute path. */ public static String getDefaultProfilesDir() { return createAndValidateDirectory(new File(getWorkingDir(), Constants.DEFAULT_PROFILES_DIR_NAME)).getAbsolutePath(); } /** * Creates the default logs endpoint data directory if missing and fetches it. * @return konduit-serving default logs endpoint data directory absolute path. */ public static String getDefaultEndpointLogsDir() { return createAndValidateDirectory(new File(getWorkingDir(), Constants.DEFAULT_ENDPOINT_LOGS_DIR_NAME)).getAbsolutePath(); } /** * Creates the default command logs directory if missing and fetches it. * @return konduit-serving default command logs directory absolute path. */ public static String getDefaultCommandLogsDir() { return createAndValidateDirectory(new File(getWorkingDir(), Constants.DEFAULT_COMMAND_LOGS_DIR_NAME)).getAbsolutePath(); } /** * Creates the default file upload directory if missing and fetches it. * @return konduit-serving default file upload directory absolute path. */ public static String getDefaultFileUploadsDir() { return createAndValidateDirectory(System.getProperty("java.io.tmpdir")).getAbsolutePath(); } /** * Creates a directory based on the given path string if missing * @param directoryPath a string contain the path string of the directory location * @return the created directory. */ public static File createAndValidateDirectory(String directoryPath) { return createAndValidateDirectory(new File(directoryPath)); } /** * Creates a directory based on the given path * @param directory a string contain the path of the directory location * @return the created directory. 
*/ public static File createAndValidateDirectory(File directory) { if(directory.exists()) { if(directory.isDirectory()) { return directory; } else { throw new IllegalStateException("Invalid directory: " + directory.getAbsolutePath()); } } else { if (directory.mkdirs()) { if (directory.isDirectory()) { return directory; } else { throw new IllegalStateException("Invalid directory: " + directory.getAbsolutePath()); } } else { throw new IllegalStateException("Unable to create directory: " + directory.getAbsolutePath()); } } } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings/KonduitSettings.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.settings; import ai.konduit.serving.pipeline.settings.constants.Constants; import ai.konduit.serving.pipeline.settings.constants.EnvironmentConstants; import ai.konduit.serving.pipeline.settings.constants.PropertiesConstants; import org.nd4j.shade.guava.base.Strings; import java.util.UUID; import static ai.konduit.serving.pipeline.settings.constants.PropertiesConstants.SERVING_ID; public class KonduitSettings { public static boolean getStartHttpServerForKafka() { return getStartHttpServerForKafka(Constants.DEFAULT_START_HTTP_SERVER_FOR_KAFKA); } public static boolean getStartHttpServerForKafka(boolean defaultValue) { return Boolean.parseBoolean(KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.START_HTTP_SERVER_FOR_KAFKA), System.getProperty(PropertiesConstants.START_HTTP_SERVER_FOR_KAFKA), String.valueOf(defaultValue)) ); } public static String getHttpKafkaHost() { return getHttpKafkaHost(null); } public static String getHttpKafkaHost(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.HTTP_KAFKA_HOST), System.getProperty(PropertiesConstants.HTTP_KAFKA_HOST), defaultValue != null ? defaultValue : Constants.DEFAULT_HTTP_KAFKA_HOST ); } public static int getHttpKafkaPort() { return getHttpKafkaPort(Constants.DEFAULT_HTTP_KAFKA_PORT); } public static int getHttpKafkaPort(int defaultValue) { return Integer.parseInt(KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.HTTP_KAFKA_PORT), System.getProperty(PropertiesConstants.HTTP_KAFKA_PORT), String.valueOf(defaultValue)) ); } public static String getConsumerTopicName() { return getConsumerTopicName(null); } public static String getConsumerTopicName(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.CONSUMER_TOPIC_NAME), System.getProperty(PropertiesConstants.CONSUMER_TOPIC_NAME), defaultValue != null ? defaultValue : Constants.DEFAULT_CONSUMER_TOPIC_NAME ); } public static String getKafkaConsumerKeyDeserializerClass() { return getKafkaConsumerKeyDeserializerClass(null); } public static String getKafkaConsumerKeyDeserializerClass(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.KAFKA_CONSUMER_KEY_DESERIALIZER_CLASS), System.getProperty(PropertiesConstants.KAFKA_CONSUMER_KEY_DESERIALIZER_CLASS), defaultValue != null ? 
defaultValue : Constants.DEFAULT_KAFKA_CONSUMER_KEY_DESERIALIZER_CLASS ); } public static String getKafkaConsumerValueDeserializerClass() { return getKafkaConsumerValueDeserializerClass(null); } public static String getKafkaConsumerValueDeserializerClass(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.KAFKA_CONSUMER_VALUE_DESERIALIZER_CLASS), System.getProperty(PropertiesConstants.KAFKA_CONSUMER_VALUE_DESERIALIZER_CLASS), defaultValue != null ? defaultValue : Constants.DEFAULT_KAFKA_CONSUMER_VALUE_DESERIALIZER_CLASS ); } public static String getConsumerGroupId() { return getConsumerGroupId(null); } public static String getConsumerGroupId(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.CONSUMER_GROUP_ID), System.getProperty(PropertiesConstants.CONSUMER_GROUP_ID), defaultValue != null ? defaultValue : Constants.DEFAULT_CONSUMER_GROUP_ID ); } public static String getConsumerAutoOffsetReset() { return getConsumerAutoOffsetReset(null); } public static String getConsumerAutoOffsetReset(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.CONSUMER_AUTO_OFFSET_RESET), System.getProperty(PropertiesConstants.CONSUMER_AUTO_OFFSET_RESET), defaultValue != null ? defaultValue : Constants.DEFAULT_CONSUMER_AUTO_OFFSET_RESET ); } public static String getConsumerAutoCommit() { return getConsumerAutoCommit(null); } public static String getConsumerAutoCommit(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.CONSUMER_AUTO_COMMIT), System.getProperty(PropertiesConstants.CONSUMER_AUTO_COMMIT), defaultValue != null ? defaultValue : Constants.DEFAULT_CONSUMER_AUTO_COMMIT ); } public static String getProducerTopicName() { return getProducerTopicName(null); } public static String getProducerTopicName(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.PRODUCER_TOPIC_NAME), System.getProperty(PropertiesConstants.PRODUCER_TOPIC_NAME), defaultValue != null ? defaultValue : Constants.DEFAULT_PRODUCER_TOPIC_NAME ); } public static String getKafkaProducerKeySerializerClass() { return getKafkaProducerKeySerializerClass(null); } public static String getKafkaProducerKeySerializerClass(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.KAFKA_PRODUCER_KEY_SERIALIZER_CLASS), System.getProperty(PropertiesConstants.KAFKA_PRODUCER_KEY_SERIALIZER_CLASS), defaultValue != null ? defaultValue : Constants.DEFAULT_KAFKA_PRODUCER_KEY_SERIALIZER_CLASS ); } public static String getKafkaProducerValueSerializerClass() { return getKafkaProducerValueSerializerClass(null); } public static String getKafkaProducerValueSerializerClass(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.KAFKA_PRODUCER_VALUE_SERIALIZER_CLASS), System.getProperty(PropertiesConstants.KAFKA_PRODUCER_VALUE_SERIALIZER_CLASS), defaultValue != null ? defaultValue : Constants.DEFAULT_KAFKA_PRODUCER_VALUE_SERIALIZER_CLASS ); } public static String getProducerAcks() { return getProducerAcks(null); } public static String getProducerAcks(String defaultValue) { return KonduitSettings.fetchValueBasedOnPriority( System.getenv(EnvironmentConstants.PRODUCER_ACKS), System.getProperty(PropertiesConstants.PRODUCER_ACKS), defaultValue != null ? 
defaultValue : Constants.DEFAULT_PRODUCER_ACKS ); } public static String getServingId() { if(System.getProperty(SERVING_ID) == null) { System.setProperty(SERVING_ID, "id-not-set-" + UUID.randomUUID()); } return System.getProperty(SERVING_ID); } /** * Fetches the values based on their priority. If the first value is null or an empty string, * it will fetch the second value, and if the second value is null or an empty string then it will * fetch the default value. * @param first first value in priority * @param second second value in priority * @param defaultValue the default value * @return fetched value based on the priority. */ static String fetchValueBasedOnPriority(String first, String second, String defaultValue) { if (!Strings.isNullOrEmpty(first)) { return first; } else if (!Strings.isNullOrEmpty(second)){ return second; } else { return defaultValue; } } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings/constants/Constants.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.settings.constants; import lombok.AccessLevel; import lombok.NoArgsConstructor; /** * This class contains important keys for different operations inside konduit-serving */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public class Constants { /** * Date format to be used in konduit-serving */ public static final String DATE_FORMAT = "dd-MMMM-yyyy HH:mm:ss a z (Z)"; /** * The name of the default base name of the konduit-serving working directory. */ public static final String DEFAULT_WORKING_BASE_DIR_NAME = ".konduit-serving"; /** * The name of the default konduit-serving vertx runtime and cache data directory. */ public static final String DEFAULT_VERTX_DIR_NAME = "vertx"; /** * The name of the default konduit-serving build data directory. */ public static final String DEFAULT_BUILD_DIR_NAME = "build"; /** * The name of the default konduit-serving profiles data directory . */ public static final String DEFAULT_PROFILES_DIR_NAME = "profiles"; /** * Default base directory name for the endpoints log (/logs). */ public static final String DEFAULT_ENDPOINT_LOGS_DIR_NAME = "endpoint_logs"; /** * Default directory name for containing the command log files. */ public static final String DEFAULT_COMMAND_LOGS_DIR_NAME = "command_logs"; /** * Default directory name for containing the running server data. The files in * this directory usually contains the server configurations. The format of the files is * {@code <pid>.data} */ public static final String DEFAULT_SERVERS_DATA_DIR_NAME = "servers"; /** * Name of the log file which contains the logging data for the {@code /logs} * endpoint. 
*/ public static final String DEFAULT_MAIN_ENDPOINT_LOGS_FILE = "main.log"; public static final boolean DEFAULT_START_HTTP_SERVER_FOR_KAFKA = true; public static final String DEFAULT_HTTP_KAFKA_HOST = "localhost"; public static final int DEFAULT_HTTP_KAFKA_PORT = 0; public static final String DEFAULT_CONSUMER_TOPIC_NAME = "inference-in"; public static final String DEFAULT_KAFKA_CONSUMER_KEY_DESERIALIZER_CLASS = "io.vertx.kafka.client.serialization.JsonObjectDeserializer"; public static final String DEFAULT_KAFKA_CONSUMER_VALUE_DESERIALIZER_CLASS = "io.vertx.kafka.client.serialization.JsonObjectDeserializer"; public static final String DEFAULT_CONSUMER_GROUP_ID = "konduit-serving-consumer-group"; public static final String DEFAULT_CONSUMER_AUTO_OFFSET_RESET = "earliest"; public static final String DEFAULT_CONSUMER_AUTO_COMMIT = "true"; public static final String DEFAULT_PRODUCER_TOPIC_NAME = "inference-out"; public static final String DEFAULT_KAFKA_PRODUCER_KEY_SERIALIZER_CLASS = "io.vertx.kafka.client.serialization.JsonObjectSerializer"; public static final String DEFAULT_KAFKA_PRODUCER_VALUE_SERIALIZER_CLASS = "io.vertx.kafka.client.serialization.JsonObjectSerializer"; public static final String DEFAULT_PRODUCER_ACKS = "1"; }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings/constants/EnvironmentConstants.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.settings.constants; import lombok.AccessLevel; import lombok.NoArgsConstructor; /** * This class contains important constants for different environment variable settings * for konduit-serving. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public class EnvironmentConstants { /** * Environment variable name for storing port number for the konduit server. * This variable will be prioritizes over {@link ai.konduit.serving.vertx.config.InferenceConfiguration#port()} */ public static final String KONDUIT_SERVING_PORT = "KONDUIT_SERVING_PORT"; /** * An environment variable for setting the working directory for konduit serving. * The working directory contains the runtime files generated by vertx or * konduit-serving itself. The runtime files could contain logs, * running process details, vertx cache files etc. */ public static final String WORKING_DIR = "KONDUIT_WORKING_DIR"; /** * Environment variable specifying vertx runtime and cache directory. */ public static final String VERTX_DIR = "KONDUIT_VERTX_DIR"; /** * Environment variable specifying build data directory where build logs for the build CLI are kept. */ public static final String BUILD_DIR = "KONDUIT_BUILD_DIR"; /** * Environment variable specifying profiles data directory where details of individual profiles are kept. */ public static final String PROFILES_DIR = "KONDUIT_PROFILES_DIR"; /** * This variable is responsible for setting the path where the log files for a konduit server * is kept for the `/logs` endpoint. 
*/ public static final String ENDPOINT_LOGS_DIR = "KONDUIT_ENDPOINT_LOGS_DIR"; /** * Default directory for containing the command line logs for konduit-serving */ public static final String COMMAND_LOGS_DIR = "KONDUIT_COMMAND_LOGS_DIR"; /** * Sets the directory where the file uploads are kept for Vertx BodyHandler */ public static final String FILE_UPLOADS_DIR = "KONDUIT_FILE_UPLOADS_DIR"; public static final String START_HTTP_SERVER_FOR_KAFKA = "KONDUIT_START_HTTP_SERVER_FOR_KAFKA"; public static final String HTTP_KAFKA_HOST = "KONDUIT_HTTP_KAFKA_HOST"; public static final String HTTP_KAFKA_PORT = "KONDUIT_HTTP_KAFKA_PORT"; public static final String CONSUMER_TOPIC_NAME = "KONDUIT_CONSUMER_TOPIC_NAME"; public static final String KAFKA_CONSUMER_KEY_DESERIALIZER_CLASS = "KONDUIT_KAFKA_CONSUMER_SERIALIZER_CLASS"; public static final String KAFKA_CONSUMER_VALUE_DESERIALIZER_CLASS = "KONDUIT_KAFKA_CONSUMER_DESERIALIZER_CLASS"; public static final String CONSUMER_GROUP_ID = "KONDUIT_CONSUMER_GROUP_ID"; public static final String CONSUMER_AUTO_OFFSET_RESET = "KONDUIT_CONSUMER_OFFSET_RESET"; public static final String CONSUMER_AUTO_COMMIT = "KONDUIT_CONSUMER_AUTO_COMMIT"; public static final String PRODUCER_TOPIC_NAME = "KONDUIT_PRODUCER_TOPIC_NAME"; public static final String KAFKA_PRODUCER_KEY_SERIALIZER_CLASS = "KONDUIT_KAFKA_PRODUCER_SERIALIZER_CLASS"; public static final String KAFKA_PRODUCER_VALUE_SERIALIZER_CLASS = "KONDUIT_KAFKA_PRODUCER_DESERIALIZER_CLASS"; public static final String PRODUCER_ACKS = "KONDUIT_PRODUCER_ACKS"; }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/settings/constants/PropertiesConstants.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.settings.constants; import lombok.AccessLevel; import lombok.NoArgsConstructor; /** * This class contains important constants for different system properties * for konduit-serving. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public class PropertiesConstants { /** * For setting the working directory for konduit serving. * The working directory contains the runtime files generated by vertx or * konduit-serving itself. The runtime files could contain logs, * running process details, vertx cache files etc. */ public static final String WORKING_DIR = "konduit.working.dir"; /** * System property specifying a directory for keeping vertx runtime and cache files. */ public static final String VERTX_DIR = "konduit.vertx.dir"; /** * System property specifying build data directory where files for the build CLI are kept. */ public static final String BUILD_DIR = "konduit.build.dir"; /** * System property specifying profiles data directory where details of individual profiles are kept. */ public static final String PROFILES_DIR = "konduit.profiles.dir"; /** * This system property is responsible for setting the path where the log files for a konduit server * is kept for the `/logs` endpoint. 
*/ public static final String ENDPOINT_LOGS_DIR = "konduit.endpoint.logs.dir"; /** * Default directory for containing the command line logs for konduit-serving */ public static final String COMMAND_LOGS_DIR = "konduit.command.logs.dir"; /** * Sets the directory where the file uploads are kept for Vertx BodyHandler */ public static final String FILE_UPLOADS_DIR = "konduit.file.uploads.dir"; public static final String START_HTTP_SERVER_FOR_KAFKA = "konduit.start.http.server.for.kafka"; public static final String HTTP_KAFKA_HOST = "konduit.http.kafka.host"; public static final String HTTP_KAFKA_PORT = "konduit.http.kafka.port"; public static final String CONSUMER_TOPIC_NAME = "konduit.consumer.topic.name"; public static final String KAFKA_CONSUMER_KEY_DESERIALIZER_CLASS = "konduit.kafka.consumer.serializer.class"; public static final String KAFKA_CONSUMER_VALUE_DESERIALIZER_CLASS = "konduit.kafka.consumer.deserializer.class"; public static final String CONSUMER_GROUP_ID = "konduit.consumer.group.id"; public static final String CONSUMER_AUTO_OFFSET_RESET = "konduit.consumer.offset.reset"; public static final String CONSUMER_AUTO_COMMIT = "konduit.consumer.auto.commit"; public static final String PRODUCER_TOPIC_NAME = "konduit.producer.topic.name"; public static final String KAFKA_PRODUCER_KEY_SERIALIZER_CLASS = "konduit.kafka.producer.serializer.class"; public static final String KAFKA_PRODUCER_VALUE_SERIALIZER_CLASS = "konduit.kafka.producer.deserializer.class"; public static final String PRODUCER_ACKS = "konduit.producer.acks"; public static final String SERVING_ID = "serving.id"; }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/util/ArchiveUtils.java
/******************************************************************************* * Copyright (c) 2015-2018 Skymind, Inc. * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.util; import lombok.extern.slf4j.Slf4j; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.nd4j.common.base.Preconditions; import java.io.*; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import java.util.zip.GZIPInputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipInputStream; /** * @author Adam Gibson */ @Slf4j public class ArchiveUtils { protected ArchiveUtils() { } /** * Extracts all files from the archive to the specified destination.<br> * Note: Logs the path of all extracted files by default. Use {@link #unzipFileTo(String, String, boolean)} if * logging is not desired.<br> * Can handle .zip, .jar, .tar.gz, .tgz, .tar, and .gz formats. * Format is interpreted from the filename * * @param file the file to extract the files from * @param dest the destination directory. Will be created if it does not exist * @throws IOException If an error occurs accessing the files or extracting */ public static void unzipFileTo(String file, String dest) throws IOException { unzipFileTo(file, dest, true); } /** * Extracts all files from the archive to the specified destination, optionally logging the extracted file path.<br> * Can handle .zip, .jar, .tar.gz, .tgz, .tar, and .gz formats. * Format is interpreted from the filename * * @param file the file to extract the files from * @param dest the destination directory. 
Will be created if it does not exist * @param logFiles If true: log the path of every extracted file; if false do not log * @throws IOException If an error occurs accessing the files or extracting */ public static void unzipFileTo(String file, String dest, boolean logFiles) throws IOException { File target = new File(file); if (!target.exists()) throw new IllegalArgumentException("Archive doesnt exist"); if (!new File(dest).exists()) new File(dest).mkdirs(); FileInputStream fin = new FileInputStream(target); int BUFFER = 2048; byte data[] = new byte[BUFFER]; if (file.endsWith(".zip") || file.endsWith(".jar")) { try(ZipInputStream zis = new ZipInputStream(fin)) { //get the zipped file list entry ZipEntry ze = zis.getNextEntry(); while (ze != null) { String fileName = ze.getName(); String canonicalDestinationDirPath = new File(dest).getCanonicalPath(); File newFile = new File(dest + File.separator + fileName); String canonicalDestinationFile = newFile.getCanonicalPath(); if (!canonicalDestinationFile.startsWith(canonicalDestinationDirPath + File.separator)) { log.debug("Attempt to unzip entry is outside of the target dir"); throw new IOException("Entry is outside of the target dir: "); } if (ze.isDirectory()) { newFile.mkdirs(); zis.closeEntry(); ze = zis.getNextEntry(); continue; } FileOutputStream fos = new FileOutputStream(newFile); int len; while ((len = zis.read(data)) > 0) { fos.write(data, 0, len); } fos.close(); ze = zis.getNextEntry(); if(logFiles) { log.info("File extracted: " + newFile.getAbsoluteFile()); } } zis.closeEntry(); } } else if (file.endsWith(".tar.gz") || file.endsWith(".tgz") || file.endsWith(".tar")) { BufferedInputStream in = new BufferedInputStream(fin); TarArchiveInputStream tarIn; if(file.endsWith(".tar")){ //Not compressed tarIn = new TarArchiveInputStream(in); } else { GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); tarIn = new TarArchiveInputStream(gzIn); } TarArchiveEntry entry; /* Read the tar entries using the getNextEntry method **/ while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) { if(logFiles) { log.info("Extracting: " + entry.getName()); } /* If the entry is a directory, create the directory. */ if (entry.isDirectory()) { File f = new File(dest + File.separator + entry.getName()); f.mkdirs(); } /* * If the entry is a file,write the decompressed file to the disk * and close destination stream. 
*/ else { int count; try(FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName()); BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);) { while ((count = tarIn.read(data, 0, BUFFER)) != -1) { destStream.write(data, 0, count); } destStream.flush(); IOUtils.closeQuietly(destStream); } } } // Close the input stream tarIn.close(); } else if (file.endsWith(".gz")) { File extracted = new File(target.getParent(), target.getName().replace(".gz", "")); if (extracted.exists()) extracted.delete(); extracted.createNewFile(); try (GZIPInputStream is2 = new GZIPInputStream(fin); OutputStream fos = FileUtils.openOutputStream(extracted)) { IOUtils.copyLarge(is2, fos); fos.flush(); } } else { throw new IllegalStateException("Unable to infer file type (compression format) from source file name: " + file); } target.delete(); } /** * List all of the files and directories in the specified tar.gz file * * @param tarFile A .tar file * @return List of files and directories */ public static List<String> tarListFiles(File tarFile) throws IOException { Preconditions.checkState(!tarFile.getPath().endsWith(".tar.gz"), ".tar.gz files should not use this method - use tarGzListFiles instead"); return tarGzListFiles(tarFile, false); } /** * List all of the files and directories in the specified tar.gz file * * @param tarGzFile A tar.gz file * @return List of files and directories */ public static List<String> tarGzListFiles(File tarGzFile) throws IOException { return tarGzListFiles(tarGzFile, true); } protected static List<String> tarGzListFiles(File file, boolean isTarGz) throws IOException { try(TarArchiveInputStream tin = isTarGz ? new TarArchiveInputStream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)))) : new TarArchiveInputStream(new BufferedInputStream(new FileInputStream(file)))) { ArchiveEntry entry; List<String> out = new ArrayList<>(); while((entry = tin.getNextTarEntry()) != null){ String name = entry.getName(); out.add(name); } return out; } } /** * List all of the files and directories in the specified .zip file * * @param zipFile Zip file * @return List of files and directories */ public static List<String> zipListFiles(File zipFile) throws IOException { List<String> out = new ArrayList<>(); try (ZipFile zf = new ZipFile(zipFile)) { Enumeration entries = zf.entries(); while (entries.hasMoreElements()) { ZipEntry ze = (ZipEntry) entries.nextElement(); out.add(ze.getName()); } } return out; } /** * Extract a single file from a .zip file. Does not support directories * * @param zipFile Zip file to extract from * @param destination Destination file * @param pathInZip Path in the zip to extract * @throws IOException If exception occurs while reading/writing */ public static void zipExtractSingleFile(File zipFile, File destination, String pathInZip) throws IOException { try (ZipFile zf = new ZipFile(zipFile); InputStream is = new BufferedInputStream(zf.getInputStream(zf.getEntry(pathInZip))); OutputStream os = new BufferedOutputStream(new FileOutputStream(destination))) { IOUtils.copy(is, os); } } /** * Extract a single file from a tar.gz file. Does not support directories. * NOTE: This should not be used for batch extraction of files, due to the need to iterate over the entries until the * specified entry is found. 
Use {@link #unzipFileTo(String, String)} for batch extraction instead * * @param tarGz A tar.gz file * @param destination The destination file to extract to * @param pathInTarGz The path in the tar.gz file to extract */ public static void tarGzExtractSingleFile(File tarGz, File destination, String pathInTarGz) throws IOException { try(TarArchiveInputStream tin = new TarArchiveInputStream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(tarGz))))) { ArchiveEntry entry; boolean extracted = false; while((entry = tin.getNextTarEntry()) != null){ String name = entry.getName(); if(pathInTarGz.equals(name)){ try(OutputStream os = new BufferedOutputStream(new FileOutputStream(destination))){ IOUtils.copy(tin, os); } extracted = true; } } Preconditions.checkState(extracted, "No file was extracted. File not found? %s", pathInTarGz); } } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/util/DataUtils.java
package ai.konduit.serving.pipeline.util; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.ValueType; import lombok.NonNull; import java.util.Arrays; import java.util.List; public class DataUtils { private DataUtils(){ } public static boolean listEquals(List<?> list1, List<?> list2, ValueType l1Type, ValueType l2Type){ if(l1Type != l2Type) return false; if(list1.size() != list2.size()){ return false; } if(l1Type == ValueType.BYTES){ List<byte[]> lb1 = (List<byte[]>)list1; List<byte[]> lb2 = (List<byte[]>)list2; for( int i=0; i<lb1.size(); i++ ){ byte[] b1 = lb1.get(i); byte[] b2 = lb2.get(i); if(b1.length != b2.length) return false; if(!Arrays.equals(b1, b2)) return false; } } else if(l1Type == ValueType.LIST){ throw new UnsupportedOperationException("Nested lists equality not yet implemented"); } else { if(!list1.equals(list2)) return false; } return true; } public static String inferField(@NonNull Data d, @NonNull ValueType vt, boolean allowLists, @NonNull String errPrefix){ String errMultipleKeys = errPrefix + ": " + vt + " was not provided could not be inferred: multiple " + vt + " fields exist: %s and %s"; String errNoKeys = errPrefix + ": " + vt + " field name was not provided and could not be inferred: no " + vt + " fields exist"; return inferField(d, vt, allowLists, errMultipleKeys, errNoKeys); } public static String inferField(@NonNull Data d, @NonNull ValueType vt, boolean allowLists, @NonNull String errMultipleKeys, @NonNull String errNoKeys){ String field = null; for(String s : d.keys()){ if(d.type(s) == vt){ if(field == null) { field = s; } else { throw new IllegalStateException(String.format(errMultipleKeys, field, s)); } } else if(allowLists & d.type(s) == ValueType.LIST && d.listType(s) == vt){ if(field == null) { field = s; } else { throw new IllegalStateException(String.format(errMultipleKeys, field, s)); } } } if(field == null) throw new IllegalStateException(errNoKeys); return field; } public static String inferListField(@NonNull Data d, @NonNull ValueType vt, @NonNull String errPrefix){ String errMultipleKeys = errPrefix + ": List<" + vt + "> field name was not provided and could not be inferred: multiple List<" + vt + "> fields exist: %s and %s"; String errNoKeys = errPrefix + ": List<" + vt + "> field name was not provided and could not be inferred: no List<" + vt + "> fields exist"; return inferListField(d, vt, errMultipleKeys, errNoKeys); } public static String inferListField(@NonNull Data d, @NonNull ValueType vt, @NonNull String errMultipleKeys, @NonNull String errNoKeys){ String field = null; for(String s : d.keys()){ if(d.type(s) == ValueType.LIST && d.listType(s) == vt){ if(field == null) { field = s; } else { throw new IllegalStateException(String.format(errMultipleKeys, field, s)); } } } if(field == null) throw new IllegalStateException(errNoKeys); return field; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/util/FileUtils.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.util; import java.io.File; public class FileUtils { private FileUtils(){ } public static File getTempFileBaseDir(){ File f = new File(System.getProperty("java.io.tmpdir"), "konduit-serving"); if(!f.exists()) f.mkdirs(); return f; } public static File getTempFileDir(String subdirectory){ File f = new File(getTempFileBaseDir(), subdirectory); if(!f.exists()) f.mkdirs(); return f; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/util/NDArrayUtils.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.util; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.NDArrayType; public class NDArrayUtils { private NDArrayUtils(){ } public static NDArray FloatNDArrayToDouble(NDArray ndarr) { if (ndarr.type() == NDArrayType.FLOAT || ndarr.type() == NDArrayType.FLOAT16 || ndarr.type() == NDArrayType.BFLOAT16) { float[][] farr = ndarr.getAs(float[][].class); double[][] darr = new double[(int) ndarr.shape()[0]][(int) ndarr.shape()[1]]; for (int i = 0; i < farr.length; i++) { for (int j = 0; j < farr[i].length; j++) { darr[i][j] = Double.valueOf(farr[i][j]); } } return NDArray.create(darr); } return ndarr; } public static double[] squeeze(NDArray arr) { // we have [numClasses] array, so do not modify nothing if (arr.shape().length == 1) { return arr.getAs(double[].class); } // i.e we have [1, numClasses] array if (arr.shape().length == 2 && arr.shape()[0] == 1) { return arr.getAs(double[][].class)[0]; } throw new UnsupportedOperationException("Failed squeezing NDArray"); } public static double[] getMaxValueAndIndex(double[] arr) { double max = arr[0]; int maxIdx = 0; for (int i = 1; i < arr.length; i++) { if (arr[i] > max) { max = arr[i]; maxIdx = i; } } return new double[]{max, maxIdx}; } /** * Convert a NCHW (channels first) float array to NHWC (channels last) format * @param nchw NCHW array * @return NHWC array */ public static float[][][][] nchwToNhwc(float[][][][] nchw){ int n = nchw.length; int c = nchw[0].length; int h = nchw[0][0].length; int w = nchw[0][0][0].length; float[][][][] nhwc = new float[n][h][w][c]; for( int i=0; i<n; i++ ){ for( int j=0; j<h; j++ ){ for( int k=0; k<w; k++ ){ for( int l=0; l<c; l++ ){ nhwc[i][j][k][l] = nchw[i][l][j][k]; } } } } return nhwc; } /** * Convert a NHWC (channels last) float array to a NCHW (channels first) format * @param nhwc NHWC array * @return NCHW array */ public static float[][][][] nhwcToNchw(float[][][][] nhwc){ int n = nhwc.length; int h = nhwc[0].length; int w = nhwc[0][0].length; int c = nhwc[0][0][0].length; float[][][][] nchw = new float[n][c][h][w]; for( int i=0; i<n; i++ ){ for( int j=0; j < h; j++ ){ for( int k=0; k < w; k++ ){ for( int l=0; l <c; l++ ){ nchw[i][l][j][k] = nhwc[i][j][k][l]; } } } } return nchw; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/util/ObjectMappers.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.util; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import ai.konduit.serving.pipeline.settings.constants.Constants; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.nd4j.shade.jackson.annotation.JsonAutoDetect; import org.nd4j.shade.jackson.annotation.JsonInclude; import org.nd4j.shade.jackson.annotation.PropertyAccessor; import org.nd4j.shade.jackson.core.JsonProcessingException; import org.nd4j.shade.jackson.databind.*; import org.nd4j.shade.jackson.databind.jsontype.NamedType; import org.nd4j.shade.jackson.dataformat.yaml.YAMLFactory; import org.nd4j.shade.jackson.dataformat.yaml.YAMLGenerator; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.*; /** * A simple object mapper holder for using one single {@link ObjectMapper} across the whole project. */ @Slf4j public class ObjectMappers { private static final Set<JsonSubType> manuallyRegisteredSubtypes = new HashSet<>(); private static ObjectMapper jsonMapper = configureMapper(new ObjectMapper()); private static ObjectMapper yamlMapper = configureMapper(new ObjectMapper(new YAMLFactory() .disable(YAMLGenerator.Feature.USE_NATIVE_TYPE_ID) // For preventing YAML from adding `!<TYPE>` with polymorphic objects // and use Jackson's type information mechanism. 
)); private ObjectMappers() { } /** * Get a single object mapper for use with reading and writing JSON * * @return JSON object mapper */ public static ObjectMapper json() { return jsonMapper; } /** * Get a single object mapper for use with reading and writing YAML * * @return YAML object mapper */ public static ObjectMapper yaml() { return yamlMapper; } public static ObjectMapper configureMapper(ObjectMapper ret) { ret.setDateFormat(new SimpleDateFormat(Constants.DATE_FORMAT)); ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, false); //Use order in which fields are defined in classes ret.enable(SerializationFeature.INDENT_OUTPUT); ret.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.NONE); ret.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); ret.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); ret.setSerializationInclusion(JsonInclude.Include.NON_NULL); if (ret.getFactory() instanceof YAMLFactory) { ret.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE); } ret.configure(SerializationFeature.FAIL_ON_UNWRAPPED_TYPE_IDENTIFIERS, false); //Configure subtypes - via service loader from other modules List<JsonSubType> l = getAllSubtypes(); for(JsonSubType t : l){ NamedType nt = new NamedType(t.getSubtype(), t.getName()); ret.registerSubtypes(nt); } return ret; } /** * Convert the specified object to a YAML String, throwing an unchecked exception (RuntimeException) if conversion fails * * @param o Object * @return Object as YAML */ public static String toYaml(@NonNull Object o) { try { return yaml().writeValueAsString(o); } catch (JsonProcessingException e) { throw new RuntimeException("Error converting object of class " + o.getClass().getName() + " to YAML", e); } } /** * Convert the specified object to a JSON String, throwing an unchecked exception (RuntimeException) if conversion fails * * @param o Object * @return Object as JSON */ public static String toJson(@NonNull Object o) { try { return json().writeValueAsString(o); } catch (JsonProcessingException e) { throw new RuntimeException("Error converting object of class " + o.getClass().getName() + " to JSON", e); } } /** * Convert the specified YAML String to an object of the specified class, throwing an unchecked exception (RuntimeException) if conversion fails * * @param yaml YAML string * @param c Class for the object * @return Object from YAML */ public static <T> T fromYaml(@NonNull String yaml, @NonNull Class<T> c) { try { return yaml().readValue(yaml, c); } catch (IOException e) { throw new RuntimeException("Error deserializing YAML string to class " + c.getName(), e); } } /** * Convert the specified YAML String to an object of the specified class, throwing an unchecked exception (RuntimeException) if conversion fails * * @param json JSON string * @param c Class for the object * @return Object from JSON */ public static <T> T fromJson(@NonNull String json, @NonNull Class<T> c) { try { return json().readValue(json, c); } catch (IOException e) { throw new RuntimeException("Error deserializing JSON string to class " + c.getName(), e); } } /** * Register JSON subtypes manually. Mainly used for testing purposes. * In general ServiceLoader should be used for registering JSON subtypes. 
* * @param subTypes Subtypes to register manually */ public static void registerSubtypes(@NonNull List<JsonSubType> subTypes) { manuallyRegisteredSubtypes.addAll(subTypes); jsonMapper = configureMapper(new ObjectMapper()); yamlMapper = configureMapper(new ObjectMapper(new YAMLFactory() .disable(YAMLGenerator.Feature.USE_NATIVE_TYPE_ID) // For preventing YAML from adding `!<TYPE>` with polymorphic objects // and use Jackson's type information mechanism. )); } public static List<JsonSubType> getAllSubtypes() { ServiceLoader<JsonSubTypesMapping> sl = ServiceLoader.load(JsonSubTypesMapping.class); Iterator<JsonSubTypesMapping> iterator = sl.iterator(); List<JsonSubType> out = new ArrayList<>(); while(iterator.hasNext()){ JsonSubTypesMapping m = iterator.next(); List<JsonSubType> l = m.getSubTypesMapping(); out.addAll(l); } out.addAll(manuallyRegisteredSubtypes); return out; } public static List<JsonSubType> getSubtypesOf(Class<?> c){ List<JsonSubType> all = getAllSubtypes(); List<JsonSubType> out = new ArrayList<>(); for(JsonSubType j : all){ if(j.getConfigInterface() == c){ out.add(j); } } return out; } public static Map<Class<?>, String> getSubtypeNames() { List<JsonSubType> all = getAllSubtypes(); Map<Class<?>,String> m = new HashMap<>(); for(JsonSubType j : all){ m.put(j.getSubtype(), j.getName()); } return m; } public static JsonSubType findSubtypeByName(String name) { for(JsonSubType type : getAllSubtypes()) { if(type.getName().equals(name)){ return type; } } return null; } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/util/TestUtils.java
/* * * * ****************************************************************************** * * * Copyright (c) 2020 Konduit AI. * * * * * * This program and the accompanying materials are made available under the * * * terms of the Apache License, Version 2.0 which is available at * * * https://www.apache.org/licenses/LICENSE-2.0. * * * * * * Unless required by applicable law or agreed to in writing, software * * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * * License for the specific language governing permissions and limitations * * * under the License. * * * * * * SPDX-License-Identifier: Apache-2.0 * * ***************************************************************************** * * */ package ai.konduit.serving.pipeline.util; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FileUtils; import java.io.File; import java.io.IOException; import java.net.URL; /** * Test utilities for Konduit Serving tests * * @author Alex Black */ @Slf4j public class TestUtils { private TestUtils() { } private static File baseResourcesDir; /** * Get the base storage directory for any test resources (downloaded and cached on system) */ public static File testResourcesStorageDir() { if (baseResourcesDir == null) { File f = new File(System.getProperty("user.home"), ".konduittest/"); if (!f.exists()) f.mkdirs(); baseResourcesDir = f; } return baseResourcesDir; } public static File cache(String url, String module, String testName, String fileName) throws IOException { File testDir = TestUtils.testResourcesStorageDir(); File saveDir = new File(testDir, module + "/" + testName); File f = new File(saveDir, fileName); if (!f.exists()) { log.info("Downloading model: {} -> {}", url, f.getAbsolutePath()); FileUtils.copyURLToFile(new URL(url), f); log.info("Download complete"); } return f; } }
0
java-sources/ai/konduit/serving/konduit-serving-prometheus/0.3.0/ai/konduit/serving/metrics
java-sources/ai/konduit/serving/konduit-serving-prometheus/0.3.0/ai/konduit/serving/metrics/prometheus/KonduitServingPrometheusJsonMapping.java
package ai.konduit.serving.metrics.prometheus;import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT public class KonduitServingPrometheusJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-prometheus/0.3.0/ai/konduit/serving/metrics
java-sources/ai/konduit/serving/konduit-serving-prometheus/0.3.0/ai/konduit/serving/metrics/prometheus/PrometheusMetricsProvider.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.metrics.prometheus;

import ai.konduit.serving.pipeline.impl.metrics.MetricsProvider;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.prometheus.PrometheusConfig;
import io.micrometer.prometheus.PrometheusMeterRegistry;

public class PrometheusMetricsProvider implements MetricsProvider {

    @Override
    public MeterRegistry getRegistry() {
        return new PrometheusMeterRegistry(PrometheusConfig.DEFAULT);
    }

    @Override
    public Object getEndpoint() {
        return io.vertx.micrometer.PrometheusScrapingHandler.create();
    }
}
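A wiring sketch, assuming a Vert.x-Web Router is in scope: getEndpoint() returns vertx-micrometer's scraping handler (a Handler<RoutingContext>) typed as Object, so a cast is needed at the call site.

// Hypothetical wiring of the Prometheus scrape endpoint onto a Vert.x web Router;
// assumes io.vertx.ext.web.Router/RoutingContext and a Vertx instance in scope.
PrometheusMetricsProvider provider = new PrometheusMetricsProvider();
Router router = Router.router(vertx);
router.route("/metrics").handler((Handler<RoutingContext>) provider.getEndpoint());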
0
java-sources/ai/konduit/serving/konduit-serving-prometheus/0.3.0/ai/konduit/serving/metrics
java-sources/ai/konduit/serving/konduit-serving-prometheus/0.3.0/ai/konduit/serving/metrics/prometheus/PrometheusModuleInfo.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */
package ai.konduit.serving.metrics.prometheus;

import ai.konduit.serving.annotation.module.ModuleInfo;

@ModuleInfo("konduit-serving-prometheus")
public class PrometheusModuleInfo {
    private PrometheusModuleInfo() {
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving/python/KonduitServingPythonJsonMapping.java
package ai.konduit.serving.python;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingPythonJsonMapping implements JsonSubTypesMapping {

    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving/python/NoneType.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.python;

import org.nd4j.python4j.Python;
import org.nd4j.python4j.PythonObject;
import org.nd4j.python4j.PythonType;

/**
 * Represents the none type in python.
 *
 * @author Adam Gibson
 */
public class NoneType extends PythonType<Object> {

    private static final NoneType INSTANCE = new NoneType();

    public static NoneType instance() {
        return INSTANCE;
    }

    private NoneType() {
        super("None", Object.class);
    }

    @Override
    public Object toJava(PythonObject pythonObject) {
        // Python's None has no natural Java equivalent; the singleton is used as a marker value
        return INSTANCE;
    }

    @Override
    public PythonObject toPython(Object o) {
        return Python.None();
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving/python/PythonModuleInfo.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */
package ai.konduit.serving.python;

import ai.konduit.serving.annotation.module.ModuleInfo;

@ModuleInfo("konduit-serving-python")
public class PythonModuleInfo {
    private PythonModuleInfo() {
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving/python/PythonRunner.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.python;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.data.nd4j.data.ND4JNDArray;
import ai.konduit.serving.model.PythonIO;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.python.models.AppendType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.python.util.KonduitPythonUtils;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.python4j.PythonExecutioner;
import org.nd4j.python4j.PythonGIL;
import org.nd4j.python4j.PythonVariable;
import org.nd4j.python4j.PythonVariables;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

import static org.bytedeco.cpython.global.python.PyGILState_Check;

@CanRun(PythonStep.class)
@Slf4j
public class PythonRunner implements PipelineStepRunner {

    private PythonStep pythonStep;
    private String code;

    @SneakyThrows
    public PythonRunner(PythonStep pythonStep) {
        Preconditions.checkNotNull(pythonStep.pythonConfig(), "Python configuration must not be null!");
        this.pythonStep = pythonStep;
        String code = pythonStep.pythonConfig().getPythonCode();
        AppendType appendType = this.pythonStep.pythonConfig().getAppendType();
        String pythonLibrariesPath = this.pythonStep.pythonConfig().getPythonLibrariesPath();
        if (pythonLibrariesPath == null)
            pythonLibrariesPath = this.pythonStep.pythonConfig().resolvePythonLibrariesPath();

        if (pythonLibrariesPath != null) {
            log.info("Overriding python path " + pythonLibrariesPath);
            System.setProperty("org.eclipse.python4j.path", pythonLibrariesPath);
        } else {
            log.warn("Unable to determine python path. Python configuration has no pythonLibrariesPath specified.");
        }

        // both branches are lower-cased, matching the python4j append-type property values
        System.setProperty("org.eclipse.python4j.path.append",
                appendType == null ? AppendType.BEFORE.name().toLowerCase() : appendType.name().toLowerCase());
        new PythonExecutioner();

        if (code == null) {
            try {
                this.code = FileUtils.readFileToString(new File(pythonStep.pythonConfig().getPythonCodePath()), StandardCharsets.UTF_8);
            } catch (IOException e) {
                log.error("Unable to read code from " + pythonStep.pythonConfig().getPythonCodePath(), e);
            }
            log.info("Resolving execution code from " + pythonStep.pythonConfig().getPythonCodePath());
        } else {
            this.code = code;
        }

        String importCode = pythonStep.pythonConfig().getImportCode();
        String importCodePath = pythonStep.pythonConfig().getImportCodePath();
        if (importCode == null && importCodePath != null) {
            try {
                importCode = FileUtils.readFileToString(new File(importCodePath), StandardCharsets.UTF_8);
            } catch (IOException e) {
                log.error("Unable to read code from " + pythonStep.pythonConfig().getImportCodePath(), e);
            }
            log.info("Resolving import code from " + pythonStep.pythonConfig().getImportCodePath());
        }

        if (importCode != null) {
            try (PythonGIL ignored = PythonGIL.lock()) {
                PythonExecutioner.exec(importCode);
            }
        }
    }

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        return pythonStep;
    }

    @SneakyThrows
    @Override
    public Data exec(Context ctx, Data data) {
        Data ret = Data.empty();
        log.debug("Creating outputs");
        PythonVariables outputs = KonduitPythonUtils.createOutputVariables(pythonStep.pythonConfig());
        log.debug("Created outputs");
        PythonVariables pythonVariables = KonduitPythonUtils.createPythonVariablesFromDataInput(data, pythonStep.pythonConfig());
        log.debug("Created python variables");
        try (PythonGIL ignored = PythonGIL.lock()) {
            log.debug("Thread " + Thread.currentThread().getId() + " has the GIL. Name of thread " + Thread.currentThread().getName());
            log.debug("Py gil state " + (PyGILState_Check() > 0));
            runExec(ret, outputs, pythonVariables);
        }

        return ret;
    }

    private void runExec(Data ret, PythonVariables outputs, PythonVariables pythonVariables) throws IOException {
        log.debug("Before execution of code " + code);
        PythonExecutioner.exec(code, pythonVariables, outputs);
        log.debug("After execution of code " + code);
        Preconditions.checkNotNull(outputs, "No outputs found!");
        for (PythonVariable variable : outputs) {
            PythonIO pythonIO = pythonStep.pythonConfig().getIoOutputs().get(variable.getName());
            Preconditions.checkNotNull(pythonIO, "No variable found for " + variable.getName());
            switch (variable.getType().getName().toLowerCase()) {
                case "bool":
                    ret.put(variable.getName(), KonduitPythonUtils.getWithType(outputs, variable.getName(), Boolean.class));
                    break;
                case "list":
                    Preconditions.checkState(pythonIO.isListWithType(), "No output type specified for list with key " + variable);
                    List<Object> listValue = KonduitPythonUtils.getWithType(outputs, variable.getName(), List.class);
                    ValueType valueType = pythonIO.secondaryType();
                    Preconditions.checkNotNull(listValue, "List value returned null for output named " + variable.getName() + " type should have been list of " + valueType);
                    List<Object> convertedInput = KonduitPythonUtils.createValidListForPythonVariables(listValue, valueType);
                    KonduitPythonUtils.insertListIntoData(ret, variable.getName(), convertedInput, valueType);
                    break;
                case "bytes":
                    KonduitPythonUtils.insertBytesIntoPythonVariables(ret, outputs, variable.getName(), pythonStep.pythonConfig());
                    break;
                case "numpy.ndarray":
                    ret.put(variable.getName(), new ND4JNDArray(KonduitPythonUtils.getWithType(outputs, variable.getName(), INDArray.class)));
                    break;
                case "str":
                    ret.put(variable.getName(), KonduitPythonUtils.getWithType(outputs, variable.getName(), String.class));
                    break;
                case "dict":
                    ValueType dictValueType = pythonIO.type();
                    Map<String, Object> items = (Map<String, Object>) KonduitPythonUtils.getWithType(outputs, variable.getName(), Map.class);
                    switch (dictValueType) {
                        case POINT:
                            ret.put(variable.getName(), DictUtils.fromPointDict(items));
                            break;
                        case BOUNDING_BOX:
                            ret.put(variable.getName(), DictUtils.boundingBoxFromDict(items));
                            break;
                        default:
                            throw new IllegalArgumentException("Limited support for deserializing dictionaries. Invalid type " + dictValueType);
                    }
                    break;
                case "int":
                    ret.put(variable.getName(), KonduitPythonUtils.getWithType(outputs, variable.getName(), Long.class));
                    break;
                case "float":
                    ret.put(variable.getName(), KonduitPythonUtils.getWithType(outputs, variable.getName(), Double.class));
                    break;
                default:
                    break;
            }
        }
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving/python/PythonRunnerFactory.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.python;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;

public class PythonRunnerFactory implements PipelineStepRunnerFactory {

    @Override
    public boolean canRun(PipelineStep step) {
        return step instanceof PythonStep;
    }

    @Override
    public PipelineStepRunner create(PipelineStep step) {
        Preconditions.checkState(canRun(step), "Unable to run pipeline step: %s", step.getClass());
        return new PythonRunner((PythonStep) step);
    }
}
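The factory follows the standard PipelineStepRunnerFactory contract; a short usage sketch (step construction elided):

// Create a runner for an existing PythonStep instance via the factory
PythonRunnerFactory factory = new PythonRunnerFactory();
if (factory.canRun(step)) { // step is an already-built PythonStep
    PipelineStepRunner runner = factory.create(step);
}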
0
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving/python
java-sources/ai/konduit/serving/konduit-serving-python/0.3.0/ai/konduit/serving/python/util/KonduitPythonUtils.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.python.util;

import ai.konduit.serving.data.image.data.FrameImage;
import ai.konduit.serving.data.image.data.MatImage;
import ai.konduit.serving.data.nd4j.data.ND4JNDArray;
import ai.konduit.serving.model.PythonConfig;
import ai.konduit.serving.model.PythonIO;
import ai.konduit.serving.pipeline.api.data.*;
import ai.konduit.serving.pipeline.impl.data.image.*;
import ai.konduit.serving.python.DictUtils;
import ai.konduit.serving.python.NoneType;
import ai.konduit.serving.python.PythonStep;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacv.Frame;
import org.bytedeco.opencv.opencv_core.Mat;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.python4j.*;
import org.nd4j.python4j.numpy.NumpyArray;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static org.nd4j.python4j.PythonTypes.*;

public class KonduitPythonUtils {

    public final static String[] PYTHON_VARIABLE_TYPES = {
            "bool",
            "list",
            "bytes",
            "numpy.ndarray",
            "str",
            "dict",
            "int",
            "float"
    };

    private KonduitPythonUtils() {}

    /**
     * Create the input {@link PythonVariables}
     * based on the {@link PythonConfig#getIoInputs()}
     * @param pythonConfig the python configuration to derive inputs from
     * @return the input variables
     */
    public static PythonVariables createInputVariables(PythonConfig pythonConfig) {
        PythonVariables ret = new PythonVariables();
        for (Map.Entry<String, PythonIO> entry : pythonConfig.getIoInputs().entrySet()) {
            ret.add(new PythonVariable<>(entry.getKey(), PythonTypes.get(entry.getValue().pythonType())));
        }
        return ret;
    }

    /**
     * Create the output {@link PythonVariables}
     * based on the {@link PythonConfig#getIoOutputs()}
     * @param pythonConfig the python configuration to derive outputs from
     * @return the output variables
     */
    public static PythonVariables createOutputVariables(PythonConfig pythonConfig) {
        PythonVariables ret = new PythonVariables();
        for (Map.Entry<String, PythonIO> entry : pythonConfig.getIoOutputs().entrySet()) {
            ret.add(new PythonVariable<>(entry.getKey(), PythonTypes.get(entry.getValue().pythonType())));
        }
        return ret;
    }

    /**
     * Create a valid list for input in to
     * {@link PythonVariables}
     * based on the input value type
     * @param input the input to convert
     * @param valueType the value type to add
     * @return the equivalent list for safe insertion
     * in to a {@link PythonVariables} object
     */
    public static List<Object> createValidListForPythonVariables(List<Object> input, ValueType valueType) {
        List<Object> ret = new ArrayList<>(input.size());
        for (Object inputItem : input) {
            switch (valueType) {
                case NDARRAY:
                    if (inputItem instanceof INDArray) {
                        ret.add(inputItem);
                    } else if (inputItem instanceof NDArray) {
                        NDArray ndArray = (NDArray) inputItem;
                        ret.add(ndArray.getAs(INDArray.class));
                    } else {
                        throw new IllegalArgumentException("Value type NDArray was specified but item found in list was neither NDArray nor INDArray");
                    }
                    break;
                case BYTEBUFFER:
                    ByteBuffer byteBuffer = (ByteBuffer) inputItem;
                    if (byteBuffer.hasArray()) {
                        ret.add(byteBuffer.array());
                    } else {
                        byte[] toAdd = new byte[byteBuffer.capacity()];
                        byteBuffer.get(toAdd);
                        ret.add(toAdd);
                        byteBuffer.rewind();
                    }
                    break;
                case IMAGE:
                    Image image = (Image) inputItem;
                    try {
                        ret.add(convertImageToBytes(image));
                    } catch (IOException e) {
                        throw new IllegalArgumentException("Unable to convert image to bytes for addition in to python. Image was of type " + inputItem.getClass().getName());
                    }
                    break;
                case BOUNDING_BOX:
                    BoundingBox boundingBox = (BoundingBox) inputItem;
                    ret.add(DictUtils.toBoundingBoxDict(boundingBox));
                    break;
                case POINT:
                    Point point = (Point) inputItem;
                    ret.add(DictUtils.toPointDict(point));
                    break;
                case INT64:
                case STRING:
                case DOUBLE:
                case BYTES:
                case BOOLEAN:
                    ret.add(inputItem);
                    break;
                case DATA:
                case LIST:
                    // nested Data and list-of-list values are not supported and are skipped
                    break;
            }
        }
        return ret;
    }

    /**
     * Returns a {@link PythonType}
     * for the given {@link ValueType}
     * @param valueType the value type to get the input for
     * @return the python type
     */
    public static PythonType typeForValueType(ValueType valueType) {
        switch (valueType) {
            case NONE:
                return NoneType.instance();
            case BOOLEAN:
                return BOOL;
            case STRING:
                return STR;
            case DOUBLE:
                return FLOAT;
            case INT64:
                return INT;
            case NDARRAY:
                return NumpyArray.INSTANCE;
            case LIST:
                return LIST;
            case BYTES:
            case IMAGE:
            case BYTEBUFFER:
                return BYTES;
            case BOUNDING_BOX:
            case POINT:
                return DICT;
            default:
                throw new IllegalArgumentException("Data is not a valid value type for input in to a python script");
        }
    }

    /**
     * Return an equivalent {@link PythonType}
     * for the given java class.
     * Accepted classes right now are:
     * {@link INDArray}: {@link NumpyArray}
     * double, {@link Double}: {@link PythonTypes#FLOAT}
     * int, {@link Integer}, long, {@link Long}: {@link PythonTypes#INT}
     * {@link Map}: {@link PythonTypes#DICT}
     * {@link List}: {@link PythonTypes#LIST}
     * {@link String}: {@link PythonTypes#STR}
     * {@link ByteBuffer}, byte[]: {@link PythonTypes#BYTES}
     * {@link Boolean}, boolean: {@link PythonTypes#BOOL}
     * @param clazz the input class
     * @param <T> the type of the class
     * @return the equivalent {@link PythonType} listed above
     */
    public static <T> PythonType pythonTypeFor(Class<T> clazz) {
        if (clazz.equals(INDArray.class)) {
            return NumpyArray.INSTANCE;
        } else if (clazz.equals(Float.class) || clazz.equals(float.class) || clazz.equals(double.class) || clazz.equals(Double.class)) {
            return PythonTypes.FLOAT;
        } else if (clazz.equals(Integer.class) || clazz.equals(int.class) || clazz.equals(long.class) || clazz.equals(Long.class)) {
            return PythonTypes.INT;
        } else if (Map.class.isAssignableFrom(clazz)) {
            return PythonTypes.DICT;
        } else if (List.class.isAssignableFrom(clazz)) {
            return PythonTypes.LIST;
        } else if (clazz.equals(Boolean.class) || clazz.equals(boolean.class)) {
            return PythonTypes.BOOL;
            // isAssignableFrom doesn't seem to catch direct byte buffers, hence the name check
        } else if (clazz.equals(byte[].class) || Buffer.class.isAssignableFrom(clazz) || clazz.getName().contains("Buffer")) {
            return PythonTypes.BYTES;
        } else if (CharSequence.class.isAssignableFrom(clazz)) {
            return PythonTypes.STR;
        } else {
            throw new IllegalArgumentException("Illegal clazz type " + clazz.getName());
        }
    }

    /**
     * Invoke {@link PythonVariables#add(String, PythonType, Object)}
     * with the given input, inferring the {@link PythonType}
     * via {@link #pythonTypeFor(Class)} from the input's class
     * @param addTo the variables object to add to
     * @param key the variable name
     * @param input the input object to add
     */
    public static void addObjectToPythonVariables(PythonVariables addTo, String key, Object input) {
        addTo.add(key, pythonTypeFor(input.getClass()), input);
    }

    /**
     * Get the desired variable
     * with the desired type
     * @param getFrom the variables to invoke
     * {@link PythonVariables#get(String)} on
     * @param variableName the name of the variable
     * @param clazz the type of the class
     * @param <T> the type
     * @return the value cast to the given type
     */
    public static <T> T getWithType(PythonVariables getFrom, String variableName, Class<T> clazz) {
        PythonVariable pythonVariable = getFrom.get(variableName);
        Object value = pythonVariable.getValue();
        return clazz.cast(value);
    }

    /**
     * Convert the given {@link Image}
     * to bytes
     * @param image the image
     * @return the output byte array for the image
     * @throws IOException if image encoding fails
     */
    public static byte[] convertImageToBytes(Image image) throws IOException {
        if (image instanceof BImage) {
            BImage bImage = (BImage) image;
            BufferedImage bufferedImage = bImage.getAs(BufferedImage.class);
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            ImageIO.write(bufferedImage, "jpg", byteArrayOutputStream);
            return byteArrayOutputStream.toByteArray();
        } else if (image instanceof PngImage) {
            PngImage pngImage = (PngImage) image;
            Png png = pngImage.getAs(Png.class);
            return png.getBytes();
        } else if (image instanceof GifImage) {
            GifImage gifImage = (GifImage) image;
            Gif gif = gifImage.getAs(Gif.class);
            return gif.getBytes();
        } else if (image instanceof JpegImage) {
            JpegImage jpegImage = (JpegImage) image;
            Jpeg jpeg = jpegImage.getAs(Jpeg.class);
            return jpeg.getBytes();
        } else if (image instanceof BmpImage) {
            BmpImage bmpImage = (BmpImage) image;
            Bmp bmp = bmpImage.getAs(Bmp.class);
            return bmp.getBytes();
        } else if (image instanceof FrameImage) {
            FrameImage frameImage = (FrameImage) image;
            Frame frame = frameImage.getAs(Frame.class);
            ByteBuffer byteBuffer = frame.data;
            int totalLen = frame.data.capacity();
            byte[] convert = new byte[totalLen];
            byteBuffer.get(convert);
            return convert;
        } else if (image instanceof MatImage) {
            MatImage matImage = (MatImage) image;
            Mat mat = matImage.getAs(Mat.class);
            // elemSize() already includes the channel count, so total bytes = rows * cols * elemSize
            int totalLen = (int) (mat.elemSize() * mat.rows() * mat.cols());
            ByteBuffer byteBuffer = mat.data().asByteBuffer();
            byte[] convert = new byte[totalLen];
            byteBuffer.get(convert);
            return convert;
        }
        throw new IllegalArgumentException("Illegal image type " + image.getClass());
    }

    /**
     * Adds an image to a set of python variables,
     * converting it to bytes first.
     * @param pythonVariables the variables to add to
     * @param key the variable name
     * @param image the image to add
     * @throws Exception if image conversion fails
     */
    public static void addImageToPython(PythonVariables pythonVariables, String key, Image image) throws Exception {
        addObjectToPythonVariables(pythonVariables, key, convertImageToBytes(image));
    }

    /**
     * Insert a list in to the given {@link Data}
     * object. The given list will typically come from a
     * {@link PythonVariables#get(String)}
     * invocation, with the restriction of a single type
     * per list to play well with the konduit serving
     * {@link Data} serialization
     * @param ret the data to insert in to
     * @param variable the variable used as the key for
     * inserting in to the data object
     * @param listValue the list of values to insert
     * @param valueType the value type used for the list
     */
    public static void insertListIntoData(Data ret, String variable, List listValue, ValueType valueType) {
        switch (valueType) {
            case BYTEBUFFER:
                List<ByteBuffer> byteBuffers = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    byteBuffers.add((ByteBuffer) o);
                }
                ret.putListByteBuffer(variable, byteBuffers);
                break;
            case IMAGE:
                List<Image> images = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    images.add(Image.create(o));
                }
                ret.putListImage(variable, images);
                break;
            case DOUBLE:
                List<Double> doubles = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    doubles.add(((Number) o).doubleValue());
                }
                ret.putListDouble(variable, doubles);
                break;
            case INT64:
                List<Long> longs = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    longs.add(((Number) o).longValue());
                }
                ret.putListInt64(variable, longs);
                break;
            case BOOLEAN:
                List<Boolean> booleans = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    booleans.add((Boolean) o);
                }
                ret.putListBoolean(variable, booleans);
                break;
            case BOUNDING_BOX:
                List<BoundingBox> boundingBoxes = new ArrayList<>(listValue.size());
                for (Object input : listValue) {
                    if (input instanceof BoundingBox) {
                        boundingBoxes.add((BoundingBox) input);
                    } else if (input instanceof Map) {
                        boundingBoxes.add(DictUtils.boundingBoxFromDict((Map<String, Object>) input));
                    }
                }
                ret.putListBoundingBox(variable, boundingBoxes);
                break;
            case STRING:
                List<String> strings = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    strings.add(o.toString());
                }
                ret.putListString(variable, strings);
                break;
            case POINT:
                List<Point> points = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    if (o instanceof Point) {
                        points.add((Point) o);
                    } else if (o instanceof Map) {
                        points.add(DictUtils.fromPointDict((Map<String, Object>) o));
                    }
                }
                ret.putListPoint(variable, points);
                break;
            case NDARRAY:
                List<NDArray> ndArrays = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    ndArrays.add(new ND4JNDArray((INDArray) o));
                }
                ret.putListNDArray(variable, ndArrays);
                break;
            case BYTES:
                List<byte[]> bytes = new ArrayList<>(listValue.size());
                for (Object o : listValue) {
                    bytes.add((byte[]) o);
                }
                ret.putListBytes(variable, bytes);
                break;
            case DATA:
                throw new IllegalArgumentException("Unable to deserialize data from python");
            case LIST:
                throw new IllegalArgumentException("List of lists not allowed");
        }
    }

    /**
     * Insert a bytes object in to the given {@link Data}
     * object
     * @param ret the data object to insert in to
     * @param outputs the output variables to read from
     * @param variable the variable representing the key of the value to insert
     * @param pythonConfig the python configuration for validation
     * @throws IOException if image decoding fails
     */
    public static void insertBytesIntoPythonVariables(Data ret, PythonVariables outputs, String variable, PythonConfig pythonConfig) throws IOException {
        Preconditions.checkState(pythonConfig.getIoOutputs().containsKey(variable), "No output type conversion found for " + variable + " please ensure a type exists for converting bytes to an appropriate data type.");
        PythonIO pythonIO = pythonConfig.getIoOutputs().get(variable);
        ValueType byteOutputValueType = pythonIO.type();
        Preconditions.checkNotNull(byteOutputValueType, "No byte value output type specified!");
        Preconditions.checkState(outputs.get("len_" + variable) != null, "Please ensure a len_" + variable + " is defined for your python script output to get a consistent length from python.");
        Long length = getWithType(outputs, "len_" + variable, Long.class);
        Preconditions.checkNotNull(length, "No byte pointer length found for variable " + variable);
        BytePointer bytesValue = new BytePointer(getWithType(outputs, variable, byte[].class));
        Preconditions.checkNotNull(bytesValue, "No byte pointer found for variable " + variable);
        // ensure length matches what's found in python
        Long capacity = length;
        bytesValue.capacity(capacity);
        switch (byteOutputValueType) {
            case IMAGE:
                ByteBuffer byteBuffer1 = bytesValue.asBuffer();
                byte[] imageBytes;
                if (byteBuffer1.hasArray()) {
                    imageBytes = byteBuffer1.array();
                } else {
                    imageBytes = new byte[capacity.intValue()];
                    byteBuffer1.get(imageBytes);
                }
                BufferedImage bufferedImage = ImageIO.read(new ByteArrayInputStream(imageBytes));
                Preconditions.checkNotNull(bufferedImage, "Buffered image was not returned. Invalid image bytes passed in.");
                ret.put(variable, Image.create(bufferedImage));
                break;
            case BYTES:
                ByteBuffer byteBuffer = bytesValue.asByteBuffer();
                byte[] bytes = new byte[byteBuffer.capacity()];
                byteBuffer.get(bytes);
                ret.put(variable, bytes);
                break;
            case BYTEBUFFER:
                ret.put(variable, bytesValue.asByteBuffer());
                break;
            case STRING:
                ret.put(variable, bytesValue.getString());
                break;
            default:
                throw new IllegalArgumentException("Illegal type found for output type conversion " + byteOutputValueType);
        }
    }

    /**
     * Create a set of {@link PythonVariables}
     * to use with a {@link PythonStep}
     * @param data the data input to use
     * @param pythonConfig the python configuration to use
     * @return the created {@link PythonVariables}
     * @throws Exception if conversion of any value fails
     */
    public static PythonVariables createPythonVariablesFromDataInput(Data data, PythonConfig pythonConfig) throws Exception {
        PythonVariables pythonVariables = new PythonVariables();
        for (String key : data.keys()) {
            PythonIO pythonIO = pythonConfig.getIoInputs().get(key);
            Preconditions.checkNotNull(pythonIO, "No python IO found for key " + key);
            switch (data.type(key)) {
                case NDARRAY:
                    NDArray ndArray = data.getNDArray(key);
                    pythonVariables.add(key, NumpyArray.INSTANCE, ndArray.getAs(INDArray.class));
                    break;
                case BYTES:
                    pythonVariables.add(key, BYTES, data.getBytes(key));
                    break;
                case BYTEBUFFER:
                    ByteBuffer byteBuffer = data.getByteBuffer(key);
                    if (byteBuffer.hasArray()) {
                        pythonVariables.add(key, BYTES, byteBuffer.array());
                    } else {
                        byte[] newArr = new byte[byteBuffer.capacity()];
                        byteBuffer.get(newArr);
                        byteBuffer.rewind();
                        pythonVariables.add(key, BYTES, newArr);
                    }
                    break;
                case DOUBLE:
                    pythonVariables.add(key, PythonTypes.FLOAT, data.getDouble(key));
                    break;
                case LIST:
                    Preconditions.checkState(pythonIO.isListWithType(), "No input type specified for list with key " + key);
                    ValueType valueType = pythonIO.secondaryType();
                    List<Object> list = data.getList(key, valueType);
                    List<Object> preProcessed = createValidListForPythonVariables(list, valueType);
                    KonduitPythonUtils.addObjectToPythonVariables(pythonVariables, key, preProcessed);
                    break;
                case INT64:
                    pythonVariables.add(key, PythonTypes.INT, data.getLong(key));
                    break;
                case BOOLEAN:
                    pythonVariables.add(key, BOOL, data.getBoolean(key));
                    break;
                case DATA:
                    // Just send it as a JSON string
                    pythonVariables.add(key, PythonTypes.STR, data.getData(key).toJson());
                    break;
                case STRING:
                    pythonVariables.add(key, PythonTypes.STR, data.getString(key));
                    break;
                case IMAGE:
                    addImageToPython(pythonVariables, key, data.getImage(key));
                    break;
                case BOUNDING_BOX:
                    BoundingBox boundingBox = data.getBoundingBox(key);
                    pythonVariables.add(key, pythonTypeFor(Map.class), DictUtils.toBoundingBoxDict(boundingBox));
                    break;
                case POINT:
                    Point point = data.getPoint(key);
                    pythonVariables.add(key, pythonTypeFor(Map.class), DictUtils.toPointDict(point));
                    break;
                default:
                    throw new IllegalArgumentException("Illegal type " + data.type(key));
            }
        }
        return pythonVariables;
    }
}
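A small sketch of the variable helpers above: addObjectToPythonVariables infers the PythonType via pythonTypeFor from the runtime class, and getWithType casts the stored value back.

// Round-trip a double through PythonVariables using the helpers above
PythonVariables vars = new PythonVariables();
KonduitPythonUtils.addObjectToPythonVariables(vars, "threshold", 0.5); // Double is inferred as PythonTypes.FLOAT
Double threshold = KonduitPythonUtils.getWithType(vars, "threshold", Double.class);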
0
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving/model/PythonConfig.java
/* * * * ****************************************************************************** * * * Copyright (c) 2015-2019 Skymind Inc. * * * Copyright (c) 2022 Konduit K.K. * * * * * * This program and the accompanying materials are made available under the * * * terms of the Apache License, Version 2.0 which is available at * * * https://www.apache.org/licenses/LICENSE-2.0. * * * * * * Unless required by applicable law or agreed to in writing, software * * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * * License for the specific language governing permissions and limitations * * * under the License. * * * * * * SPDX-License-Identifier: Apache-2.0 * * ***************************************************************************** * * */ package ai.konduit.serving.model; import ai.konduit.serving.pipeline.api.TextConfig; import ai.konduit.serving.pipeline.api.process.ProcessUtils; import ai.konduit.serving.pipeline.api.python.PythonPathUtils; import ai.konduit.serving.pipeline.api.python.models.*; import io.swagger.v3.oas.annotations.media.Schema; import lombok.*; import lombok.extern.slf4j.Slf4j; import org.nd4j.shade.jackson.annotation.JsonAutoDetect; import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.io.File; import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; /** * Python configuration for specifying: * 1. pythonCode: actual python source code * 2. pythonCodePath: a path to a source file. * 3. pythonPath: a python path for dependencies * 4. pythonInputs/pythonOutputs/extraInputs: a map of variable name to python type * 5. returnAllInputs: rather than specifying outputs explicitly, the python execution * will just return all created python variables during execution * * * @author Adam Gibson */ @Data @NoArgsConstructor @Slf4j @Builder @AllArgsConstructor @JsonIgnoreProperties({"ioInput","ioOutput"}) @Schema(description = "The python configuration for setting up python execution.") public class PythonConfig implements Serializable, TextConfig { @Schema(description = "The python configuration type") private PythonConfigType pythonConfigType; @Schema(description = "The python path to use with python execution") private String pythonPath; @Schema(description = "The environment name to use with a conda environment") private String environmentName; @Schema(description = "The javacpp append type. 
This is meant to handle how javacpp's python blends with a custom python path.") private AppendType appendType; @Builder.Default @Schema(description = "Automatic python path resolution type") private PythonPathResolution pythonPathResolution = PythonPathResolution.STATIC; @JsonProperty("pythonCode") @Schema(description = "Python code to be specified in line") private String pythonCode; @JsonProperty("pythonCodePath") @Schema(description = "A path to a file containing valid python code.") private String pythonCodePath; @JsonProperty("pythonLibrariesPath") private String pythonLibrariesPath; @JsonProperty("importCode") @Schema(description = "Python import code to run and to be concatneated with python code") private String importCode; @JsonProperty("importCodePath") @Schema(description = "The path to the import code.") private String importCodePath; @Singular @Deprecated private Map<String, String> pythonInputs, pythonOutputs, extraInputs; @JsonProperty("returnAllInputs") @Schema(description = "Whether to return all variables created within a python script execution") private boolean returnAllInputs; @JsonProperty("setupAndRun") private boolean setupAndRun; @Singular("ioInput") @JsonProperty("ioInputs") @Schema(description = "The various input variables containing types, variable names") private Map<String,PythonIO> ioInputs; @Singular("ioOutput") @JsonProperty("ioOutputs") @Schema(description = "The various output variables containing types, variable names") private Map<String,PythonIO> ioOutputs; @Builder.Default private String jobSuffix = "konduit_job"; public String resolvePythonLibrariesPath() { if(pythonConfigType == null) { log.info("Python config type not specified..."); List<CondaDetails> condaInstalls = PythonPathUtils.findCondaInstallations(); if(!condaInstalls.isEmpty()) { String baseEnvironmentName = "base"; log.info("Using conda at path '{}' and environment '{}'", condaInstalls.get(0).path(), baseEnvironmentName); this.pythonLibrariesPath = findPythonLibrariesPathFromCondaDetails(condaInstalls.get(0).id(), baseEnvironmentName); } else { List<PythonDetails> pythonInstalls = PythonPathUtils.findPythonInstallations(); if(!pythonInstalls.isEmpty()) { log.info("Using python install at path '{}'", pythonInstalls.get(0)); this.pythonLibrariesPath = findPythonLibariesPath(pythonInstalls.get(0).id()); } else { throw new IllegalStateException("Unable to resolve python paths automatically. Please specify a python config type in the python step configuration " + "with appropriate id and environment name. 
Run 'konduit pythonpaths --help' for more information."); } } } else { switch (pythonConfigType) { case PYTHON: this.pythonLibrariesPath = findPythonLibariesPath(pythonPath); break; case CONDA: this.pythonLibrariesPath = findPythonLibrariesPathFromCondaDetails(pythonPath, environmentName); break; case VENV: this.pythonLibrariesPath = findPythonLibariesPathFromVenvDetails(pythonPath); break; case CUSTOM: this.pythonLibrariesPath = pythonLibrariesFromAbsolutePath(pythonPath); break; case JAVACPP: default: break; } } return this.pythonLibrariesPath; } public static String findPythonLibrariesPathFromCondaDetails(String condaPathId, String environmentName) { CondaDetails condaDetails = findCondaDetails(condaPathId); List<PythonDetails> pythonDetailsList = condaDetails.environments(); Optional<PythonDetails> optionalPythonDetails = pythonDetailsList .stream() .filter(pythonDetails -> pythonDetails.id().equals(environmentName)) .findFirst(); if(optionalPythonDetails.isPresent()) { return pythonLibrariesFromAbsolutePath(optionalPythonDetails.get().path()); } else { throw new IllegalStateException(String.format("No environment available with the name '%s' for conda path id '%s'. Available python environments for conda path id '%s' are: %n%s", environmentName, condaPathId, condaPathId, String.format("%n---%n%s---%n", pythonDetailsList.stream() .map(pythonDetails -> String.format("-\tname: %s%n\tpath: %s%n\tversion: %s", pythonDetails.id(), pythonDetails.path(), pythonDetails.version())) .collect(Collectors.joining(System.lineSeparator())) ))); } } public static CondaDetails findCondaDetails(String condaPathId) { List<CondaDetails> condaDetailsList = PythonPathUtils.findCondaInstallations(); Optional<CondaDetails> optionalCondaDetails = condaDetailsList .stream() .filter(condaDetails -> condaDetails.id().equals(condaPathId)) .findFirst(); if(optionalCondaDetails.isPresent()) { return optionalCondaDetails.get(); } else { throw new IllegalStateException(String.format("No id '%s' available for conda path type. Available conda type paths are: %n%s", condaPathId, String.format("%n---%n%s---%n", condaDetailsList.stream() .map(condaDetails -> String.format("-\tid: %s%n\tpath: %s%n\tversion: %s", condaDetails.id(), condaDetails.path(), condaDetails.version())) .collect(Collectors.joining(System.lineSeparator())) ))); } } public static String findPythonLibariesPathFromVenvDetails(String venvPathId) { List<VenvDetails> venvDetailsList = PythonPathUtils.findVenvInstallations(); Optional<VenvDetails> optionalVenvDetails = venvDetailsList .stream() .filter(venvDetails -> venvDetails.id().equals(venvPathId)) .findFirst(); if(optionalVenvDetails.isPresent()) { return pythonLibrariesFromAbsolutePath(PythonPathUtils.getVenvPythonFile(optionalVenvDetails.get().path()).getAbsolutePath()); } else { throw new IllegalStateException(String.format("No id '%s' available for venv path type. 
Available venv type paths are: %n%s", venvPathId, String.format("%n---%n%s---%n", venvDetailsList.stream() .map(pythonDetails -> String.format("-\tid: %s%n\tpath: %s%n\tversion: %s", pythonDetails.id(), pythonDetails.path(), pythonDetails.version())) .collect(Collectors.joining(System.lineSeparator())) ))); } } public static String findPythonLibariesPath(String pythonPathId) { List<PythonDetails> pythonDetailsList = PythonPathUtils.findPythonInstallations(); Optional<PythonDetails> optionalPythonDetails = pythonDetailsList .stream() .filter(pythonDetails -> pythonDetails.id().equals(pythonPathId)) .findFirst(); if(optionalPythonDetails.isPresent()) { return pythonLibrariesFromAbsolutePath(optionalPythonDetails.get().path()); } else { throw new IllegalStateException(String.format("No id '%s' available for python path type. Available python type paths are: %n%s", pythonPathId, String.format("%n---%n%s---%n", pythonDetailsList.stream() .map(pythonDetails -> String.format("-\tid: %s%n\tpath: %s%n\tversion: %s", pythonDetails.id(), pythonDetails.path(), pythonDetails.version())) .collect(Collectors.joining(System.lineSeparator())) ))); } } public static String pythonLibrariesFromAbsolutePath(String pythonPath) { File pythonPathFile = new File(pythonPath); if(pythonPathFile.exists() && pythonPathFile.isFile()) { return ProcessUtils.runAndGetOutput(pythonPath, "-c", "import sys, os; print(os.pathsep.join([path for path in sys.path]))").replace(System.lineSeparator(), "").trim(); } else { throw new IllegalStateException(String.format("No python executable path exist at: '%s'", pythonPathFile.getAbsoluteFile())); } } public enum PythonPathResolution { STATIC, DYNAMIC } }
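A builder sketch under stated assumptions: the conda path id '1' and the environment name are hypothetical values of the kind reported by 'konduit pythonpaths', and ioInput/ioOutput are the @Singular map adders Lombok generates for the ioInputs/ioOutputs fields.

// Hypothetical configuration: run `y = x + 1` against a conda environment
PythonConfig cfg = PythonConfig.builder()
        .pythonConfigType(PythonConfigType.CONDA)
        .pythonPath("1")          // conda install id (hypothetical)
        .environmentName("base")
        .pythonCode("y = x + 1")
        .ioInput("x", PythonIO.builder().name("x").pythonType("int").type(ValueType.INT64).build())
        .ioOutput("y", PythonIO.builder().name("y").pythonType("int").type(ValueType.INT64).build())
        .build();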
0
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving/model/PythonIO.java
/* * * * ****************************************************************************** * * * Copyright (c) 2015-2019 Skymind Inc. * * * Copyright (c) 2022 Konduit K.K. * * * * * * This program and the accompanying materials are made available under the * * * terms of the Apache License, Version 2.0 which is available at * * * https://www.apache.org/licenses/LICENSE-2.0. * * * * * * Unless required by applicable law or agreed to in writing, software * * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * * License for the specific language governing permissions and limitations * * * under the License. * * * * * * SPDX-License-Identifier: Apache-2.0 * * ***************************************************************************** * * */ package ai.konduit.serving.model; import ai.konduit.serving.pipeline.api.TextConfig; import ai.konduit.serving.pipeline.api.data.ValueType; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import java.io.Serializable; @Data @Accessors(fluent = true) @NoArgsConstructor @Schema(description = "Python base") public class PythonIO implements Serializable, TextConfig { private String name,pythonType; //relevant for lists, byte de serialization private ValueType secondaryType = ValueType.NONE; private ValueType type = ValueType.NONE; @Builder public PythonIO(String name, String pythonType, ValueType secondaryType, ValueType type) { this.name = name; this.pythonType = pythonType; if(secondaryType == null) secondaryType = ValueType.NONE; if(type == null) type = ValueType.NONE; this.secondaryType = secondaryType; this.type = type; validate(); } /** * Returns true if this io is a list * with a secondary type defined * @return */ public boolean isDictWithType() { if(type == null) return false; return type == ValueType.BOUNDING_BOX || type == ValueType.POINT && secondaryType != ValueType.NONE; } /** * Returns true if this is a list type * with no secondary type defined * @return */ public boolean isDictWithUndefinedType() { return !isDictWithType(); } /** * Returns true if this io is a list * with a secondary type defined * @return */ public boolean isListWithType() { if(type == null) return false; return type == ValueType.LIST && secondaryType != ValueType.NONE; } /** * Returns true if this is a list type * with no secondary type defined * @return */ public boolean isListWithUndefinedType() { if(type == null) return false; return !isListWithType(); } private void validate() { } }
0
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving/python/DictUtils.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.python;

import ai.konduit.serving.pipeline.api.data.BoundingBox;
import ai.konduit.serving.pipeline.api.data.Point;

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Dictionary utils for handling
 * type conversion for dictionaries
 * and serialized specialized objects
 * for the python pipeline step.
 *
 * @author Adam Gibson
 */
public class DictUtils {

    private DictUtils(){}

    /**
     * Create a {@link Point}
     * object from an input dictionary
     * @param dictPoint the input dictionary
     * @return the created {@link Point}
     */
    public static Point fromPointDict(Map<String,Object> dictPoint) {
        // "z" is only present for points with more than 2 dimensions; default it to 0.0
        return Point.create(
                castNumber(dictPoint.get("x")).doubleValue(),
                castNumber(dictPoint.get("y")).doubleValue(),
                castNumber(dictPoint.getOrDefault("z", 0.0)).doubleValue(),
                dictPoint.getOrDefault("label","").toString(),
                castNumber(dictPoint.getOrDefault("probability", 0.0)).doubleValue()
        );
    }

    private static Number castNumber(Object input) {
        return (Number) input;
    }

    /**
     * Create a dictionary from a {@link Point}
     * based on the defined attributes
     * @param point the input point
     * @return the returned dictionary
     */
    public static Map<String,Object> toPointDict(Point point) {
        Map<String,Object> ret = new LinkedHashMap<>(5);
        ret.put("x", point.x());
        ret.put("y", point.y());
        ret.put("label", point.label());
        if(point.dimensions() > 2)
            ret.put("z", point.z());
        ret.put("dimensions", point.dimensions());
        return ret;
    }

    /**
     * Convert a {@link BoundingBox}
     * to a dictionary
     * @param boundingBox the bounding box to convert
     * @return the bounding box as a dictionary
     */
    public static Map<String,Object> toBoundingBoxDict(BoundingBox boundingBox) {
        Map<String,Object> boundingBoxValues = new LinkedHashMap<>();
        boundingBoxValues.put("cx", boundingBox.cx());
        boundingBoxValues.put("cy", boundingBox.cy());
        boundingBoxValues.put("width", boundingBox.width());
        boundingBoxValues.put("height", boundingBox.height());
        boundingBoxValues.put("label", boundingBox.label());
        boundingBoxValues.put("probability", boundingBox.probability());
        boundingBoxValues.put("x1", boundingBox.x1());
        boundingBoxValues.put("x2", boundingBox.x2());
        boundingBoxValues.put("y1", boundingBox.y1());
        boundingBoxValues.put("y2", boundingBox.y2());
        return boundingBoxValues;
    }

    /**
     * Create a {@link BoundingBox} from a dictionary.
     * The expected attributes are:
     * cx, cy, height, width, label, probability
     * @param dict a dict with the above attributes
     * @return the equivalent bounding box with the given attributes
     */
    public static BoundingBox boundingBoxFromDict(Map<String,Object> dict) {
        return BoundingBox.create(
                (double) dict.get("cx"),
                (double) dict.get("cy"),
                (double) dict.get("height"),
                (double) dict.get("width"),
                (String) dict.get("label"),
                (double) dict.get("probability"));
    }
}
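A round-trip sketch for the point helpers above. Note that toPointDict only writes "z" for points with more than two dimensions, which fromPointDict tolerates by defaulting it.

// Round-trip a 3D point through its dictionary form
Point p = Point.create(0.25, 0.75, 0.0, "corner", 1.0);
Map<String, Object> dict = DictUtils.toPointDict(p);
Point restored = DictUtils.fromPointDict(dict);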
0
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving/python/KonduitServingPythonConfigJsonMapping.java
package ai.konduit.serving.python;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingPythonConfigJsonMapping implements JsonSubTypesMapping {

    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        l.add(new JsonSubType("PYTHON",
                ai.konduit.serving.python.PythonStep.class,
                ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving/python/PythonConfigModuleInfo.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */
package ai.konduit.serving.python;

import ai.konduit.serving.annotation.module.ModuleInfo;

@ModuleInfo("konduit-serving-python-config")
public class PythonConfigModuleInfo {
    private PythonConfigModuleInfo() {
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-python-config/0.3.0/ai/konduit/serving/python/PythonStep.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.python;

import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.model.PythonConfig;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import lombok.experimental.SuperBuilder;
import org.nd4j.shade.jackson.annotation.JsonProperty;

@Data
@SuperBuilder
@AllArgsConstructor
@Accessors(fluent = true)
@JsonName("PYTHON")
@NoArgsConstructor
@Schema(description = "A pipeline step that configures a python script that is to be executed.")
public class PythonStep implements PipelineStep {

    @JsonProperty("pythonConfig")
    @Schema(description = "The python configuration associated with this python step. This controls how the python step will be executed. " +
            "When describing inputs and outputs (ioInput, ioOutput) ensure that values are surrounded in quotes as string literals. " +
            "You can escape a \" with a \\ character. Each input/output is then space separated within the quotes. The format is: " +
            "name, python type, konduit serving value type.")
    private PythonConfig pythonConfig;
}
0
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/KonduitServingSamediffJsonMapping.java
package ai.konduit.serving.models.samediff;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingSamediffJsonMapping implements JsonSubTypesMapping {

    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/SameDiffModuleInfo.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */
package ai.konduit.serving.models.samediff;

import ai.konduit.serving.annotation.module.InheritRequiredDependencies;
import ai.konduit.serving.annotation.module.ModuleInfo;

@ModuleInfo("konduit-serving-samediff")
@InheritRequiredDependencies("konduit-serving-nd4j")
public class SameDiffModuleInfo {
    private SameDiffModuleInfo() {
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/step/SameDiffPipelineStepRunnerFactory.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.models.samediff.step; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class SameDiffPipelineStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof SameDiffStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run pipeline step: %s", pipelineStep.getClass()); SameDiffStep ps = (SameDiffStep)pipelineStep; return new SameDiffRunner(ps); } }
0
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/step/SameDiffRunner.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.models.samediff.step; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.exception.ModelLoadingException; import ai.konduit.serving.pipeline.api.protocol.URIResolver; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import org.nd4j.autodiff.listeners.At; import org.nd4j.autodiff.samediff.SameDiff; import org.nd4j.autodiff.samediff.internal.InferenceSession; import org.nd4j.common.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import java.io.File; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @CanRun(SameDiffStep.class) public class SameDiffRunner implements PipelineStepRunner { public static final String DEFAULT_OUT_NAME_SINGLE = "default"; private SameDiffStep step; private final SameDiff sd; public SameDiffRunner(SameDiffStep step) { this.step = step; String uri = step.modelUri(); Preconditions.checkState(uri != null && !uri.isEmpty(), "No model URI was provided (model URI was null or empty)"); try { File f = URIResolver.getFile(uri); Preconditions.checkState(f.exists(), "No model file exists at URI: %s", uri); sd = SameDiff.load(f, true); } catch (Throwable e) { throw new ModelLoadingException("Failed to load SameDiff model from URI " + step.modelUri(), e); } Nd4j.getExecutioner().enableDebugMode(step.debugMode()); Nd4j.getExecutioner().enableVerboseMode(step.verboseMode()); } @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { //First: Get array Map<String,INDArray> m = new HashMap<>(); List<String> inputs = sd.inputs(); for(String s : inputs){ if(!data.has(s)) throw new IllegalStateException("Expected to find NDArray with name \"" + s + "\" in data - not found. Data keys: " + data.keys()); if(data.type(s) != ValueType.NDARRAY) throw new IllegalStateException("Input Data field \"" + s + "\" is not an NDArray - is type : " + data.type(s)); m.put(s, data.getNDArray(s).getAs(INDArray.class)); } List<String> outNames = step.outputNames(); Preconditions.checkState(outNames != null && !outNames.isEmpty(), "No output names were provided in the SameDiffStep configuration"); ; Map<String,INDArray> out = sd.output(m,outNames.toArray(new String[outNames.size()])); Data d = Data.empty(); for(Map.Entry<String,INDArray> e : out.entrySet()){ d.put(e.getKey(), NDArray.create(e.getValue())); } return d; } }
0
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/step
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/step/trainer/SameDiffTrainerPipelineStepRunnerFactory.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.models.samediff.step.trainer; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class SameDiffTrainerPipelineStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof SameDiffTrainerStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run pipeline step: %s", pipelineStep.getClass()); SameDiffTrainerStep ps = (SameDiffTrainerStep)pipelineStep; return new SameDiffTrainerRunner(ps); } }
0
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/step
java-sources/ai/konduit/serving/konduit-serving-samediff/0.3.0/ai/konduit/serving/models/samediff/step/trainer/SameDiffTrainerRunner.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.models.samediff.step.trainer; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.exception.ModelLoadingException; import ai.konduit.serving.pipeline.api.protocol.URIResolver; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import lombok.SneakyThrows; import org.nd4j.autodiff.listeners.At; import org.nd4j.autodiff.loss.LossReduce; import org.nd4j.autodiff.samediff.SameDiff; import org.nd4j.autodiff.samediff.TrainingConfig; import org.nd4j.autodiff.samediff.VariableType; import org.nd4j.autodiff.samediff.internal.InferenceSession; import org.nd4j.common.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.dataset.MultiDataSet; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.weightinit.impl.ZeroInitScheme; import java.io.File; import java.util.*; @CanRun(SameDiffTrainerStep.class) public class SameDiffTrainerRunner implements PipelineStepRunner { private SameDiffTrainerStep step; private final SameDiff sd; public SameDiffTrainerRunner(SameDiffTrainerStep step) { this.step = step; String uri = step.modelUri(); Preconditions.checkState(uri != null && !uri.isEmpty(), "No model URI was provided (model URI was null or empty)"); try { File f = URIResolver.getFile(uri); Preconditions.checkState(f.exists(), "No model file exists at URI: %s", uri); sd = SameDiff.load(f, true); TrainingConfig.Builder builder = TrainingConfig.builder(); if(step.initialLossType() != null) { builder.initialLossDataType(step.initialLossType()); } if(step.l1() > 0) { builder.l1(step.l1()); } if(step.updater() != null) builder.updater(step.updater()); if(step.l2() > 0) { builder.l2(step.l2()); } if(step.lossVariables() != null && !step.lossVariables().isEmpty()) { builder.minimize(step.lossVariables().toArray(new String[step.lossVariables().size()])); } if(step.weightDecayCoefficient() > 0) { builder.weightDecay(step.weightDecayCoefficient(), step.weightDecayApplyLearningRate()); } Preconditions.checkState(step.inputFeatures() != null && !step.inputFeatures().isEmpty(),"Model inputs must not be empty! Please specify inputs on the same diff model."); builder.dataSetFeatureMapping(step.inputFeatures().toArray(new String[step.inputFeatures().size()])); Preconditions.checkState(step.lossVariables() != null && !step.lossVariables().isEmpty(),"No loss variables for training found! 
Please specify loss variables on the training step."); builder.dataSetLabelMapping(step.labels()); if(step.lossFunction() != null && step.lossVariables() != null && step.labels() != null) { if(step.lossVariables().size() != step.labels().size() || step.labels().size() != step.targetVariables().size()) { throw new IllegalArgumentException("Loss variables, Labels and Prediction variables must all be the same size. Please ensure that all variable lists specified match."); } for(int i = 0; i < step.lossVariables().size(); i++) { String labelVariable = step.labels().get(i); if(!sd.hasVariable(labelVariable)) { sd.var(labelVariable,VariableType.PLACEHOLDER,new ZeroInitScheme(),step.initialLossType()); } String lossVariableName = step.lossVariables().get(i); String predictVariable = step.targetVariables().get(i); switch(step.lossFunction()) { case L2: sd.loss().l2Loss(lossVariableName,sd.getVariable(predictVariable)); break; case MSE: case SQUARED_LOSS: sd.loss().meanSquaredError(lossVariableName,sd.getVariable(labelVariable),sd.getVariable(predictVariable),null); break; case XENT: sd.loss().sigmoidCrossEntropy(lossVariableName,sd.getVariable(labelVariable),sd.getVariable(predictVariable),null); break; case HINGE: sd.loss().hingeLoss(lossVariableName,sd.getVariable(labelVariable),sd.getVariable(predictVariable),null); break; case MCXENT: sd.loss().softmaxCrossEntropy(lossVariableName,sd.getVariable(predictVariable),sd.getVariable(labelVariable),null, LossReduce.SUM,0.0); break; case POISSON: sd.loss().logPoisson(lossVariableName,sd.getVariable(predictVariable),sd.getVariable(labelVariable),null,true); break; case SPARSE_MCXENT: sd.loss().sparseSoftmaxCrossEntropy(lossVariableName,sd.getVariable(predictVariable),sd.getVariable(labelVariable)); break; case SQUARED_HINGE: sd.loss().sparseSoftmaxCrossEntropy(lossVariableName,sd.getVariable(predictVariable),sd.getVariable(labelVariable)); break; case NEGATIVELOGLIKELIHOOD: sd.loss().logLoss(lossVariableName,sd.getVariable(predictVariable),sd.getVariable(labelVariable)); break; case L1: case WASSERSTEIN: case KL_DIVERGENCE: case COSINE_PROXIMITY: case MEAN_ABSOLUTE_ERROR: case RECONSTRUCTION_CROSSENTROPY: case MEAN_ABSOLUTE_PERCENTAGE_ERROR: case MEAN_SQUARED_LOGARITHMIC_ERROR: throw new IllegalArgumentException(step.lossFunction().name() + " is unimplemented!"); default: throw new IllegalArgumentException("Invalid loss function " + step.lossFunction()); } } } sd.setTrainingConfig(builder .build()); Nd4j.getExecutioner().enableDebugMode(step.debugMode()); Nd4j.getExecutioner().enableVerboseMode(step.verboseMode()); } catch (Throwable e) { throw new ModelLoadingException("Failed to load SameDiff model from URI " + step.modelUri(), e); } } @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @SneakyThrows @Override public Data exec(Context ctx, Data data) { List<String> inputs = step.inputFeatures(); List<INDArray> inputArrays = new ArrayList<>(); List<INDArray> labels = new ArrayList<>(); for(String s : inputs) { if(!data.has(s)) throw new IllegalStateException("Expected to find NDArray with name \"" + s + "\" in data - not found. 
Data keys: " + data.keys()); if(data.type(s) != ValueType.NDARRAY) throw new IllegalStateException("Input Data field \"" + s + "\" is not an NDArray - is type : " + data.type(s)); //labels are also placeholders and maybe present in the input if(!step.labels().contains(s)) { INDArray arr = data.getNDArray(s).getAs(INDArray.class); inputArrays.add(arr); } } for(String s : step.labels()) { INDArray arr = data.getNDArray(s).getAs(INDArray.class); labels.add(arr); } MultiDataSet multiDataSet = new MultiDataSet(inputArrays.toArray(new INDArray[inputArrays.size()]), labels.toArray(new INDArray[labels.size()])); //TODO: test is adding a samediff sub function in the define function solves the gradient definition problem List<String> outNames = step.lossVariables(); Preconditions.checkState(outNames != null && !outNames.isEmpty(), "No output names were provided in the SameDiffStep configuration"); sd.fit(multiDataSet); if(step.modelSaveOutputPath() != null) sd.save(new File(step.modelSaveOutputPath()),true); Data d = Data.empty(); return d; } }
0
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models/samediff/KonduitServingSamediffConfigJsonMapping.java
package ai.konduit.serving.models.samediff;import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT public class KonduitServingSamediffConfigJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); l.add(new JsonSubType("SAMEDIFF", ai.konduit.serving.models.samediff.step.SameDiffStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("SAMEDIFF_TRAINING", ai.konduit.serving.models.samediff.step.trainer.SameDiffTrainerStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models/samediff/SamediffConfigModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.samediff; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-samediff-config") public class SamediffConfigModuleInfo { private SamediffConfigModuleInfo(){ } }
0
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models/samediff
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models/samediff/step/SameDiffStep.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.models.samediff.step; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.SuperBuilder; import lombok.experimental.Tolerate; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.Arrays; import java.util.List; @SuperBuilder @Data @Accessors(fluent = true) @JsonName("SAMEDIFF") @NoArgsConstructor @Schema(description = "A pipeline step that configures a SameDiff model that is to be executed.") public class SameDiffStep implements PipelineStep { @Schema(description = "Specifies the location of a saved model file.") private String modelUri; @Schema(description = "A list of names of the output arrays - i.e., the names of the arrays to predict/return.") private List<String> outputNames; @Schema(description = "Enable debug mode, defaults to false") private boolean debugMode = false; @Schema(description = "Enable verbose mode, defaults to false") private boolean verboseMode = false; public SameDiffStep(@JsonProperty("modelUri") String modelUri, @JsonProperty("outputNames") List<String> outputNames, @JsonProperty("debugMode") boolean debugMode, @JsonProperty("verboseMode") boolean verboseMode){ this.modelUri = modelUri; this.outputNames = outputNames; this.debugMode = debugMode; this.verboseMode = verboseMode; } public SameDiffStep(String modelUri, List<String> outputNames) { this.modelUri = modelUri; this.outputNames = outputNames; } @Tolerate public SameDiffStep outputNames(String... outputNames) { return this.outputNames(Arrays.asList(outputNames)); } }
0
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models/samediff/step
java-sources/ai/konduit/serving/konduit-serving-samediff-config/0.3.0/ai/konduit/serving/models/samediff/step/trainer/SameDiffTrainerStep.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.models.samediff.step.trainer; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.SuperBuilder; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.learning.config.IUpdater; import org.nd4j.linalg.lossfunctions.LossFunctions; import org.nd4j.linalg.schedule.ISchedule; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.List; @SuperBuilder @Data @Accessors(fluent = true) @JsonName("SAMEDIFF_TRAINING") @NoArgsConstructor @Schema(description = "A pipeline step that configures a SameDiff model that is to be executed.") public class SameDiffTrainerStep implements PipelineStep { @Schema(description = "Specifies the location of a saved model file.") private String modelUri; @Schema(description = "An L1 regularization coefficient for application during training. Set this value for l1 regularization. Not applied by default.") private double l1 = -1.0; @Schema(description = "An L2 regularization coefficient for application during training. Set this value for l2 regularization. Not applied by default.") private double l2 = -1.0; @Schema(description = "A weight regularization coefficient for application during training. Set this value to enable weight decay. Disabled byd efault.") private double weightDecayCoefficient; @Schema(description = "Whether to apply learning rate during weight decay,defaults to true") private boolean weightDecayApplyLearningRate = true; @Schema(description = "Specifies the location of the model save path") private String modelSaveOutputPath; @Schema(description = "Specifies the number of epochs to run training for") private int numEpochs = 1; @Schema(description = "A list of names of the loss variables- the names of the targets to train against for the loss function") private List<String> lossVariables; @Schema(description = "A list of names of the input variables- the names of the input variables for training") private List<String> inputFeatures; @Schema(description = "A list of names of the labels variables- the names of the true labels for prediction to calculate error against") private List<String> labels; @Schema(description = "A list of names of the prediction variables- the names of the prediction labels for prediction to calculate error against") private List<String> targetVariables; @Schema(description = "The updater to use for training. When specifying an updater on the command line, the type is needed. Valid types include: AMSGRAD,ADABELIEF,ADAGRAD,ADADELTA,ADAMAX,ADAM,NADAM,NESTEROVS,NOOP,RMSPROP,SGD . 
Each field for the updater must be specified in terms of field name = value separated by commas. Relevant updaters and their fields can be found here: https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config") private IUpdater updater; @Schema(description = "The learning rate to use for training") private double learningRate; @Schema(description = "The learning rate schedule to use for training. When specifying a learning rate or momentum schedule, comma separated values with key=value for each field is required. Valid values include: poly,step,cycle,fixed,inverse,sigmoid,exponential. Relevant schedules and their fields can be found here: https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/schedule - it is recommended when specifying this value on the command line to use \" to ensure the value gets parsed properly.") private ISchedule learningRateSchedule; @Schema(description = "The initial loss type for the training, defaults to float") private DataType initialLossType = DataType.FLOAT; @Schema(description = "The loss function to use for training models") private LossFunctions.LossFunction lossFunction; @Schema(description = "Enable debug mode, defaults to false") private boolean debugMode = false; @Schema(description = "Enable verbose mode, defaults to false") private boolean verboseMode = false; public SameDiffTrainerStep(@JsonProperty("modelUri") String modelUri, @JsonProperty("l1") double l1, @JsonProperty("l2") double l2, @JsonProperty("modelSaveOutputPath") String modelSaveOutputPath, @JsonProperty("numEpochs") int numEpochs, @JsonProperty("inputFeatures") List<String> inputFeatures, @JsonProperty("lossVariables") List<String> lossVariables, @JsonProperty("labels") List<String> labels, @JsonProperty("targetVariables") List<String> targetVariables, @JsonProperty("weightDecayCoefficient") double weightDecayCoefficient, @JsonProperty("weightDecayApplyLearningRate") boolean weightDecayApplyLearningRate, @JsonProperty("updater") IUpdater updater, @JsonProperty("learningRate") double learningRate, @JsonProperty("learningRateSchedule") ISchedule learningRateSchedule, @JsonProperty("initialLossType") DataType initialLossType, @JsonProperty("lossFunction") LossFunctions.LossFunction lossFunction, @JsonProperty("debugMode") boolean debugMode, @JsonProperty("verboseMode") boolean verboseMode ) { this.modelUri = modelUri; this.l1 = l1; this.l2 = l2; this.modelSaveOutputPath = modelSaveOutputPath; this.numEpochs = numEpochs; this.lossVariables = lossVariables; this.inputFeatures = inputFeatures; this.targetVariables = targetVariables; this.labels = labels; this.weightDecayApplyLearningRate = weightDecayApplyLearningRate; this.weightDecayCoefficient = weightDecayCoefficient; this.learningRate = learningRate; this.learningRateSchedule = learningRateSchedule; this.updater = updater; this.lossFunction = lossFunction; if(initialLossType != null) this.initialLossType = initialLossType; if(learningRate > 0 && learningRateSchedule != null) { this.updater.setLrAndSchedule(learningRate,learningRateSchedule); } this.debugMode = debugMode; this.verboseMode = verboseMode; } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/KonduitServingTensorflowJsonMapping.java
package ai.konduit.serving.models.tensorflow;import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT public class KonduitServingTensorflowJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/TensorflowModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-tensorflow") //TODO AB 2020/05/29 Need to add required dependencies - CPU/GPU public class TensorflowModuleInfo { private TensorflowModuleInfo(){} }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/format/TFNDArray.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow.format; import ai.konduit.serving.models.tensorflow.util.TensorFlowUtil; import ai.konduit.serving.pipeline.api.data.NDArrayType; import ai.konduit.serving.pipeline.impl.data.ndarray.BaseNDArray; import org.nd4j.common.base.Preconditions; import org.tensorflow.DataType; import org.tensorflow.Tensor; import java.util.Arrays; public class TFNDArray extends BaseNDArray<Tensor> { public TFNDArray(Tensor array) { super(array); } @Override public NDArrayType type() { DataType dt = array.dataType(); return TensorFlowUtil.fromTFType(dt); } @Override public long[] shape() { return array.shape(); } @Override public long size(int dimension) { int rank = rank(); Preconditions.checkState(dimension >= -rank && dimension < rank, "Invalid dimension: Got %s for rank %s array", dimension, rank); if(dimension < 0) dimension += rank; return array.shape()[dimension]; } @Override public int rank() { return array.shape().length; } @Override public String toString() { return "TensorFlowNDArray(type=" + type() + ",shape=" + Arrays.toString(shape()) + ")"; } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/format/TFNDArrayFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow.format; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.format.NDArrayFactory; import org.nd4j.common.base.Preconditions; import org.tensorflow.Tensor; import java.util.HashSet; import java.util.Set; public class TFNDArrayFactory implements NDArrayFactory { @Override public Set<Class<?>> supportedTypes() { Set<Class<?>> s = new HashSet<>(); s.add(Tensor.class); return s; } @Override public boolean canCreateFrom(Object o) { return o instanceof Tensor; } @Override public NDArray create(Object o) { Preconditions.checkState(canCreateFrom(o), "Unable to create TensorFlow NDArray from object of %s", o.getClass()); Tensor a; if(o instanceof Tensor){ a = (Tensor) o; } else { throw new IllegalStateException("Format not supported: " + o.getClass()); } //TODO add all the other java types! return new TFNDArray(a); } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/format/TensorFlowConverters.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow.format; import ai.konduit.serving.models.tensorflow.util.TensorFlowUtil; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.NDArrayType; import ai.konduit.serving.pipeline.api.format.NDArrayConverter; import ai.konduit.serving.pipeline.api.format.NDArrayFormat; import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray; import lombok.AllArgsConstructor; import org.nd4j.common.base.Preconditions; import org.nd4j.common.util.ArrayUtil; import org.tensorflow.Tensor; import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.ByteOrder; public class TensorFlowConverters { private TensorFlowConverters(){ } @AllArgsConstructor public static class SerializedToTensorFlowConverter implements NDArrayConverter { @Override public boolean canConvert(NDArray from, NDArrayFormat to) { return canConvert(from, to.formatType()); } @Override public boolean canConvert(NDArray from, Class<?> to) { return SerializedNDArray.class.isAssignableFrom(from.get().getClass()) && Tensor.class.isAssignableFrom(to); } @Override public <U> U convert(NDArray from, Class<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert NDArray to %s", to); SerializedNDArray t = (SerializedNDArray) from.get(); Tensor<?> arr = convert(t); return (U)arr; } @Override public <U> U convert(NDArray from, NDArrayFormat<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert to format: %s", to); SerializedNDArray f = (SerializedNDArray) from.get(); Tensor<?> arr = convert(f); return (U)arr; } public Tensor<?> convert(SerializedNDArray from){ long[] shape = from.getShape(); Class<?> tfType = TensorFlowUtil.toTFType(from.getType()); Buffer buffer = (Buffer) from.getBuffer(); buffer.rewind(); Tensor<?> t = Tensor.create(tfType, shape, from.getBuffer()); return t; } } @AllArgsConstructor public static class TensorFlowToSerializedConverter implements NDArrayConverter { @Override public boolean canConvert(NDArray from, NDArrayFormat to) { return canConvert(from, to.formatType()); } @Override public boolean canConvert(NDArray from, Class<?> to) { return Tensor.class.isAssignableFrom(from.get().getClass()) && SerializedNDArray.class.isAssignableFrom(to); } @Override public <U> U convert(NDArray from, Class<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert NDArray to %s", to); Tensor<?> t = (Tensor<?>) from.get(); SerializedNDArray arr = convert(t); return (U)arr; } @Override public <U> U convert(NDArray from, NDArrayFormat<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert to format: %s", to); Tensor<?> t = (Tensor<?>) from.get(); SerializedNDArray arr = convert(t); return (U)arr; } public 
SerializedNDArray convert(Tensor<?> from){ long[] shape = from.shape(); NDArrayType t = TensorFlowUtil.fromTFType(from.dataType()); int w = t.width(); long length = ArrayUtil.prodLong(shape); long lengthBytes = w * length; ByteBuffer bb = ByteBuffer.allocateDirect((int)lengthBytes).order(ByteOrder.nativeOrder()); //TODO SerializedNDArray should be in little endian... from.writeTo(bb); return new SerializedNDArray(t, shape, bb); } } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/step/TensorFlowPipelineStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow.step; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class TensorFlowPipelineStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof TensorFlowStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step of type: %s", step.getClass()); return new TensorFlowRunner((TensorFlowStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/step/TensorFlowRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow.step; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.protocol.URIResolver; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.impl.data.ValueNotFoundException; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FileUtils; import org.bytedeco.javacpp.Loader; import org.nd4j.common.base.Preconditions; import org.tensorflow.Graph; import org.tensorflow.SavedModelBundle; import org.tensorflow.Session; import org.tensorflow.Tensor; import java.io.File; import java.util.Arrays; import java.util.List; @Slf4j @CanRun(TensorFlowStep.class) public class TensorFlowRunner implements PipelineStepRunner { static { //ensure native libraries get loaded Loader.load(org.bytedeco.tensorflow.presets.tensorflow.class); } private final TensorFlowStep step; private Graph graph; private Session sess; public TensorFlowRunner(@NonNull TensorFlowStep step) { this.step = step; init(); } @Override public void close() { if(sess != null) sess.close(); //TODO AB 2020/05/13 - For some reason this deadlocks, never returns :/ // if(graph != null ) // graph.close(); } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { Preconditions.checkState(step.inputNames() != null, "TensorFlowStep input array names are not set (null)"); Session.Runner r = sess.runner(); for (String s : step.inputNames()) { if(!data.has(s)){ throw new ValueNotFoundException( "Error in TensorFlowStep: Input data does not have a value corresponding to TensorFlowStep.inputNames value \"" + s + "\" - data keys = " + data.keys()); } if(data.type(s) != ValueType.NDARRAY){ String listType = data.type(s) == ValueType.LIST ? data.listType(s).toString() : null; throw new ValueNotFoundException( "Error in TensorFlowStep (" + name() + "): Input data value corresponding to TensorFlowStep.inputNames value \"" + s + "\" is not an NDArray type - is " + (listType == null ? 
data.type(s) : "List<" + listType + ">")); } NDArray arr = data.getNDArray(s); //TODO checks Tensor<?> t = arr.getAs(Tensor.class); //TODO casting r.feed(s, t); } List<String> outNames = step.outputNames(); for (String s : outNames) { String name; int idx; if (s.contains(":")) { //TODO checks int i = s.indexOf(":"); name = s.substring(0, i); idx = Integer.parseInt(s.substring(i + 1)); } else { name = s; idx = 0; } r.fetch(name, idx); } List<Tensor<?>> l; try{ l = r.run(); } catch (Throwable t){ StringBuilder sb = new StringBuilder(); sb.append("TensorFlow exception in TensorFlowStep (" + name() + "). Input shapes:\n"); for(String s : step.inputNames()){ NDArray arr = data.getNDArray(s); sb.append(s).append(": ").append(Arrays.toString(arr.shape())).append("\n"); } throw new RuntimeException(sb.toString(), t); } Data out = Data.empty(); for (int i = 0; i < outNames.size(); i++) { Tensor<?> t = l.get(i); NDArray arr = NDArray.create(t); out.put(outNames.get(i), arr); } return out; } protected void init() { try { initHelper(); } catch (Throwable t) { throw new RuntimeException("Error loading TensorFlow model", t); } } protected void initHelper() throws Exception { //File origFile = new File(new URI(step.getModelUri())); String uri = step.modelUri(); File origFile = URIResolver.getFile(uri); Preconditions.checkState(origFile.exists(), "Model file does not exist: " + uri); //Try to load frozen model: Throwable frozenErr = null; try { byte[] bytes = FileUtils.readFileToByteArray(origFile); graph = new Graph(); graph.importGraphDef(bytes); log.info("Loaded TensorFlow frozen model"); } catch (Throwable t) { frozenErr = t; graph = null; } //Try to load saved model: //TF has bad API here: The DIRECTORY path is provided, and the file must be exactly "saved_model.pb" - this is hardcoded in TF if (graph == null) { try { File dir = ai.konduit.serving.pipeline.util.FileUtils.getTempFileDir("tf_model_" + System.nanoTime()); File f = new File(dir, "saved_model.pb"); FileUtils.copyFile(origFile, f); SavedModelBundle b = SavedModelBundle.load(dir.getAbsolutePath(), "serve"); graph = b.graph(); log.info("Loaded TensorFlow SavedModel"); } catch (Throwable t) { log.error("Error loading graph: Attempted to load as both a frozen model .pb and a SavedModel .pb - both failed"); log.error("Frozen model loading exception:", frozenErr); log.error("SavedModel loading exception:", t); throw new IllegalStateException("Unable to load TensorFlow model as either a frozen model .pb or Savedmodel .pb", t); } } this.sess = new Session(graph); } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow/0.3.0/ai/konduit/serving/models/tensorflow/util/TensorFlowUtil.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow.util; import ai.konduit.serving.pipeline.api.data.NDArrayType; import org.tensorflow.DataType; import org.tensorflow.types.UInt8; public class TensorFlowUtil { private TensorFlowUtil(){ } public static Class<?> toTFType(NDArrayType t){ switch (t){ case DOUBLE: return Double.class; case FLOAT: return Float.class; case INT64: return Long.class; case INT32: return Integer.class; case INT16: return Short.class; case INT8: return Byte.class; case UINT8: return UInt8.class; case BOOL: return Boolean.class; case UTF8: return String.class; case UINT64: case UINT32: case UINT16: case BFLOAT16: case FLOAT16: default: throw new UnsupportedOperationException("Type not supported by TF Java: " + t); } } public static NDArrayType fromTFType(DataType dataType){ switch (dataType){ case FLOAT: return NDArrayType.FLOAT; case DOUBLE: return NDArrayType.DOUBLE; case INT32: return NDArrayType.INT32; case UINT8: return NDArrayType.UINT8; case STRING: return NDArrayType.UTF8; case INT64: return NDArrayType.INT64; case BOOL: return NDArrayType.BOOL; default: throw new UnsupportedOperationException("Unknown TF type: " + dataType); } } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow-config/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow-config/0.3.0/ai/konduit/serving/models/tensorflow/step/KonduitServingTensorflowConfigJsonMapping.java
package ai.konduit.serving.models.tensorflow.step;import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT public class KonduitServingTensorflowConfigJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); l.add(new JsonSubType("TENSORFLOW", ai.konduit.serving.models.tensorflow.step.TensorFlowStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-tensorflow-config/0.3.0/ai/konduit/serving/models/tensorflow
java-sources/ai/konduit/serving/konduit-serving-tensorflow-config/0.3.0/ai/konduit/serving/models/tensorflow/step/TensorFlowStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.tensorflow.step; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; @Data @NoArgsConstructor @AllArgsConstructor @Accessors(fluent = true) @JsonName("TENSORFLOW") @Schema(description = "A pipeline step that configures a TensorFlow model that is to be executed.") public class TensorFlowStep implements PipelineStep { @Schema(description = "A list of names of the input placeholders.") private List<String> inputNames; @Schema(description = "A list of names of the output arrays - i.e., what should be predicted.") private List<String> outputNames; @Schema(description = "Uniform Resource Identifier of model") private String modelUri; @Tolerate public TensorFlowStep inputNames(String... inputNames) { return this.inputNames(Arrays.asList(inputNames)); } @Tolerate public TensorFlowStep outputNames(String... outputNames) { return this.outputNames(Arrays.asList(outputNames)); } }