java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/capture/FrameCaptureRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.capture;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.javacv.*;

@Slf4j
@CanRun(CameraFrameCaptureStep.class)
public class FrameCaptureRunner implements PipelineStepRunner {

    protected final PipelineStep step;
    protected final int skip;
    protected final String outputKey;
    protected boolean initialized;
    protected FrameGrabber grabber;
    protected OpenCVFrameConverter.ToIplImage converter;
    protected boolean loop = false;
    private Runnable init;

    public FrameCaptureRunner(CameraFrameCaptureStep step) {
        this.outputKey = step.outputKey();
        this.step = step;
        init = () -> this.initOpenCVFrameGrabber(step);
        this.skip = -1;
    }

    public FrameCaptureRunner(VideoFrameCaptureStep step) {
        this.outputKey = step.outputKey();
        this.step = step;
        init = () -> this.initFFmpegFrameGrabber(step);
        this.skip = step.skipFrames() == null ? 0 : step.skipFrames();
    }

    @Override
    public synchronized void close() {
        if (initialized) {
            initialized = false;
            try {
                grabber.stop();
                grabber.close();
            } catch (Throwable t) {
                log.warn("Error stopping/closing FrameGrabber", t);
            }
        }
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public synchronized Data exec(Context ctx, Data data) {
        if (!initialized)
            init.run();
        try {
            Frame frame = grabber.grab();
            if (frame == null && loop) {
                frame = grabber.grab();
            }
            frame = frame.clone(); //Clone: otherwise the buffer will be reused and overwritten asynchronously in async pipelines
            Image i = Image.create(frame);
            //System.out.println("IMAGE: h=" + i.height() + ", w=" + i.width());

            if (skip > 0) {
                //Using setFrameNumber can be costly - seems to require decoding from the last keyframe?
                //i.e., the cost of calling setFrameNumber grows linearly over time, then jumps back to a few ms periodically
                // (presumably once it hits the next keyframe)
                int maxFrames = grabber.getLengthInFrames();
                if (skip >= 20) {    //TODO this threshold should be selected based
                    int currFrame = grabber.getFrameNumber();
                    int setFrame = Math.min(maxFrames, currFrame + skip);
                    grabber.setFrameNumber(setFrame);
                } else {
                    for (int j = 0; j < skip; j++) {
                        if (grabber.grab() == null)
                            break;
                    }
                }
            }

            return Data.singleton(outputKey, i);
        } catch (Throwable t) {
            throw new RuntimeException("Error getting frame", t);
        }
    }

    protected void initOpenCVFrameGrabber(CameraFrameCaptureStep step) {
        grabber = new OpenCVFrameGrabber(step.camera());
        converter = new OpenCVFrameConverter.ToIplImage();

        int w = step.width();
        int h = step.height();

        grabber.setImageHeight(h);
        grabber.setImageWidth(w);

        try {
            grabber.start();
        } catch (Throwable t) {
            log.error("Failed to start video frame grabber with step {}", step);
            throw new RuntimeException("Failed to start video frame grabber", t);
        }
        initialized = true;
    }

    protected void initFFmpegFrameGrabber(VideoFrameCaptureStep step) {
        grabber = new FFmpegFrameGrabber(step.filePath());
        loop = step.loop();
        converter = new OpenCVFrameConverter.ToIplImage();
        try {
            grabber.start();
        } catch (Throwable t) {
            log.error("Failed to start video frame grabber with step {}", step);
            throw new RuntimeException("Failed to start video frame grabber", t);
        }
        initialized = true;
    }
}
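A minimal usage sketch for the runner above (not from the source): it grabs a single frame from a video file via the factory defined in the next file. The fluent setters on VideoFrameCaptureStep are an assumption based on the Lombok-style accessors (filePath(), loop(), skipFrames()) the runner calls; the sketch also assumes it lives in the same package as the step classes.

import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;

public class FrameCaptureSketch {
    public static void main(String[] args) {
        VideoFrameCaptureStep step = new VideoFrameCaptureStep()
                .filePath("/path/to/video.mp4")    //hypothetical fluent setter
                .loop(true)                        //hypothetical fluent setter
                .skipFrames(5);                    //hypothetical fluent setter

        PipelineStepRunner runner = new FrameCaptureStepRunnerFactory().create(step);
        Data out = runner.exec(null, Data.empty()); //capture steps ignore the input Data
        System.out.println(out.keys());             //contains the configured output key
        runner.close();                             //stops and closes the FrameGrabber
    }
}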
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/capture/FrameCaptureStepRunnerFactory.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.capture;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;

public class FrameCaptureStepRunnerFactory implements PipelineStepRunnerFactory {
    @Override
    public boolean canRun(PipelineStep pipelineStep) {
        return (pipelineStep instanceof CameraFrameCaptureStep) ||
                (pipelineStep instanceof VideoFrameCaptureStep);
    }

    @Override
    public PipelineStepRunner create(PipelineStep step) {
        Preconditions.checkState(canRun(step), "Unable to run pipeline step of type %s", step.getClass());
        if (step instanceof CameraFrameCaptureStep) {
            return new FrameCaptureRunner((CameraFrameCaptureStep) step);
        } else {
            return new FrameCaptureRunner((VideoFrameCaptureStep) step);
        }
    }
}
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/crop/ImageCropFactory.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.crop;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;

public class ImageCropFactory implements PipelineStepRunnerFactory {
    @Override
    public boolean canRun(PipelineStep pipelineStep) {
        return pipelineStep instanceof ImageCropStep;
    }

    @Override
    public PipelineStepRunner create(PipelineStep pipelineStep) {
        Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep);
        return new ImageCropRunner((ImageCropStep) pipelineStep);
    }
}
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/crop/ImageCropRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.crop;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.data.image.convert.ImageToNDArray;
import ai.konduit.serving.data.image.convert.config.AspectRatioHandling;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.*;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.util.DataUtils;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.javacpp.indexer.UByteIndexer;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Size;
import org.nd4j.common.base.Preconditions;
import org.nd4j.common.primitives.Pair;

import java.util.ArrayList;
import java.util.List;

@CanRun(ImageCropStep.class)
public class ImageCropRunner implements PipelineStepRunner {

    private final ImageCropStep step;

    public ImageCropRunner(ImageCropStep step) {
        this.step = step;
    }

    @Override
    public void close() {
        //No op
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        //First: get names
        String name = step.imageName();
        if (name == null || name.isEmpty()) {
            String errMultipleKeys = "ImageCropStep: Image field name was not provided and could not be inferred: multiple Image or List<Image> fields exist: %s and %s";
            String errNoKeys = "ImageCropStep: Image field name was not provided and could not be inferred: no Image or List<Image> fields exist in input Data";
            name = DataUtils.inferField(data, ValueType.IMAGE, true, errMultipleKeys, errNoKeys);
        }

        if (!data.has(name)) {
            throw new IllegalStateException("Input image name \"" + name + "\" (via ImageCropStep.imageName config) " +
                    "is not present in the input Data instance. Data keys: " + data.keys());
        }

        if (!(data.type(name) == ValueType.IMAGE || (data.type(name) == ValueType.LIST && data.listType(name) == ValueType.IMAGE))) {
            String t = data.type(name) == ValueType.LIST ? "List<" + data.listType(name) + ">" : data.type(name).name();
            throw new IllegalStateException("Input image name \"" + name + "\" (via ImageCropStep.imageName config) " +
                    "is present but is not an Image or List<Image> type. Data[\"" + name + "\"].type == " + t);
        }

        Data out = data.clone();
        if (data.type(name) == ValueType.IMAGE) {
            Image i = data.getImage(name);
            out.put(name, crop(data, i));
        } else {
            //Must be list
            List<Image> list = data.getListImage(name);
            List<Image> newList = new ArrayList<>(list.size());
            for (Image i : list) {
                newList.add(crop(data, i));
            }
            out.putListImage(name, newList);
        }
        return out;
    }

    protected Image crop(Data d, Image in) {
        double x1, y1, x2, y2;
        if (step.cropBox() != null) {
            BoundingBox bb = step.cropBox();
            x1 = bb.x1();
            y1 = bb.y1();
            x2 = bb.x2();
            y2 = bb.y2();
        } else if (step.cropPoints() != null) {
            List<Point> p = step.cropPoints();
            Preconditions.checkState(p.size() == 2, "Expected 2 points for ImageCropStep.cropPoints field, got %s", p);
            x1 = p.get(0).x();
            y1 = p.get(0).y();
            x2 = p.get(1).x();
            y2 = p.get(1).y();
        } else if (step.cropName() != null) {
            String n = step.cropName();
            Preconditions.checkState(d.has(n), "ImageCropStep.cropName = \"%s\" but input data does not have any field by this name", n);
            if (d.type(n) != ValueType.BOUNDING_BOX && !(d.type(n) == ValueType.LIST && d.listType(n) == ValueType.POINT)) {
                String typeName = d.type(n) != ValueType.LIST ? "" + d.type(n) : "List<" + d.listType(n) + ">";
                throw new IllegalStateException("ImageCropStep.cropName must specify a BoundingBox or List<Point> field in the input Data instance, " +
                        "but Data[\"" + n + "\"] has type " + typeName);
            }
            if (d.type(n) == ValueType.BOUNDING_BOX) {
                BoundingBox bb = d.getBoundingBox(n);
                x1 = bb.x1();
                y1 = bb.y1();
                x2 = bb.x2();
                y2 = bb.y2();
            } else {
                List<Point> p = d.getListPoint(n);
                Preconditions.checkState(p.size() == 2, "Expected 2 points for Data[ImageCropStep.cropName] field, got %s", p);
                x1 = p.get(0).x();
                y1 = p.get(0).y();
                x2 = p.get(1).x();
                y2 = p.get(1).y();
            }
        } else {
            throw new IllegalStateException("Error in ImageCropStep: one of cropPoints, cropBox or cropName must be set, but all are null");
        }

        if (!step.coordsArePixels()) {
            x1 *= in.width();
            x2 *= in.width();
            y1 *= in.height();
            y2 *= in.height();
        }

        int px1 = (int) Math.round(x1);
        int px2 = (int) Math.round(x2);
        int py1 = (int) Math.round(y1);
        int py2 = (int) Math.round(y2);

        Mat m = in.getAs(Mat.class);
        if (inBoundW(px1, in) && inBoundW(px2, in) && inBoundH(py1, in) && inBoundH(py2, in)) {
            //Easy/normal case - crop within image
            Mat out = m.apply(new Rect(px1, py1, (px2 - px1), (py2 - py1)));
            return Image.create(out);
        } else {
            //Crop region is at least partially outside the input image region
            Mat out = new Mat(py2 - py1, px2 - px1, m.type()); //Mat(rows, cols, type): rows = height, cols = width (args were swapped in the original)
            UByteIndexer u = out.createIndexer(!Loader.getPlatform().startsWith("android"));
            u.pointer().zero();

            if ((inBoundW(px1, in) || inBoundW(px2, in)) && (inBoundH(py1, in) || inBoundH(py2, in))) {
                //Part of the input image overlaps with the output crop region
                int ix1 = Math.max(0, px1);
                int iy1 = Math.max(0, py1);
                int ix2 = Math.min(in.width() - 1, px2);
                int iy2 = Math.min(in.height() - 1, py2);
                Mat sub = m.apply(new Rect(ix1, iy1, (ix2 - ix1), (iy2 - iy1))); //Subset of the input image

                //Now, need to work out the coordinates of the output image to copy it to
                int x1c = 0;
                int y1c = 0;
                if (px1 < 0) {
                    x1c = out.cols() - sub.cols();
                }
                if (py1 < 0) {
                    y1c = out.rows() - sub.rows();
                }
                Mat outSub = out.apply(new Rect(x1c, y1c, sub.cols(), sub.rows()));
                sub.copyTo(outSub);
            }

            return Image.create(out);
        }
    }

    protected boolean inBoundW(int x, Image img) {
        return x >= 0 && x < img.width();
    }

    protected boolean inBoundH(int y, Image img) {
        return y >= 0 && y < img.height();
    }
}
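A worked example of the coordinate handling in crop() above, using plain arithmetic only (no Konduit API): with coordsArePixels == false, a crop box of (0.25, 0.25) to (0.75, 0.75) on a 640x480 image scales to pixel coordinates and rounds as follows.

public class CropCoordsExample {
    public static void main(String[] args) {
        int width = 640, height = 480;
        double x1 = 0.25, y1 = 0.25, x2 = 0.75, y2 = 0.75;
        int px1 = (int) Math.round(x1 * width);   // 160
        int py1 = (int) Math.round(y1 * height);  // 120
        int px2 = (int) Math.round(x2 * width);   // 480
        int py2 = (int) Math.round(y2 * height);  // 360
        //The resulting crop Rect is (x=160, y=120, w=320, h=240)
        System.out.printf("Rect(%d, %d, %d, %d)%n", px1, py1, px2 - px1, py2 - py1);
    }
}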
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/face/CropUtil.java
package ai.konduit.serving.data.image.step.face;

import ai.konduit.serving.data.image.convert.ImageToNDArray;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
import ai.konduit.serving.pipeline.api.data.BoundingBox;
import ai.konduit.serving.pipeline.api.data.Image;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Size;
import org.nd4j.common.base.Preconditions;

public class CropUtil {

    public static Mat scaleIfRequired(Mat m, DrawFaceKeyPointsStep step) {
        if (step.scale() != null && step.scale() != step.scale().NONE) {
            boolean scaleRequired = false;
            int newH = 0;
            int newW = 0;
            if (step.scale() == step.scale().AT_LEAST) {
                if (m.rows() < step.resizeH() || m.cols() < step.resizeW()) {
                    scaleRequired = true;
                    double ar = m.cols() / (double) m.rows();
                    if (m.rows() < step.resizeH() && m.cols() >= step.resizeW()) {
                        //Scale height
                        newW = step.resizeW();
                        newH = (int) (newW / ar);
                    } else if (m.rows() > step.resizeH() && m.cols() < step.resizeW()) {
                        //Scale width
                        newH = step.resizeH();
                        newW = (int) (ar * newH);
                    } else {
                        //Scale both dims...
                        if ((int) (step.resizeW() / ar) < step.resizeH()) {
                            //Scale height
                            newW = step.resizeW();
                            newH = (int) (newW / ar);
                        } else {
                            //Scale width
                            newH = step.resizeH();
                            newW = (int) (ar * newH);
                        }
                    }
                }
            } else if (step.scale() == step.scale().AT_MOST) {
                Preconditions.checkState(step.resizeH() > 0 && step.resizeW() > 0,
                        "Invalid resize: resizeH=%s, resizeW=%s", step.resizeH(), step.resizeW());
                if (m.rows() > step.resizeH() || m.cols() > step.resizeW()) {
                    scaleRequired = true;
                    double ar = m.cols() / (double) m.rows();
                    if (m.rows() > step.resizeH() && m.cols() <= step.resizeW()) {
                        //Scale height
                        newW = step.resizeW();
                        newH = (int) (newW / ar);
                    } else if (m.rows() < step.resizeH() && m.cols() > step.resizeW()) {
                        //Scale width
                        newH = step.resizeH();
                        newW = (int) (ar * newH);
                    } else {
                        //Scale both dims...
                        if ((int) (step.resizeW() / ar) > step.resizeH()) {
                            //Scale height
                            newW = step.resizeW();
                            newH = (int) (newW / ar);
                        } else {
                            //Scale width
                            newH = step.resizeH();
                            newW = (int) (ar * newH);
                        }
                    }
                }
            }

            if (scaleRequired) {
                Mat resized = new Mat();
                org.bytedeco.opencv.global.opencv_imgproc.resize(m, resized, new Size(newW, newH)); //Size(width, height) - the args were swapped in the original
                return resized;
            } else {
                return m;
            }
        } else {
            return m;
        }
    }

    public static BoundingBox accountForCrop(Image image, BoundingBox bbox, ImageToNDArrayConfig config) {
        if (config == null)
            return bbox;

        BoundingBox cropRegion = ImageToNDArray.getCropRegion(image, config);
        double cropWidth = cropRegion.width();
        double cropHeight = cropRegion.height();

        double x1 = cropRegion.x1() + cropWidth * bbox.x1();
        double x2 = cropRegion.x1() + cropWidth * bbox.x2();
        double y1 = cropRegion.y1() + cropHeight * bbox.y1();
        double y2 = cropRegion.y1() + cropHeight * bbox.y2();

        return BoundingBox.createXY(x1, x2, y1, y2, bbox.label(), bbox.probability());
    }
}
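A small numeric check of accountForCrop's mapping, as plain arithmetic (no Konduit API): a detection spanning (0.5, 0.5) to (1.0, 1.0) inside a crop region that covers the right half of the image, i.e. (0.5, 0.0) to (1.0, 1.0), lands at (0.75, 0.5) to (1.0, 1.0) in whole-image coordinates.

public class AccountForCropExample {
    public static void main(String[] args) {
        double cropX1 = 0.5, cropY1 = 0.0, cropW = 0.5, cropH = 1.0; //crop region, image fractions
        double bx1 = 0.5, by1 = 0.5, bx2 = 1.0, by2 = 1.0;           //bbox, crop-region fractions
        double x1 = cropX1 + cropW * bx1; // 0.75
        double y1 = cropY1 + cropH * by1; // 0.5
        double x2 = cropX1 + cropW * bx2; // 1.0
        double y2 = cropY1 + cropH * by2; // 1.0
        System.out.printf("(%.2f, %.2f) -> (%.2f, %.2f)%n", x1, y1, x2, y2);
    }
}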
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/face/DrawFaceKeyPointsRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.face;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
import ai.konduit.serving.data.image.util.ColorUtil;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.BoundingBox;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import lombok.NonNull;
import org.bytedeco.opencv.opencv_core.*;

import java.util.List;

import static ai.konduit.serving.data.image.step.face.CropUtil.accountForCrop;
import static ai.konduit.serving.data.image.step.face.CropUtil.scaleIfRequired;

@CanRun(DrawFaceKeyPointsStep.class)
public class DrawFaceKeyPointsRunner implements PipelineStepRunner {

    protected final DrawFaceKeyPointsStep step;

    public DrawFaceKeyPointsRunner(@NonNull DrawFaceKeyPointsStep step) {
        this.step = step;
    }

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        Image img = data.getImage(step.image());
        Mat m = img.getAs(Mat.class);
        float[][] landmarkArr = data.getNDArray(step.landmarkArray()).getAs(float[][].class);
        List<BoundingBox> faces_bboxes = data.getListBoundingBox("img_bbox");
        Mat scaled = scaleIfRequired(m, this.step);
        ImageToNDArrayConfig im2ndConf = step.imageToNDArrayConfig();

        if (!faces_bboxes.isEmpty()) {
            for (BoundingBox face_bbox : faces_bboxes) {
                BoundingBox bb = accountForCrop(img, face_bbox, im2ndConf);

                if (step.drawFaceBox()) {
                    double x1 = Math.min(bb.x1(), bb.x2());
                    double y1 = Math.min(bb.y1(), bb.y2());
                    int x = (int) (x1 * scaled.cols());
                    int y = (int) (y1 * scaled.rows());
                    int h = (int) Math.round(bb.height() * scaled.rows());
                    int w = (int) Math.round(bb.width() * scaled.cols());
                    Rect r = new Rect(x, y, w, h);
                    Scalar s;
                    if (step.faceBoxColor() == null) {
                        s = ColorUtil.stringToColor(DrawFaceKeyPointsStep.DEFAULT_BOX_COLOR);
                    } else {
                        s = ColorUtil.stringToColor(step.faceBoxColor());
                    }
                    org.bytedeco.opencv.global.opencv_imgproc.rectangle(scaled, r, s, 2, 8, 0);
                }

                int prod = landmarkArr.length * landmarkArr[0].length;
                float[][] keypoints = new float[prod / 2][2];
                int pos = 0;
                for (int i = 0; i < landmarkArr.length; i++) {
                    for (int j = 0; j < landmarkArr[0].length; j++) {
                        keypoints[pos / 2][pos % 2] = landmarkArr[i][j];
                        pos++;
                    }
                }

                for (int i = 0; i < keypoints.length; i++) {
                    //Currently, keypoint coordinates are specified in terms of the face bounding box.
                    //We need to translate them to overall image pixels
                    double xp = (bb.x1() + keypoints[i][0] * bb.width()) * img.width();
                    double yp = (bb.y1() + keypoints[i][1] * bb.height()) * img.height();
                    Point point = new Point((int) xp, (int) yp);
                    Scalar s;
                    if (step.pointColor() == null) {
                        s = ColorUtil.stringToColor(DrawFaceKeyPointsStep.DEFAULT_POINT_COLOR);
                    } else {
                        s = ColorUtil.stringToColor(step.pointColor());
                    }
                    int size = step.pointSize();
                    org.bytedeco.opencv.global.opencv_imgproc.circle(scaled, point, size, s);
                }

                if (im2ndConf.listHandling() == ImageToNDArrayConfig.ListHandling.FIRST ||
                        im2ndConf.listHandling() == ImageToNDArrayConfig.ListHandling.NONE) {
                    break;
                }
            }
        }

        String outputName = step.outputName();
        if (outputName == null) {
            outputName = DrawFaceKeyPointsStep.DEFAULT_OUTPUT_NAME;
        }
        return Data.singleton(step.image(), Image.create(scaled));
    }
}
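The keypoint translation above in isolation, as plain arithmetic: a landmark at (0.5, 0.5) relative to a face box spanning (0.2, 0.3) to (0.6, 0.7) of a 1000x800 image maps to pixel (400, 400).

public class KeypointTranslationExample {
    public static void main(String[] args) {
        int imgW = 1000, imgH = 800;
        double bbX1 = 0.2, bbY1 = 0.3, bbW = 0.4, bbH = 0.4; //face box, as fractions of the image
        double kx = 0.5, ky = 0.5;                           //keypoint, as fractions of the face box
        double xp = (bbX1 + kx * bbW) * imgW; // 400.0
        double yp = (bbY1 + ky * bbH) * imgH; // 400.0
        System.out.printf("pixel = (%d, %d)%n", (int) xp, (int) yp);
    }
}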
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/face/DrawFaceKeyPointsStepRunnerFactory.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.face;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;

public class DrawFaceKeyPointsStepRunnerFactory implements PipelineStepRunnerFactory {
    @Override
    public boolean canRun(PipelineStep pipelineStep) {
        return pipelineStep instanceof DrawFaceKeyPointsStep;
    }

    @Override
    public PipelineStepRunner create(PipelineStep pipelineStep) {
        Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep);
        return new DrawFaceKeyPointsRunner((DrawFaceKeyPointsStep) pipelineStep);
    }
}
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/grayscale/GrayScaleRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.grayscale;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.util.DataUtils;
import lombok.NonNull;
import org.bytedeco.opencv.opencv_core.Mat;

import static org.bytedeco.opencv.global.opencv_imgproc.*;

@CanRun({GrayScaleStep.class})
public class GrayScaleRunner implements PipelineStepRunner {

    protected final GrayScaleStep step;

    public GrayScaleRunner(@NonNull GrayScaleStep step) {
        this.step = step;
    }

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        String imgName = step.imageName();
        if (imgName == null) {
            //Note: the original messages referenced DrawGridStep/List<Point>; they describe Image field inference here
            String errMultipleKeys = "GrayScaleStep image field name was not provided and could not be inferred: multiple Image fields exist: %s and %s";
            String errNoKeys = "GrayScaleStep image field name was not provided and could not be inferred: no Image fields exist";
            imgName = DataUtils.inferField(data, ValueType.IMAGE, false, errMultipleKeys, errNoKeys);
        }

        Image i = data.getImage(imgName);
        Mat m = i.getAs(Mat.class).clone();
        cvtColor(m, m, COLOR_BGR2GRAY);

        //After grayscaling, convert the image back to 3 channels unless single-channel output was requested
        if (step.outputChannels() != 1)
            cvtColor(m, m, COLOR_GRAY2BGR);

        Data out = data.clone();
        Image outImg = Image.create(m);
        out.put(imgName, outImg);
        return out;
    }
}
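A minimal JavaCV sketch of the color conversion this runner performs, assuming a readable test image on disk (imread, cvtColor, and the COLOR_* constants are the standard OpenCV bindings used above):

import org.bytedeco.opencv.opencv_core.Mat;

import static org.bytedeco.opencv.global.opencv_imgcodecs.imread;
import static org.bytedeco.opencv.global.opencv_imgproc.*;

public class GrayScaleSketch {
    public static void main(String[] args) {
        Mat m = imread("/path/to/image.png"); //loads as 3-channel BGR
        cvtColor(m, m, COLOR_BGR2GRAY);       //now 1 channel
        cvtColor(m, m, COLOR_GRAY2BGR);       //back to 3 channels, still visually gray
        System.out.println("channels: " + m.channels()); //prints 3
    }
}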
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/grayscale/GrayScaleStepRunnerFactory.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.grayscale;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;

public class GrayScaleStepRunnerFactory implements PipelineStepRunnerFactory {
    @Override
    public boolean canRun(PipelineStep pipelineStep) {
        return pipelineStep instanceof GrayScaleStep;
    }

    @Override
    public PipelineStepRunner create(PipelineStep pipelineStep) {
        Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep);
        return new GrayScaleRunner((GrayScaleStep) pipelineStep);
    }
}
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/grid/crop/CropGridRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.grid.crop;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.*;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.util.DataUtils;
import lombok.NonNull;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Rect;
import org.nd4j.common.base.Preconditions;
import org.nd4j.common.primitives.Pair;

import java.util.ArrayList;
import java.util.List;

@CanRun({CropGridStep.class, CropFixedGridStep.class})
public class CropGridRunner implements PipelineStepRunner {

    protected final CropGridStep step;
    protected final CropFixedGridStep fStep;

    public CropGridRunner(@NonNull CropGridStep step) {
        this.step = step;
        this.fStep = null;
    }

    public CropGridRunner(@NonNull CropFixedGridStep step) {
        this.step = null;
        this.fStep = step;
    }

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        if (step != null)
            return step;
        return fStep;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        boolean fixed = fStep != null;

        String imgName = fixed ? fStep.imageName() : step.imageName();
        if (imgName == null) {
            String errMultipleKeys = "Image field name was not provided and could not be inferred: multiple image fields exist: %s and %s";
            String errNoKeys = "Image field name was not provided and could not be inferred: no image fields exist";
            imgName = DataUtils.inferField(data, ValueType.IMAGE, false, errMultipleKeys, errNoKeys);
        }

        Image i = data.getImage(imgName);

        List<Point> points;
        if (fixed) {
            points = fStep.points();
            Preconditions.checkState(points != null, "Error in CropFixedGridStep: points field was null (corner points" +
                    " must be provided for cropping via CropFixedGridStep.points field)");
        } else {
            String pName = step.pointsName();
            if (pName == null) {
                String errMultipleKeys = "CropGridStep pointsName field name was not provided and could not be inferred: multiple List<Point> fields exist: %s and %s";
                String errNoKeys = "CropGridStep pointsName field name was not provided and could not be inferred: no List<Point> fields exist";
                pName = DataUtils.inferListField(data, ValueType.POINT, errMultipleKeys, errNoKeys);
            }

            Preconditions.checkState(data.has(pName), "Error in CropGridStep: Input Data does not have any values for pointName=\"%s\"", pName);
            if (data.type(pName) != ValueType.LIST || data.listType(pName) != ValueType.POINT) {
                String type = (data.type(pName) == ValueType.LIST ? "List<" + data.listType(pName).toString() + ">" : "" + data.type(pName));
                throw new IllegalStateException("pointName = \"" + pName + "\" should be a length 4 List<Point> but is type " + type);
            }
            points = data.getListPoint(pName);
        }

        boolean isPx = fixed ? fStep.coordsArePixels() : step.coordsArePixels();
        List<Point> pxPoints;
        if (isPx) {
            pxPoints = points;
        } else {
            pxPoints = new ArrayList<>(4);
            for (Point p : points) {
                pxPoints.add(Point.create(p.x() * i.width(), p.y() * i.height()));
            }
        }

        Mat m = i.getAs(Mat.class);
        double gx = fixed ? fStep.gridX() : step.gridX();
        double gy = fixed ? fStep.gridY() : step.gridY();
        Pair<List<Image>, List<BoundingBox>> p = cropGrid(m, pxPoints, gx, gy);

        Data out;
        if (step != null ? step.keepOtherFields() : fStep.keepOtherFields()) {
            out = data.clone();
        } else {
            out = Data.empty();
        }

        String outName = (step != null ? step.outputName() : fStep.outputName());
        if (outName == null)
            outName = CropGridStep.DEFAULT_OUTPUT_NAME;
        out.putListImage(outName, p.getFirst());
        if (step != null ? step.boundingBoxName() != null : fStep.boundingBoxName() != null) {
            out.putListBoundingBox(step != null ? step.boundingBoxName() : fStep.boundingBoxName(), p.getSecond());
        }

        return out;
    }

    protected Pair<List<Image>, List<BoundingBox>> cropGrid(Mat m, List<Point> pxPoints, double gx, double gy) {
        Point tl = pxPoints.get(0);
        Point tr = pxPoints.get(1);
        Point bl = pxPoints.get(2);
        Point br = pxPoints.get(3);

        List<Image> out = new ArrayList<>();
        List<BoundingBox> bbox = (step != null ? step.boundingBoxName() != null : fStep.boundingBoxName() != null) ? new ArrayList<>() : null;

        //Note we are iterating (adding to output) in order: (0,0), (0, 1), ..., (0, C-1), ..., (R-1, C-1) - i.e., per row
        for (int j = 0; j < gy; j++) {
            for (int i = 0; i < gx; i++) {
                //Work out the corners of the current crop box
                Point boxTL = topLeft(j, i, (int) gy, (int) gx, tl, tr, bl, br);
                Point boxTR = topRight(j, i, (int) gy, (int) gx, tl, tr, bl, br);
                Point boxBL = bottomLeft(j, i, (int) gy, (int) gx, tl, tr, bl, br);
                Point boxBR = bottomRight(j, i, (int) gy, (int) gx, tl, tr, bl, br);

                double minX = min(boxTL.x(), boxTR.x(), boxBL.x(), boxBR.x());
                double maxX = max(boxTL.x(), boxTR.x(), boxBL.x(), boxBR.x());
                double minY = min(boxTL.y(), boxTR.y(), boxBL.y(), boxBR.y());
                double maxY = max(boxTL.y(), boxTR.y(), boxBL.y(), boxBR.y());

                int w = (int) (maxX - minX);
                int h = (int) (maxY - minY);

                if ((step != null && step.aspectRatio() != null) || (fStep != null && fStep.aspectRatio() != null)) {
                    double currAr = w / (double) h;
                    double ar = step != null ? step.aspectRatio() : fStep.aspectRatio();
                    if (ar < currAr) {
                        //Need to increase height dimension to give desired AR
                        int newH = (int) (w / ar);
                        minY -= (newH - h) / 2.0;
                        h = newH;
                    } else if (ar > currAr) {
                        //Need to increase width dimension to give desired AR
                        int newW = (int) (h * ar);
                        minX -= (newW - w) / 2.0;
                        w = newW;
                    }
                }

                //Make sure bounds are inside image. TODO handle this differently for aspect ratio preserving?
                if (minX < 0) {
                    w += minX;
                    minX = 0;
                }
                if (minX + w > m.cols()) {
                    w = m.cols() - (int) minX;
                }
                if (minY < 0) {
                    h += minY;
                    minY = 0;
                }
                if (minY + h > m.rows()) {
                    h = m.rows() - (int) minY;
                }

                Rect r = new Rect((int) minX, (int) minY, w, h);
                Mat crop = m.apply(r).clone();
                out.add(Image.create(crop));
                if (bbox != null) {
                    bbox.add(BoundingBox.createXY(minX / (double) m.cols(), (minX + w) / (double) m.cols(),
                            minY / (double) m.rows(), (minY + h) / (double) m.rows()));
                }
            }
        }

        return Pair.of(out, bbox);
    }

    private Point topLeft(int row, int col, int numRows, int numCols, Point tl, Point tr, Point bl, Point br) {
        //Here, we are stepping "row/numRows" of the way between TL/BL and TR/BR
        //This gives us the line along which this grid row's corners lie
        //Then we just need to step between those points
        /*
        i.e., for O=(1,2) we work out (x,y) for A and B, then step 2/numCols from A to B
        |-----------------|
        |                 |
        |                 |
        A-----|-----O-----B
        |     |     |     |
        |-----------------|
         */
        Point tlbl = fracBetween(row / (double) numRows, tl, bl);
        Point trbr = fracBetween(row / (double) numRows, tr, br);
        return fracBetween(col / (double) numCols, tlbl, trbr);
    }

    private Point bottomRight(int row, int col, int numRows, int numCols, Point tl, Point tr, Point bl, Point br) {
        return topLeft(row + 1, col + 1, numRows, numCols, tl, tr, bl, br);
    }

    private Point bottomLeft(int row, int col, int numRows, int numCols, Point tl, Point tr, Point bl, Point br) {
        return topLeft(row + 1, col, numRows, numCols, tl, tr, bl, br);
    }

    private Point topRight(int row, int col, int numRows, int numCols, Point tl, Point tr, Point bl, Point br) {
        return topLeft(row, col + 1, numRows, numCols, tl, tr, bl, br);
    }

    Point fracBetween(double frac, Point p1, Point p2) {
        return Point.create(fracBetween(frac, p1.x(), p2.x()), fracBetween(frac, p1.y(), p2.y()));
    }

    private double fracBetween(double frac, double a, double b) {
        return a + frac * (b - a);
    }

    private double min(double a, double b, double c, double d) {
        return Math.min(Math.min(a, b), Math.min(c, d));
    }

    private double max(double a, double b, double c, double d) {
        return Math.max(Math.max(a, b), Math.max(c, d));
    }
}
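The corner interpolation above, worked for one cell with plain arithmetic: for a 2x2 grid on the unit square with corners TL=(0,0), TR=(1,0), BL=(0,1), BR=(1,1), cell (row=1, col=1) has its top-left corner at (0.5, 0.5). The fracBetween helper below is copied from the runner.

public class GridCellCornerExample {
    static double fracBetween(double frac, double a, double b) {
        return a + frac * (b - a);
    }

    public static void main(String[] args) {
        int numRows = 2, numCols = 2, row = 1, col = 1;
        //Interpolate down the left (TL->BL) and right (TR->BR) edges, then across between those points
        double leftX = fracBetween(row / (double) numRows, 0, 0);   // 0.0
        double leftY = fracBetween(row / (double) numRows, 0, 1);   // 0.5
        double rightX = fracBetween(row / (double) numRows, 1, 1);  // 1.0
        double rightY = fracBetween(row / (double) numRows, 0, 1);  // 0.5
        double x = fracBetween(col / (double) numCols, leftX, rightX); // 0.5
        double y = fracBetween(col / (double) numCols, leftY, rightY); // 0.5
        System.out.printf("cell(1,1) top-left = (%.1f, %.1f)%n", x, y);
    }
}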
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/grid/crop/CropGridStepRunnerFactory.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.grid.crop;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;

public class CropGridStepRunnerFactory implements PipelineStepRunnerFactory {
    @Override
    public boolean canRun(PipelineStep pipelineStep) {
        return pipelineStep instanceof CropGridStep || pipelineStep instanceof CropFixedGridStep;
    }

    @Override
    public PipelineStepRunner create(PipelineStep pipelineStep) {
        Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep);
        if (pipelineStep instanceof CropGridStep) {
            return new CropGridRunner((CropGridStep) pipelineStep);
        } else {
            return new CropGridRunner((CropFixedGridStep) pipelineStep);
        }
    }
}
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/grid/draw/DrawGridRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.grid.draw;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.data.image.util.ColorConstants;
import ai.konduit.serving.data.image.util.ColorUtil;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.data.Point;
import ai.konduit.serving.pipeline.api.data.ValueType;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.util.DataUtils;
import lombok.NonNull;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Scalar;
import org.nd4j.common.base.Preconditions;

import java.util.ArrayList;
import java.util.List;

@CanRun({DrawGridStep.class, DrawFixedGridStep.class})
public class DrawGridRunner implements PipelineStepRunner {

    protected final DrawGridStep step;
    protected final DrawFixedGridStep fStep;

    public DrawGridRunner(@NonNull DrawGridStep step) {
        this.step = step;
        this.fStep = null;
    }

    public DrawGridRunner(@NonNull DrawFixedGridStep step) {
        this.step = null;
        this.fStep = step;
    }

    @Override
    public void close() {
    }

    @Override
    public PipelineStep getPipelineStep() {
        if (step != null)
            return step;
        return fStep;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        boolean fixed = fStep != null;

        String imgName = fixed ? fStep.imageName() : step.imageName();
        if (imgName == null) {
            //Note: the original messages referenced List<Point> fields; this block infers the Image field
            String errMultipleKeys = "DrawGridStep image field name was not provided and could not be inferred: multiple Image fields exist: %s and %s";
            String errNoKeys = "DrawGridStep image field name was not provided and could not be inferred: no Image fields exist";
            imgName = DataUtils.inferField(data, ValueType.IMAGE, false, errMultipleKeys, errNoKeys);
        }

        Image i = data.getImage(imgName);

        List<Point> points;
        if (fixed) {
            points = fStep.points();
        } else {
            String pName = step.pointsName();
            if (pName == null || pName.isEmpty()) {
                String errMultipleKeys = "DrawGridStep points field name was not provided and could not be inferred: multiple List<Point> fields exist: %s and %s";
                String errNoKeys = "DrawGridStep points field name was not provided and could not be inferred: no List<Point> fields exist";
                pName = DataUtils.inferListField(data, ValueType.POINT, errMultipleKeys, errNoKeys);
            }

            Preconditions.checkState(data.has(pName), "Error in DrawGridStep: Input Data does not have any values for pointName=\"%s\"", pName);
            if (data.type(pName) != ValueType.LIST || data.listType(pName) != ValueType.POINT) {
                String type = (data.type(pName) == ValueType.LIST ? "List<" + data.listType(pName).toString() + ">" : "" + data.type(pName));
                throw new IllegalStateException("pointName = \"" + pName + "\" should be a length 4 List<Point> but is type " + type);
            }
            points = data.getListPoint(pName);
        }

        Preconditions.checkState(points != null && points.size() == 4, "Input List<Points> must have length 4, got %s", points);

        Mat m = i.getAs(Mat.class).clone();

        Scalar borderColor;
        int borderThickness;
        if (fixed) {
            borderColor = fStep.borderColor() == null ? ColorUtil.stringToColor(DrawGridStep.DEFAULT_COLOR) : ColorUtil.stringToColor(fStep.borderColor());
            borderThickness = fStep.borderThickness();
        } else {
            borderColor = step.borderColor() == null ? ColorUtil.stringToColor(DrawGridStep.DEFAULT_COLOR) : ColorUtil.stringToColor(step.borderColor());
            borderThickness = step.borderThickness();
        }
        if (borderThickness <= 0)
            borderThickness = 1;

        List<Point> pixelPoints;
        if (fixed && fStep.coordsArePixels() || !fixed && step.coordsArePixels()) {
            pixelPoints = points;
        } else {
            //Fraction points
            pixelPoints = new ArrayList<>(4);
            for (Point p : points) {
                pixelPoints.add(Point.create(p.x() * i.width(), p.y() * i.height()));
            }
        }

        //Draw border:
        drawLine(m, borderColor, borderThickness, pixelPoints.get(0), pixelPoints.get(1));  //TL -> TR
        drawLine(m, borderColor, borderThickness, pixelPoints.get(0), pixelPoints.get(2));  //TL -> BL
        drawLine(m, borderColor, borderThickness, pixelPoints.get(1), pixelPoints.get(3));  //TR -> BR
        drawLine(m, borderColor, borderThickness, pixelPoints.get(2), pixelPoints.get(3));  //BL -> BR

        Scalar gridColor;
        int gridThickness;
        if (fixed) {
            gridColor = fStep.gridColor() == null ? borderColor : ColorUtil.stringToColor(fStep.gridColor());
            gridThickness = fStep.gridThickness() == null ? borderThickness : fStep.gridThickness();
        } else {
            gridColor = step.gridColor() == null ? borderColor : ColorUtil.stringToColor(step.gridColor());
            gridThickness = step.gridThickness() == null ? borderThickness : step.gridThickness();
        }
        if (gridThickness <= 0)
            gridThickness = 1;

        int gridX = fixed ? fStep.gridX() : step.gridX();
        int gridY = fixed ? fStep.gridY() : step.gridY();
        drawGrid(m, gridColor, gridThickness, pixelPoints, gridX, gridY);

        Data out = data.clone();
        Image outImg = Image.create(m);
        out.put(imgName, outImg);
        return out;
    }

    protected void drawLine(Mat m, Scalar color, int thickness, Point p1, Point p2) {
        org.bytedeco.opencv.opencv_core.Point pa = new org.bytedeco.opencv.opencv_core.Point((int) p1.x(), (int) p1.y());
        org.bytedeco.opencv.opencv_core.Point pb = new org.bytedeco.opencv.opencv_core.Point((int) p2.x(), (int) p2.y());
        int lineType = 8;
        int shift = 0;
        org.bytedeco.opencv.global.opencv_imgproc.line(m, pa, pb, color, thickness, lineType, shift);
    }

    protected void drawLine(Mat m, Scalar color, int thickness, int x1, int x2, int y1, int y2) {
        org.bytedeco.opencv.opencv_core.Point p1 = new org.bytedeco.opencv.opencv_core.Point(x1, y1);
        org.bytedeco.opencv.opencv_core.Point p2 = new org.bytedeco.opencv.opencv_core.Point(x2, y2);
        int lineType = 8;
        int shift = 0;
        org.bytedeco.opencv.global.opencv_imgproc.line(m, p1, p2, color, thickness, lineType, shift);
    }

    protected void drawGrid(Mat m, Scalar color, int thickness, List<Point> pxPoints, int gridX, int gridY) {
        drawGridLines(m, pxPoints, false, gridX, color, thickness);
        drawGridLines(m, pxPoints, true, gridY, color, thickness);
    }

    protected void drawGridLines(Mat m, List<Point> pxPoints, boolean horizontalLines, int num, Scalar color, int thickness) {
        Point p1a, p1b, p2a, p2b;
        if (horizontalLines) {
            //Horizontal lines - perpendicular to (TL, BL) and (TR, BR)
            p1a = pxPoints.get(0);
            p1b = pxPoints.get(2);
            p2a = pxPoints.get(1);
            p2b = pxPoints.get(3);
        } else {
            //Vertical lines - perpendicular to (TL, TR) and (BL, BR)
            p1a = pxPoints.get(0);
            p1b = pxPoints.get(1);
            p2a = pxPoints.get(2);
            p2b = pxPoints.get(3);
        }

        for (int j = 1; j < num; j++) {
            double frac = j / (double) num;
            double deltaX1 = p1b.x() - p1a.x();
            double deltaX2 = p2b.x() - p2a.x();
            double deltaY1 = p1b.y() - p1a.y();
            double deltaY2 = p2b.y() - p2a.y();
            int x1Px = (int) (p1a.x() + frac * deltaX1);
            int x2Px = (int) (p2a.x() + frac * deltaX2);
            int y1Px = (int) (p1a.y() + frac * deltaY1);
            int y2Px = (int) (p2a.y() + frac * deltaY2);
            drawLine(m, color, thickness, x1Px, x2Px, y1Px, y2Px);
        }
    }
}
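A usage sketch for the fixed-grid variant (not from the source): it draws a 3x4 grid on an image loaded from disk. The fluent setters on DrawFixedGridStep are an assumption based on the accessors the runner calls (points(), gridX(), gridY(), coordsArePixels()), and the point order TL, TR, BL, BR follows the drawLine border comments above.

import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.Image;
import ai.konduit.serving.pipeline.api.data.Point;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;

import java.util.Arrays;

import static org.bytedeco.opencv.global.opencv_imgcodecs.imread;

public class DrawGridSketch {
    public static void main(String[] args) {
        DrawFixedGridStep step = new DrawFixedGridStep()
                .points(Arrays.asList(               //hypothetical fluent setter
                        Point.create(0.1, 0.1),      //top-left
                        Point.create(0.9, 0.1),      //top-right
                        Point.create(0.1, 0.9),      //bottom-left
                        Point.create(0.9, 0.9)))     //bottom-right
                .gridX(3).gridY(4)                   //hypothetical fluent setters
                .coordsArePixels(false);             //hypothetical fluent setter

        Data in = Data.singleton("image", Image.create(imread("/path/to/image.png")));
        PipelineStepRunner runner = new DrawGridStepRunnerFactory().create(step);
        Data out = runner.exec(null, in);            //"image" now holds the annotated copy
    }
}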
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/grid/draw/DrawGridStepRunnerFactory.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.grid.draw;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory;
import org.nd4j.common.base.Preconditions;

public class DrawGridStepRunnerFactory implements PipelineStepRunnerFactory {
    @Override
    public boolean canRun(PipelineStep pipelineStep) {
        return pipelineStep instanceof DrawGridStep || pipelineStep instanceof DrawFixedGridStep;
    }

    @Override
    public PipelineStepRunner create(PipelineStep pipelineStep) {
        Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep);
        if (pipelineStep instanceof DrawGridStep) {
            return new DrawGridRunner((DrawGridStep) pipelineStep);
        } else {
            return new DrawGridRunner((DrawFixedGridStep) pipelineStep);
        }
    }
}
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/ndarray/ImageToNDArrayRunner.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.data.image.step.ndarray;

import ai.konduit.serving.annotation.runner.CanRun;
import ai.konduit.serving.data.image.convert.ImageToNDArray;
import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig;
import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.*;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import ai.konduit.serving.pipeline.impl.data.ValueNotFoundException;
import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray;
import lombok.NonNull;
import org.bytedeco.javacpp.Loader;
import org.nd4j.common.base.Preconditions;
import org.nd4j.common.primitives.Pair;

import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

@CanRun(ImageToNDArrayStep.class)
public class ImageToNDArrayRunner implements PipelineStepRunner {

    protected final ImageToNDArrayStep step;

    public ImageToNDArrayRunner(@NonNull ImageToNDArrayStep step) {
        this.step = step;
    }

    @Override
    public void close() {
        //No-op
    }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        /*
        Behaviour:
        (a) If keys are defined, convert only those
        (b) If no keys are defined, convert all images
         */

        List<String> toConvert = step.keys();
        List<String> outNames = step.outputNames();
        boolean inferOutNames = (outNames == null) || outNames.isEmpty();
        if (inferOutNames) {
            outNames = new ArrayList<>();
        }

        if (toConvert == null) {
            toConvert = new ArrayList<>();
            for (String s : data.keys()) {
                if (data.type(s) == ValueType.IMAGE) {
                    toConvert.add(s);
                    if (inferOutNames)
                        outNames.add(s);
                } else if (step.config().listHandling() != ImageToNDArrayConfig.ListHandling.NONE &&
                        data.type(s) == ValueType.LIST && data.listType(s) == ValueType.IMAGE) {
                    toConvert.add(s);
                    if (inferOutNames)
                        outNames.add(s);
                }
            }
        }

        Preconditions.checkState(!toConvert.isEmpty(), "No input images were specified, and no Image field could be inferred from input");
        Preconditions.checkState(toConvert.size() == outNames.size(), "Got (or inferred) a different number of input image keys" +
                " vs. output names: inputToConvert=%s, outputNames=%s", toConvert, outNames);

        boolean meta = step.metadata();
        List<BoundingBox> cropRegionMeta = meta ? new ArrayList<>(toConvert.size()) : null;
        List<Long> origHMeta = meta ? new ArrayList<>(toConvert.size()) : null;
        List<Long> origWMeta = meta ? new ArrayList<>(toConvert.size()) : null;

        Data d = Data.empty();
        int idx = 0;
        for (String s : toConvert) {
            if (!data.has(s)) {
                throw new ValueNotFoundException("Error in ImageToNDArrayStep: Input field \"" + s + "\" (via ImageToNDArrayStep.keys configuration)" +
                        " does not exist in the provided input Data instance (data keys: " + data.keys() + ")");
            }

            boolean isList = data.type(s) == ValueType.LIST && data.listType(s) == ValueType.IMAGE;
            if (isList) {
                List<NDArray> l = new ArrayList<>();
                boolean batch = false;
                switch (step.config().listHandling()) {
                    default:
                    case NONE:
                        throw new IllegalStateException("Error in step " + name() + " of type ImageToNDArrayStep: input field \"" + s +
                                "\" is a List<Image> but ImageToNDArrayConfig.listHandling == ListHandling.NONE.\n" +
                                "For List<Image> --> List<NDArray>, use ListHandling.LIST_OUT\n" +
                                "For List<Image> --> NDArray, use ListHandling.BATCH (where arrays are batched along dimension 0)\n" +
                                "For List<Image>.get(0) --> NDArray, use ListHandling.FIRST");
                    case FIRST:
                        List<Image> imgList = data.getListImage(s);
                        if (imgList.isEmpty()) {
                            empty(d, outNames.get(idx++));
                        } else {
                            NDArray array = ImageToNDArray.convert(data.getListImage(s).get(0), step.config());
                            d.put(outNames.get(idx++), array);
                        }
                        return d;
                    case BATCH:
                        batch = true;
                        //Fall through
                    case LIST_OUT:
                        List<Image> images = data.getListImage(s);
                        for (Image i : images) {
                            NDArray out = ImageToNDArray.convert(i, step.config());
                            l.add(out);
                        }
                        break;
                }

                if (batch) {
                    if (l.size() == 0) {
                        //Return empty NDArray
                        empty(d, outNames.get(idx++));
                        continue;
                    } else if (l.size() == 1) {
                        d.put(outNames.get(idx++), l.get(0));
                    } else {
                        //Check that all have the same shape before combining
                        long[] first = l.get(0).shape();
                        for (NDArray arr : l) {
                            long[] curr = arr.shape();
                            Preconditions.checkState(Arrays.equals(first, curr), "Error in ImageToNDArrayStep:" +
                                    " ImageToNDArrayStep.config.listHandling == BATCH but at least two output NDArrays have different shapes" +
                                    " (%s vs. %s). Unable to combine multiple NDArrays along the batch dimension if they have different shapes." +
                                    " Setting ImageToNDArrayStep.config.height/width, or only passing in images of the same size, will solve this problem", first, curr);
                        }

                        //Concatenate. Note that C order along dimension 0 means we can just copy buffers
                        SerializedNDArray nd = l.get(0).getAs(SerializedNDArray.class);
                        int size = nd.getBuffer().capacity();
                        int newSize = size * l.size();
                        long[] newShape = l.get(0).shape().clone();
                        if (!step.config().includeMinibatchDim()) {
                            newShape = new long[]{0, newShape[0], newShape[1], newShape[2]};
                        }
                        newShape[0] = l.size();
                        boolean direct = !Loader.getPlatform().startsWith("android");
                        ByteBuffer outBuff = direct ? ByteBuffer.allocateDirect(newSize).order(ByteOrder.LITTLE_ENDIAN) :
                                ByteBuffer.allocate(newSize).order(ByteOrder.LITTLE_ENDIAN);
                        nd.getBuffer().rewind();
                        outBuff.put(nd.getBuffer());
                        for (int i = 1; i < l.size(); i++) {
                            SerializedNDArray ndarr = l.get(i).getAs(SerializedNDArray.class);
                            ndarr.getBuffer().rewind();
                            outBuff.put(ndarr.getBuffer());
                        }

                        SerializedNDArray outArr = new SerializedNDArray(l.get(0).type(), newShape, outBuff);
                        d.put(outNames.get(idx++), NDArray.create(outArr));
                    }
                } else {
                    d.putListNDArray(outNames.get(idx++), l);
                }
            } else {
                //Single image case
                Image i = data.getImage(s);
                if (meta) {
                    Pair<NDArray, BoundingBox> p = ImageToNDArray.convertWithMetadata(i, step.config());
                    d.put(outNames.get(idx++), p.getFirst()); //Also output the converted array; the original only recorded the metadata here, which looks like a bug
                    cropRegionMeta.add(p.getSecond());
                    origHMeta.add((long) i.height());
                    origWMeta.add((long) i.width());
                } else {
                    NDArray array = ImageToNDArray.convert(i, step.config());
                    d.put(outNames.get(idx++), array);
                }
            }
        }

        if (step.keepOtherValues()) {
            for (String s : data.keys()) {
                if (toConvert.contains(s))
                    continue;
                d.copyFrom(s, data);
            }
        }

        if (meta) {
            Data dMeta = Data.empty();
            if (cropRegionMeta.size() == 1) {
                //If only 1 image is converted: store as single values
                dMeta.put(ImageToNDArrayStep.META_INNAME_KEY, toConvert.get(0));
                dMeta.put(ImageToNDArrayStep.META_OUTNAME_KEY, outNames.get(0));
                dMeta.put(ImageToNDArrayStep.META_IMG_H, origHMeta.get(0));
                dMeta.put(ImageToNDArrayStep.META_IMG_W, origWMeta.get(0));
                dMeta.put(ImageToNDArrayStep.META_CROP_REGION, cropRegionMeta.get(0));
            } else {
                //Multiple images converted: store as multiple values
                dMeta.putListString(ImageToNDArrayStep.META_INNAME_KEY, toConvert);
                dMeta.putListString(ImageToNDArrayStep.META_OUTNAME_KEY, outNames);
                dMeta.putListInt64(ImageToNDArrayStep.META_IMG_H, origHMeta);
                dMeta.putListInt64(ImageToNDArrayStep.META_IMG_W, origWMeta);
                dMeta.putListBoundingBox(ImageToNDArrayStep.META_CROP_REGION, cropRegionMeta);
            }

            String key = step.metadataKey();
            if (key == null)
                key = ImageToNDArrayStep.DEFAULT_METADATA_KEY;
            Data m = Data.singleton(key, dMeta); //Note we embed it in a Data instance, to not conflict with other metadata keys
            d.setMetaData(m);
        }

        return d;
    }

    private void empty(Data d, String outName) {
        long[] shape = ImageToNDArray.getOutputShape(step.config());
        if (shape.length == 3) {
            shape = new long[]{0, shape[0], shape[1], shape[2]};
        } else {
            shape[0] = 0;
        }
        SerializedNDArray arr = new SerializedNDArray(step.config().dataType(), shape, ByteBuffer.allocate(0));
        d.put(outName, NDArray.create(arr));
    }
}
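The BATCH concatenation above relies on C-order buffers: stacking N equal-shape arrays along a new dimension 0 is just appending their raw buffers in order. A minimal standalone illustration with plain ByteBuffers (no Konduit API; the values and shapes are made up for the example):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class BatchConcatExample {
    public static void main(String[] args) {
        //Two arrays of shape [1, 2], int32, C order: 4 bytes per element
        ByteBuffer a = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
        ByteBuffer b = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
        a.putInt(1).putInt(2);
        b.putInt(3).putInt(4);

        //Batched result has shape [2, 2]: just the two buffers back to back
        ByteBuffer batched = ByteBuffer.allocate(a.capacity() + b.capacity())
                .order(ByteOrder.LITTLE_ENDIAN);
        a.rewind();
        b.rewind();
        batched.put(a).put(b);
        batched.rewind();
        System.out.println(batched.getInt() + ", " + batched.getInt() + ", "
                + batched.getInt() + ", " + batched.getInt()); //prints 1, 2, 3, 4
    }
}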
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/ndarray/ImageToNDArrayStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.ndarray; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class ImageToNDArrayStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof ImageToNDArrayStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run pipeline step: %s", pipelineStep.getClass()); return new ImageToNDArrayRunner((ImageToNDArrayStep) pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/convert/RelativeToAbsoluteRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.convert; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.data.image.convert.ImageToNDArray; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.util.ImageUtils; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.*; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import org.nd4j.common.base.Preconditions; import java.util.ArrayList; import java.util.List; @CanRun(RelativeToAbsoluteStep.class) public class RelativeToAbsoluteRunner implements PipelineStepRunner { private final RelativeToAbsoluteStep step; public RelativeToAbsoluteRunner(RelativeToAbsoluteStep step){ this.step = step; } @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { List<String> toConvert = step.toConvert(); if(toConvert == null || toConvert.isEmpty()){ toConvert = new ArrayList<>(); for(String s : data.keys()){ if(data.type(s) == ValueType.POINT || data.type(s) == ValueType.BOUNDING_BOX){ toConvert.add(s); } else if(data.type(s) == ValueType.LIST && (data.listType(s) == ValueType.POINT || data.listType(s) == ValueType.BOUNDING_BOX)){ toConvert.add(s); } } } //Work out image dims int h; int w; if(step.imageName() != null){ Preconditions.checkState(data.has(step.imageName()), "RelativeToAbsoluteStep.imageName=\"%s\" but Data has no field with this name", step.imageName()); Preconditions.checkState(data.type(step.imageName()) == ValueType.IMAGE, "RelativeToAbsoluteStep.imageName=\"%s\" but Data[\"%s\"] has type %s", step.imageName(), step.imageName(), data.type(step.imageName())); Image i = data.getImage(step.imageName()); h = i.height(); w = i.width(); } else if(step.imageH() != null && step.imageW() != null){ h = step.imageH(); w = step.imageW(); } else { throw new IllegalStateException("RelativeToAbsoluteStep: Either imageH and imageW must be set, or imageName must be set, " + "in order to determine the image size"); } Data out = data.clone(); for(String s : toConvert){ Preconditions.checkState(data.has(s), "Error in RelativeToAbsoluteStep: data does not have an input of name \"%s\"", s); ValueType vt = data.type(s); boolean list = vt == ValueType.LIST; if(vt != ValueType.POINT && vt != ValueType.BOUNDING_BOX && (!list || !(data.listType(s) == ValueType.POINT || data.listType(s) == ValueType.BOUNDING_BOX))){ String type = list ? 
"List<" + data.listType(s) + ">" : vt.toString(); throw new IllegalStateException("Error in RelativeToAbsoluteStep: Value for input \"" + s + "\" must be POINT, BOUNDING_BOX, " + "LIST<POINT> or LIST<BOUNDING_BOX> but was " + type); } if(vt == ValueType.POINT){ Point p = data.getPoint(s); p = ImageUtils.accountForCrop(p, w, h, step.imageToNDArrayConfig()); p = p.toAbsolute(w, h); out.put(s, p); } else if(vt == ValueType.BOUNDING_BOX){ BoundingBox b = ImageUtils.accountForCrop(data.getBoundingBox(s), w, h, step.imageToNDArrayConfig()); BoundingBox absolute = b; if(b.cx() < 1.0 && b.cy() < 1.0){ absolute = BoundingBox.create(b.cx()*w, b.cy()*h, b.height()*h, b.width()*w, b.label(), b.probability()); } out.put(s, absolute); } else if(data.listType(s) == ValueType.POINT){ List<Point> lIn = data.getListPoint(s); List<Point> lOut = new ArrayList<>(); for(Point p : lIn){ p = ImageUtils.accountForCrop(p, w, h, step.imageToNDArrayConfig()); p = p.toAbsolute(w, h); lOut.add(p); } out.putListPoint(s, lOut); } else if(data.listType(s) == ValueType.BOUNDING_BOX){ List<BoundingBox> lIn = data.getListBoundingBox(s); List<BoundingBox> lOut = new ArrayList<>(); for(BoundingBox bb : lIn){ BoundingBox b = ImageUtils.accountForCrop(bb, w, h, step.imageToNDArrayConfig()); BoundingBox absolute = b; if(b.cx() < 1.0 && b.cy() < 1.0){ absolute = BoundingBox.create(b.cx()*w, b.cy()*h, b.height()*h, b.width()*w, b.label(), b.probability()); } lOut.add(absolute); } out.putListBoundingBox(s, lOut); } else { throw new RuntimeException(); //Should never happen } } return out; } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/convert/RelativeToAbsoluteStepFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.convert; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class RelativeToAbsoluteStepFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof RelativeToAbsoluteStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step: %s", step); return new RelativeToAbsoluteRunner((RelativeToAbsoluteStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/draw/DrawPointsRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.draw; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.data.image.convert.ImageToNDArray; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.util.ColorUtil; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.*; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import lombok.NonNull; import org.bytedeco.opencv.global.opencv_imgproc; import org.bytedeco.opencv.opencv_core.Mat; import org.bytedeco.opencv.opencv_core.Scalar; import org.opencv.core.CvType; import java.util.*; @CanRun(DrawPointsStep.class) public class DrawPointsRunner implements PipelineStepRunner { protected final DrawPointsStep step; protected Map<String, Scalar> labelMap; public DrawPointsRunner(@NonNull DrawPointsStep step) { this.step = step; } @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { Data out = Data.empty(); for(String key : data.keys()){ out.copyFrom(key, data); } if(step.points() == null || step.points().size() == 0){ throw new IllegalArgumentException("No point input data fields defined. 
Nothing to draw."); } // collect points List<Point> points = new LinkedList<>(); for (String pointName : step.points()) { ValueType type = data.type(pointName); if(type == ValueType.POINT){ Point point = data.getPoint(pointName); if(point.dimensions() != 2){ throw new IllegalArgumentException("Point in input "+pointName+" has "+point.dimensions()+" dimensions, but only 2 dimensional points are supported for drawing!"); } points.add(point); }else if(type == ValueType.LIST){ List<Point> pointList = data.getListPoint(pointName); for (Point point : pointList) { if(point.dimensions() != 2){ throw new IllegalArgumentException("Point in input "+pointName+" has "+point.dimensions()+" dimensions, but only 2 dimensional points are supported for drawing!"); } } points.addAll(pointList); }else { throw new IllegalArgumentException("The configured input "+pointName+" is neither a point nor a list of points!"); } } // get reference size and initialize image int width; int height; Mat image; if(step.image() != null){ ValueType type = data.type(step.image()); if(type == ValueType.IMAGE){ Image img = data.getImage(step.image()); width = img.width(); height = img.height(); image = img.getAs(Mat.class); }else{ throw new IllegalArgumentException("The configured reference image input "+step.image()+" is not an Image!"); } }else if(step.width() != null && step.height() != null){ width = step.width(); height = step.height(); image = new Mat(); image.put(Mat.zeros(height, width, CvType.CV_8UC3)); }else{ throw new IllegalArgumentException("You have to provide either a reference image or width AND height!"); } // turn points with relative addressing to absolute addressing List<Point> absPoints = new ArrayList<>(points.size()); for (Point point : points) { absPoints.add(accountForCrop(point, width, height, step.imageToNDArrayConfig())); } // draw points on image with color according to labels int radius = step.radius() == null ? 5 : step.radius(); for (Point point : absPoints) { Scalar color; if(point.label() == null){ if(step.noClassColor() == null){ color = ColorUtil.stringToColor(DrawPointsStep.DEFAULT_NO_POINT_COLOR); } else { color = ColorUtil.stringToColor(step.noClassColor()); } } else { // Initialize colors first if they weren't initialized at all if(labelMap == null) { Map<String, String> classColors = step.classColors(); if(classColors == null){ throw new IllegalArgumentException("A label to color configuration has to be passed!"); } initColors(classColors, classColors.size()); } color = labelMap.get(point.label()); if(color == null){ throw new IllegalArgumentException("No color provided for label " + point.label()); } } opencv_imgproc.circle( image, new org.bytedeco.opencv.opencv_core.Point((int)point.x(), (int)point.y()), radius, color, opencv_imgproc.FILLED, opencv_imgproc.LINE_AA, 0 ); } // return image out.put(step.outputName() == null ? 
DrawPointsStep.DEFAULT_OUTPUT_NAME : step.outputName(), Image.create(image)); return out; } private Point accountForCrop(Point relPoint, int width, int height, ImageToNDArrayConfig imageToNDArrayConfig) { if(imageToNDArrayConfig == null){ return relPoint.toAbsolute(width, height); } BoundingBox cropRegion = ImageToNDArray.getCropRegion(width, height, imageToNDArrayConfig); double cropWidth = cropRegion.width(); double cropHeight = cropRegion.height(); return Point.create( cropRegion.x1() + cropWidth * relPoint.x(), cropRegion.y1() + cropHeight * relPoint.y(), relPoint.label(), relPoint.probability() ).toAbsolute(width, height); } private void initColors(Map<String, String> classColors, int max) { labelMap = new HashMap<>(classColors.size()); for (Map.Entry<String, String> entry : classColors.entrySet()) { labelMap.put(entry.getKey(), ColorUtil.stringToColor(entry.getValue())); } } }
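The accountForCrop(...) helper above maps a point that is relative to a crop region (itself expressed in relative coordinates of the full image) back into full-image coordinates before making it absolute. A standalone sketch of that arithmetic with illustrative values; the class and variable names below are hypothetical:

// Sketch: mapping a crop-relative point back to absolute full-image coordinates
public class CropAccountSketch {
    public static void main(String[] args) {
        int width = 800, height = 600;
        // Crop region covering the central 50% of the image (relative coords)
        double cropX1 = 0.25, cropY1 = 0.25, cropW = 0.5, cropH = 0.5;
        double relX = 0.5, relY = 0.5;           // point relative to the crop region
        double fullRelX = cropX1 + cropW * relX; // 0.5 of the full image
        double fullRelY = cropY1 + cropH * relY;
        System.out.printf("absolute: (%.1f, %.1f)%n", fullRelX * width, fullRelY * height);
    }
}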
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/draw/DrawPointsStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.draw; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class DrawPointsStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof DrawPointsStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep); return new DrawPointsRunner((DrawPointsStep)pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/heatmap/DrawHeatmapRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.heatmap; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.data.image.convert.ImageToNDArray; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.util.ImageUtils; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.*; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.bytedeco.javacpp.DoublePointer; import org.bytedeco.opencv.global.opencv_core; import org.bytedeco.opencv.global.opencv_imgproc; import org.bytedeco.opencv.opencv_core.Mat; import org.bytedeco.opencv.opencv_core.Rect; import org.bytedeco.opencv.opencv_core.Size; import org.opencv.core.CvType; import java.util.LinkedList; import java.util.List; @Slf4j @CanRun(DrawHeatmapStep.class) public class DrawHeatmapRunner implements PipelineStepRunner { protected final DrawHeatmapStep step; protected Mat prev; protected Mat brush; public DrawHeatmapRunner(@NonNull DrawHeatmapStep step) { this.step = step; } @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { Data out = Data.empty(); if(step.keepOtherValues()) { for (String key : data.keys()) { out.copyFrom(key, data); } } // get reference size int width; int height; Mat targetImage = null; if(step.image() != null){ ValueType type = data.type(step.image()); if(type == ValueType.IMAGE){ Image image = data.getImage(step.image()); width = image.width(); height = image.height(); targetImage = image.getAs(Mat.class); }else{ throw new IllegalArgumentException("The configured reference image input "+step.image()+" is not an Image!"); } }else if(step.width() != null && step.height() != null){ width = step.width(); height = step.height(); }else{ throw new IllegalArgumentException("You have to provide either a reference image or width AND height!"); } if(prev == null){ prev = new Mat(); prev.put(Mat.zeros(height, width, CvType.CV_64FC1)); } // collect points List<Point> points = new LinkedList<>(); for (String pointName : step.points()) { ValueType type = data.type(pointName); if(type == ValueType.POINT){ Point point = data.getPoint(pointName); if(point.dimensions() != 2){ throw new IllegalArgumentException("Point in input "+pointName+" has "+point.dimensions()+" dimensions, but only 2 dimensional points are supported for drawing!"); } points.add(ImageUtils.accountForCrop(point, width, height, step.imageToNDArrayConfig())); }else if(type == ValueType.LIST){ List<Point> pointList = data.getListPoint(pointName); for (Point point : 
pointList) { if(point.dimensions() != 2){ throw new IllegalArgumentException("Point in input "+pointName+" has "+point.dimensions()+" dimensions, but only 2 dimensional points are supported for drawing!"); } points.add(ImageUtils.accountForCrop(point, width, height, step.imageToNDArrayConfig())); } }else { throw new IllegalArgumentException("The configured input "+pointName+" is neither a point nor a list of points!"); } } int radius = step.radius() == null ? 15 : step.radius(); int kSize = radius * 8 + 1; if(brush == null){ Size kernelSize = new Size(kSize, kSize); brush = new Mat(); brush.put(Mat.zeros(kSize, kSize, CvType.CV_64FC1)); brush.createIndexer().putDouble(new long[]{kSize / 2, kSize / 2}, 255); opencv_imgproc.GaussianBlur(brush, brush, kernelSize, radius, radius, opencv_core.BORDER_ISOLATED); } Mat mat = new Mat(); mat.put(Mat.zeros(height, width, CvType.CV_64FC1)); for (Point point : points) { int row = (int) point.y(); int col = (int) point.x(); if(row > height || col > width){ log.warn("{} is out of bounds ({}, {})", point, width, height); }else { int offsetRow = row - kSize / 2; int offsetCol = col - kSize / 2; int brushWidth = kSize; int brushHeight = kSize; int brushOffsetRow = 0; int brushOffsetCol = 0; if(offsetRow < 0){ brushHeight += offsetRow; brushOffsetRow -= offsetRow; offsetRow = 0; } if(offsetCol < 0){ brushWidth += offsetCol; brushOffsetCol -= offsetCol; offsetCol = 0; } if(offsetRow + brushHeight > mat.arrayHeight()){ brushHeight = mat.arrayHeight() - offsetRow; } if(offsetCol + brushWidth > mat.arrayWidth()){ brushWidth = mat.arrayWidth() - offsetCol; } Mat region = mat.apply(new Rect(offsetCol, offsetRow, brushWidth, brushHeight)); Mat brushRegion = brush.apply(new Rect(brushOffsetCol, brushOffsetRow, brushWidth, brushHeight)); opencv_core.add(region, brushRegion, region); } } opencv_core.addWeighted(prev, step.fadingFactor() == null ? 0.9 : step.fadingFactor(), mat, 1.0, 0, mat); prev.close(); prev = mat; DoublePointer maxVal = new DoublePointer(1); opencv_core.minMaxLoc(mat, null, maxVal, null, null, null); Mat scaledOut = new Mat(); mat.convertTo(scaledOut, CvType.CV_8UC1, 255/maxVal.get(), 0); maxVal.close(); Mat image = new Mat(); opencv_imgproc.applyColorMap(scaledOut, image, opencv_imgproc.COLORMAP_TURBO); // return image Image outputImage; if(targetImage == null){ outputImage = Image.create(image); }else{ opencv_core.addWeighted(targetImage, 1.0, image, step.opacity() == null ? 0.5 : step.opacity(), 0, image); outputImage = Image.create(image); } out.put(step.outputName() == null ? DrawHeatmapStep.DEFAULT_OUTPUT_NAME : step.outputName(), outputImage); return out; } }
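The heatmap above is accumulated as prev = fadingFactor * prev + currentMask via opencv_core.addWeighted, so the contribution of older points decays exponentially over successive frames. A minimal OpenCV-free sketch of that accumulation rule, using a 1-D array for brevity (all names and values are illustrative):

// Sketch: exponential-fade accumulation, the rule behind addWeighted(prev, fade, mat, 1.0, 0, mat)
public class HeatmapFadeSketch {
    public static void main(String[] args) {
        double fade = 0.9;
        double[] prev = new double[5];          // 1-D "image" accumulator
        double[][] frames = { {0,0,1,0,0}, {0,0,1,0,0}, {0,1,0,0,0} };
        for (double[] cur : frames) {
            for (int i = 0; i < prev.length; i++) {
                prev[i] = fade * prev[i] + cur[i];
            }
        }
        System.out.println(java.util.Arrays.toString(prev)); // hot spots decay over time
    }
}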
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/heatmap/DrawHeatmapStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.heatmap; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class DrawHeatmapStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof DrawHeatmapStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep); return new DrawHeatmapRunner((DrawHeatmapStep)pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/perspective
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/perspective/convert/PerspectiveTransformRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.perspective.convert; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.*; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.bytedeco.javacpp.indexer.DoubleIndexer; import org.bytedeco.javacpp.indexer.DoubleRawIndexer; import org.bytedeco.javacpp.indexer.FloatIndexer; import org.bytedeco.opencv.global.opencv_core; import org.bytedeco.opencv.global.opencv_imgproc; import org.bytedeco.opencv.opencv_core.Mat; import org.bytedeco.opencv.opencv_core.Size; import org.opencv.core.CvType; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.stream.Collectors; import java.util.stream.DoubleStream; @Slf4j @CanRun(PerspectiveTransformStep.class) public class PerspectiveTransformRunner implements PipelineStepRunner { protected final PerspectiveTransformStep step; public PerspectiveTransformRunner(@NonNull PerspectiveTransformStep step){ this.step = step; } @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { // Step 0: Get source and target points List<Point> source = null; if(step.sourcePoints() != null && step.sourcePointsName() != null){ throw new IllegalStateException("You must not define both sourcePoints and sourcePointsName simultaneously on PerspectiveTransformStep!"); } if(step.sourcePoints() == null && step.sourcePointsName() == null){ throw new IllegalStateException("You have to define either sourcePoints or sourcePointsName on PerspectiveTransformStep!"); } if(step.sourcePoints() != null){ source = step.sourcePoints(); }else{ ValueType sourceType = data.type(step.sourcePointsName()); if(sourceType == ValueType.LIST && data.listType(step.sourcePointsName()) == ValueType.POINT){ List<Point> points = data.getListPoint(step.sourcePointsName()); if(points.size() != 4){ throw new IllegalArgumentException("field "+step.sourcePointsName()+" for source points in PerspectiveTransformStep does not contain exactly 4 points (found: "+points.size()+")"); } source = points; } } List<Point> target = null; if(step.targetPoints() != null && step.targetPointsName() != null){ throw new IllegalStateException("You must not define both targetPoints and targetPointsName simultaneously on PerspectiveTransformStep!"); } if(step.targetPoints() == null && step.targetPointsName() == null){ target = calculateTargetPoints(source); } else if( step.targetPoints() != null){ target = step.targetPoints(); }else{ ValueType 
targetType = data.type(step.targetPointsName()); if(targetType == ValueType.LIST && data.listType(step.targetPointsName()) == ValueType.POINT){ List<Point> points = data.getListPoint(step.targetPointsName()); if(points.size() != 4){ throw new IllegalArgumentException("field "+step.targetPointsName()+" for target points in PerspectiveTransformStep does not contain exactly 4 points (found: "+points.size()+")"); } target = points; } } int refWidth = -1; int refHeight = -1; if(step.referenceImage() != null){ ValueType type = data.type(step.referenceImage()); Image refImg; if(type == ValueType.IMAGE){ refImg = data.getImage(step.referenceImage()); }else if(type == ValueType.LIST && data.listType(step.referenceImage()) == ValueType.IMAGE){ List<Image> images = data.getListImage(step.referenceImage()); if(images.size() == 0){ throw new IllegalArgumentException("field "+step.referenceImage()+" is an empty list"); } refImg = images.get(0); }else{ throw new IllegalArgumentException("field "+step.referenceImage()+" is neither an image nor a list of images"); } refWidth = refImg.width(); refHeight = refImg.height(); }else if(step.referenceWidth() != null && step.referenceHeight() != null){ refWidth = step.referenceWidth(); refHeight = step.referenceHeight(); } // Step 1: create transformation matrix Mat sourceMat = pointsToMat(source); Mat targetMat = pointsToMat(target); Mat transMat = getPerspectiveTransform(sourceMat, targetMat, refWidth, refHeight); // Step 2: find fields to apply transformation to List<String> fields = step.inputNames(); if(fields == null){ fields = new LinkedList<>(); for (String key : data.keys()) { // Skip points that are used to define the transform if(key.equals(step.targetPointsName()) || key.equals(step.sourcePointsName())){ continue; } ValueType keyType = data.type(key); if(keyType == ValueType.LIST){ keyType = data.listType(key); } if(keyType == ValueType.IMAGE || keyType == ValueType.BOUNDING_BOX || keyType == ValueType.POINT){ fields.add(key); } } } if(fields.size() == 0){ throw new IllegalStateException("No fields found where PerspectiveTransformRunner could be applied."); } List<String> outNames = step.outputNames(); if(outNames == null || outNames.size() == 0){ outNames = fields; }else if(outNames.size() != fields.size()){ throw new IllegalStateException("You must provide exactly as many outputNames as there are fields to be transformed! 
outputNames.size = "+step.outputNames().size()+" fields.size = "+fields.size()); } // Step 3: apply transformation matrix to fields appropriately val out = Data.empty(); if(step.keepOtherFields()){ for(String key : data.keys()){ out.copyFrom(key, data); } } int rW = refWidth; int rH = refHeight; for (int i = 0; i < fields.size(); i++) { String key = fields.get(i); ValueType keyType = data.type(key); String outKey = outNames.get(i); if(keyType == ValueType.LIST){ keyType = data.listType(key); switch (keyType){ case POINT: out.putListPoint( outKey, data.getListPoint(key).stream().map(it -> transform(transMat, it, rW, rH)).collect(Collectors.toList()) ); break; case IMAGE: out.putListImage( outKey, data.getListImage(key).stream().map(it -> transform(transMat, it)).collect(Collectors.toList()) ); break; case BOUNDING_BOX: out.putListBoundingBox( outKey, data.getListBoundingBox(key).stream().map(it -> transform(transMat, it, rW, rH)).collect(Collectors.toList()) ); break; default: throw new IllegalStateException("Field "+key+" with data type "+keyType+" is not supported for perspective transform!"); } }else{ switch (keyType){ case POINT: out.put(outKey, transform(transMat, data.getPoint(key), rW, rH)); break; case IMAGE: out.put(outKey, transform(transMat, data.getImage(key))); break; case BOUNDING_BOX: out.put(outKey, transform(transMat, data.getBoundingBox(key), rW, rH)); break; default: throw new IllegalStateException("Field "+key+" with data type "+keyType+" is not supported for perspective transform!"); } } } return out; } private Point transform(Mat transform, Point it, int refW, int refH) { it = it.toAbsolute(refW, refH); Mat dst = new Mat(); Mat src = new Mat(1, 1, CvType.CV_64FC(it.dimensions())); DoubleIndexer idx = src.createIndexer(); DoubleRawIndexer doubleRawIndexer = (DoubleRawIndexer) idx; for (int i = 0; i < it.dimensions(); i++) { doubleRawIndexer.putRaw( i, it.get(i)); } opencv_core.perspectiveTransform(src, dst, transform); idx = dst.createIndexer(); double[] coords = new double[it.dimensions()]; idx.get(0L, coords); return Point.create(coords, it.label(), it.probability()); } private BoundingBox transform(Mat transform, BoundingBox it, int refW, int refH) { Point transformedCenter = transform(transform, Point.create(it.cx(), it.cy()), refW, refH); return BoundingBox.create(transformedCenter.x(), transformedCenter.y(), it.width(), it.height(), it.label(), it.probability()); } private Image transform(Mat transform, Image it) { Mat dst = new Mat(); Mat src = it.getAs(Mat.class); Size outputSize = calculateOutputSize(transform, it.width(), it.height()); opencv_imgproc.warpPerspective(src, dst, transform, outputSize); return Image.create(dst); } private Mat getPerspectiveTransform(Mat sourceMat, Mat targetMat, int refWidth, int refHeight) { Mat initialTransform = opencv_imgproc.getPerspectiveTransform(sourceMat, targetMat); if(refWidth == -1 || refHeight == -1) { return initialTransform; } // Calculate where edges will end up in this case double[] extremes = calculateExtremes(initialTransform, refWidth, refHeight); FloatIndexer tIdx = targetMat.createIndexer(); long rows = tIdx.size(0); for (long i = 0; i < rows; i++) { tIdx.put(i, 0, (float)(tIdx.get(i, 0) - extremes[0])); tIdx.put(i, 1, (float)(tIdx.get(i, 1) - extremes[1])); } return opencv_imgproc.getPerspectiveTransform(sourceMat, targetMat); } private double[] calculateExtremes(Mat transform, int width, int height) { Mat src = new Mat(4, 1, CvType.CV_64FC2); DoubleIndexer idx = src.createIndexer(); DoubleRawIndexer idxRaw 
= src.createIndexer(); // topLeft idxRaw.putRaw(0, 0); idxRaw.putRaw(1, 0); // topRight idxRaw.putRaw(2, width); idxRaw.putRaw(3, 0); // bottomLeft idxRaw.putRaw(4, 0); idxRaw.putRaw(5, height); // bottomRight idxRaw.putRaw(6, width); idxRaw.putRaw(7, height); Mat dst = new Mat(); opencv_core.perspectiveTransform(src, dst, transform); idx = dst.createIndexer(); idxRaw = (DoubleRawIndexer) idx; double[] xValues = new double[] { idxRaw.getRaw(0), idxRaw.getRaw(2), idxRaw.getRaw(4), idxRaw.getRaw(6) }; double[] yValues = new double[]{ idxRaw.getRaw(1), idxRaw.getRaw(3), idxRaw.getRaw(5), idxRaw.getRaw(7) }; double minX = DoubleStream.of(xValues).min().getAsDouble(); double maxX = DoubleStream.of(xValues).max().getAsDouble(); double minY = DoubleStream.of(yValues).min().getAsDouble(); double maxY = DoubleStream.of(yValues).max().getAsDouble(); return new double[]{minX, minY, maxX, maxY}; } private Size calculateOutputSize(Mat transform, int width, int height) { double[] extremes = calculateExtremes(transform, width, height); double minX = extremes[0]; double minY = extremes[1]; double maxX = extremes[2]; double maxY = extremes[3]; int outputWidth = (int) Math.round(maxX - minX); int outputHeight = (int) Math.round(maxY - minY); if(outputWidth > 4096 || outputHeight > 4096){ log.warn("Selected transform would create a too large output image ({}, {})", outputWidth, outputHeight); outputWidth = Math.min(outputWidth, 4096); outputHeight = Math.min(outputHeight, 4096); } return new Size(outputWidth, outputHeight); } private List<Point> calculateTargetPoints(List<Point> source) { Point topLeft = source.get(0); Point topRight = source.get(1); Point bottomLeft = source.get(2); Point bottomRight = source.get(3); double width = Math.max( Math.sqrt(Math.pow(topLeft.x() - topRight.x(), 2) + Math.pow(topLeft.y() - topRight.y(), 2)), Math.sqrt(Math.pow(bottomLeft.x() - bottomRight.x(), 2) + Math.pow(bottomLeft.y() - bottomRight.y(), 2)) ); double height = Math.max( Math.sqrt(Math.pow(topLeft.x() - bottomLeft.x(), 2) + Math.pow(topLeft.y() - bottomLeft.y(), 2)), Math.sqrt(Math.pow(topRight.x() - bottomRight.x(), 2) + Math.pow(topRight.y() - bottomRight.y(), 2)) ); double originX = topLeft.x() <= width / 2 ? topLeft.x() : width - topLeft.x(); double originY = topLeft.y() <= height / 2 ? topLeft.y() : height - topLeft.y(); return Arrays.asList( Point.create(originX, originY), Point.create(originX + width, originY), Point.create(originX, originY + height), Point.create(originX + width, originY + height) ); } private Mat pointsToMat(List<Point> points){ int rows = points.size(); int cols = points.get(0).dimensions(); Mat mat = new Mat(rows, cols, CvType.CV_32F); FloatIndexer idx = mat.createIndexer(); for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { idx.put(i, j, (float)points.get(i).get(j)); } } return mat; } }
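opencv_core.perspectiveTransform applies a 3x3 homography to each point in homogeneous coordinates: multiply by the matrix, then divide by the resulting w component. A standalone sketch of that math for a single 2-D point; the matrix below is an arbitrary example (a pure translation), not one produced by this step:

// Sketch: what perspectiveTransform does per point, written out by hand
public class HomographySketch {
    public static void main(String[] args) {
        double[][] m = { {1, 0, 10}, {0, 1, 20}, {0, 0, 1} };  // translate by (10, 20)
        double x = 5, y = 7;
        double xh = m[0][0]*x + m[0][1]*y + m[0][2];
        double yh = m[1][0]*x + m[1][1]*y + m[1][2];
        double w  = m[2][0]*x + m[2][1]*y + m[2][2];           // perspective divide term
        System.out.println("(" + xh / w + ", " + yh / w + ")"); // (15.0, 27.0)
    }
}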
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/perspective
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/point/perspective/convert/PerspectiveTransformStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.perspective.convert; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class PerspectiveTransformStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof PerspectiveTransformStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep); return new PerspectiveTransformRunner((PerspectiveTransformStep) pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/resize/ImageResizeFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.resize; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class ImageResizeFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof ImageResizeStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep); return new ImageResizeRunner((ImageResizeStep) pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/resize/ImageResizeRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.resize; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.data.image.convert.ImageToNDArray; import ai.konduit.serving.data.image.convert.config.AspectRatioHandling; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.Image; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import org.bytedeco.javacpp.Loader; import org.bytedeco.javacpp.indexer.UByteIndexer; import org.bytedeco.opencv.opencv_core.Mat; import org.bytedeco.opencv.opencv_core.Rect; import org.bytedeco.opencv.opencv_core.Scalar; import org.bytedeco.opencv.opencv_core.Size; import org.nd4j.common.base.Preconditions; import org.nd4j.common.primitives.Pair; import java.util.ArrayList; import java.util.List; @CanRun(ImageResizeStep.class) public class ImageResizeRunner implements PipelineStepRunner { private final ImageResizeStep step; public ImageResizeRunner(ImageResizeStep step){ this.step = step; Preconditions.checkState(step.height() != null || step.width() != null, "Error in ImageResizeStep: " + "at least one of height or width (for output) must be set"); } @Override public void close() { //No op } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { //First: get names List<String> names = step.inputNames(); if(names == null || names.isEmpty()){ names = new ArrayList<>(); for(String s : data.keys()){ if(data.type(s) == ValueType.IMAGE){ names.add(s); } else if(data.type(s) == ValueType.LIST && data.listType(s) == ValueType.IMAGE){ names.add(s); } } if(names.isEmpty()){ throw new IllegalStateException("Error in ImageResizeStep execution: No configuration was provided for " + "inputNames and input Data does not have any Image or List<Image> fields. Data keys: " + data.keys()); } } else { //Check for(String s : names){ if(!data.has(s)){ throw new IllegalStateException("Input image name \"" + s + "\" (via ImageResizeStep.inputNames config) " + "is not present in the input Data instance. Data keys: " + data.keys()); } if(!(data.type(s) == ValueType.IMAGE || (data.type(s) == ValueType.LIST && data.listType(s) == ValueType.IMAGE))){ String t = data.type(s) == ValueType.LIST ? "List<" + data.listType(s) + ">" : data.type(s).name(); throw new IllegalStateException("Input image name \"" + s + "\" (via ImageResizeStep.inputNames config) " + "is present but is not an Image or List<Image> type. 
Data[\"" + s + "\"].type == " + t); } } } Data out = data.clone(); for(String s : names){ if(data.type(s) == ValueType.IMAGE){ Image i = data.getImage(s); out.put(s, resize(i)); } else { //Must be list List<Image> list = data.getListImage(s); List<Image> newList = new ArrayList<>(list.size()); for(Image i : list){ newList.add(resize(i)); } out.putListImage(s, newList); } } return out; } protected Image resize(Image in){ if((step.height() == null) != (step.width() == null)){ //Only one is specified - no need to worry about aspect ratio int h, w; double ar = in.width() / (double)in.height(); if(step.height() == null){ w = step.width(); h = (int)Math.round(w / ar); } else { h = step.height(); w = (int)Math.round(ar * h); } Mat m = in.getAs(Mat.class); Mat resized = new Mat(); org.bytedeco.opencv.global.opencv_imgproc.resize(m, resized, new Size(w, h)); return Image.create(resized); } else { //Both h/w are specified - need to check and maybe handle aspect ratio if(in.height() == step.height() && in.width() == step.width()){ return in; } double arCurrent = in.width() / (double)in.height(); double arOut = step.width() / (double)step.height(); Mat m = in.getAs(Mat.class); if(arCurrent == arOut || step.aspectRatioHandling() == AspectRatioHandling.STRETCH || step.aspectRatioHandling() == null){ //Aspect ratio OK - or just stretching Mat resized = new Mat(); org.bytedeco.opencv.global.opencv_imgproc.resize(m, resized, new Size(step.width(), step.height())); return Image.create(resized); } else { if(step.aspectRatioHandling() == AspectRatioHandling.CENTER_CROP){ Pair<Mat, BoundingBox> p = ImageToNDArray.centerCrop(m, step.height(), step.width(), false); Mat crop = p.getFirst(); if(crop.rows() == step.height() && crop.cols() == step.width()){ return Image.create(crop); } Mat resized = new Mat(); org.bytedeco.opencv.global.opencv_imgproc.resize(crop, resized, new Size(step.width(), step.height())); return Image.create(resized); } else if(step.aspectRatioHandling() == AspectRatioHandling.PAD){ if(arCurrent > arOut){ //Pad height int newH = (int)Math.round(in.width() / arOut); Mat padded = new Mat(newH, in.width(), m.type()); UByteIndexer u = padded.createIndexer(!Loader.getPlatform().startsWith("android")); u.pointer().zero(); int delta = newH - in.height(); Mat sub = padded.apply(new Rect(0,delta/2,in.width(),in.height())); m.copyTo(sub); Mat resized = new Mat(); org.bytedeco.opencv.global.opencv_imgproc.resize(padded, resized, new Size(step.width(), step.height())); return Image.create(resized); } else { //Pad width int newW = (int)Math.round(in.height() * arOut); Mat padded = new Mat(in.height(), newW, m.type()); UByteIndexer u = padded.createIndexer(!Loader.getPlatform().startsWith("android")); u.pointer().zero(); int delta = newW - in.width(); Mat sub = padded.apply(new Rect(delta/2,0, in.width(), in.height())); m.copyTo(sub); Mat resized = new Mat(); org.bytedeco.opencv.global.opencv_imgproc.resize(padded, resized, new Size(step.height(), step.width())); return Image.create(resized); } } else { throw new IllegalStateException("Unknown or not supported aspect ratio handling: " + step.aspectRatioHandling()); } } } } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/segmentation
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/segmentation/index/DrawSegmentationRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.segmentation.index; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.data.image.convert.ImageToNDArray; import ai.konduit.serving.data.image.util.ColorUtil; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.*; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray; import lombok.NonNull; import org.bytedeco.javacpp.indexer.UByteIndexer; import org.bytedeco.javacpp.indexer.UByteRawIndexer; import org.bytedeco.opencv.opencv_core.Mat; import org.bytedeco.opencv.opencv_core.Rect; import org.bytedeco.opencv.opencv_core.Scalar; import org.bytedeco.opencv.opencv_core.Size; import org.nd4j.common.base.Preconditions; import org.opencv.core.CvType; import java.nio.IntBuffer; import java.nio.LongBuffer; import java.util.Arrays; import java.util.List; import java.util.Random; @CanRun(DrawSegmentationStep.class) public class DrawSegmentationRunner implements PipelineStepRunner { protected final DrawSegmentationStep step; protected int[] colorsB; protected int[] colorsG; protected int[] colorsR; public DrawSegmentationRunner(@NonNull DrawSegmentationStep step) { this.step = step; } @Override public void close() { } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { if(colorsB == null) { List<String> classColors = step.classColors(); initColors(classColors, 32); } NDArray segmentArr = data.getNDArray(step.segmentArray()); long[] shape = segmentArr.shape(); Preconditions.checkState(shape.length == 3 && shape[0] == 1, "Expected segment indices array with shape [1, height, width]," + " got array with shape %s", shape); boolean drawingOnImage; Mat drawOn; String imgName = step.image(); Mat backgroundMask = null; boolean resizeRequired = false; if (imgName == null) { //Just drawing the mask drawOn = new Mat((int) shape[1], (int) shape[2], CvType.CV_8UC3); //8 bits per channel RGB drawingOnImage = false; } else { //Drawing the mask on the image Image i = data.getImage(imgName); int iH = i.height(); int iW = i.width(); double arImg = iW / (double)iH; double arSegment = shape[2] / (double)shape[1]; if(iH != shape[1] || iW != shape[2]) { resizeRequired = true; if(arImg != arSegment){ Preconditions.checkState(step.imageToNDArrayConfig() != null, "Image and segment indices array dimensions do not match in terms" + " of aspect ratio, and no ImageToNDArrayConfig was provided. 
Expected segment indices array with shape [1, height, width] - got array with shape %s and image with h=%s, w=%s", shape, iH, iW); } drawOn = new Mat((int) shape[1], (int) shape[2], CvType.CV_8UC3); //8 bits per channel RGB drawingOnImage = false; } else { drawOn = new Mat(); i.getAs(Mat.class).clone().convertTo(drawOn, CvType.CV_8UC3); drawingOnImage = true; } } SerializedNDArray nd = segmentArr.getAs(SerializedNDArray.class); long[] maskShape = nd.getShape(); int h = (int) maskShape[1]; int w = (int) maskShape[2]; //TODO ideally we'd use OpenCV's bitwise methods to do this, but it seems like JavaCV doesn't have those... UByteIndexer idx = drawOn.createIndexer(); //HWC BGR format UByteRawIndexer uByteRawIndexer = (UByteRawIndexer) idx; IntGetter ig = null; if (nd.getType() == NDArrayType.INT32) { IntBuffer ib = nd.getBuffer().asIntBuffer(); ig = ib::get; } else if (nd.getType() == NDArrayType.INT64) { nd.getBuffer().rewind(); LongBuffer lb = nd.getBuffer().asLongBuffer(); ig = () -> (int)lb.get(); } else { throw new RuntimeException("Unsupported segment indices array type: " + nd.getType() + " - expected INT32 or INT64"); } final boolean skipBackgroundClass = step.backgroundClass() != null; final int backgroundClass = skipBackgroundClass ? step.backgroundClass() : -1; if(skipBackgroundClass && !drawingOnImage) { backgroundMask = new Mat(drawOn.rows(), drawOn.cols(), CvType.CV_8UC1); //8 bit grayscale } if(drawingOnImage){ double opacity; if(step.opacity() == null) { opacity = DrawSegmentationStep.DEFAULT_OPACITY; } else { opacity = step.opacity(); Preconditions.checkState(opacity >= 0.0 && opacity <= 1.0, "Opacity value (if set) must be between 0.0 and 1.0, got %s", opacity); } double o2 = 1.0 - opacity; for (int y = 0; y < h; y++) { for (int x = 0; x < w; x++) { int classIdx = ig.get(); if(classIdx >= colorsB.length) initColors(step.classColors(), colorsB.length + 32); long idxB = (3 * w * y) + (3 * x); int b,g,r; if(skipBackgroundClass && classIdx == backgroundClass){ b = uByteRawIndexer.getRaw(idxB); g = uByteRawIndexer.getRaw(idxB+1); r = uByteRawIndexer.getRaw(idxB+2); } else { b = (int) (opacity * colorsB[classIdx] + o2 * uByteRawIndexer.getRaw(idxB)); g = (int) (opacity * colorsG[classIdx] + o2 * uByteRawIndexer.getRaw(idxB+1)); r = (int) (opacity * colorsR[classIdx] + o2 * uByteRawIndexer.getRaw(idxB+2)); } uByteRawIndexer.putRaw(idxB, b); uByteRawIndexer.putRaw(idxB + 1, g); uByteRawIndexer.putRaw(idxB + 2, r); } } } else { UByteIndexer bMaskIdx = backgroundMask == null ? null : backgroundMask.createIndexer(); UByteRawIndexer uByteRawIndexer2 = (UByteRawIndexer) bMaskIdx; for (int y = 0; y < h; y++) { for (int x = 0; x < w; x++) { int classIdx = ig.get(); if(classIdx >= colorsB.length) initColors(step.classColors(), colorsB.length + 32); long idxB = (3 * w * y) + (3 * x); uByteRawIndexer.putRaw(idxB,colorsB[classIdx]); uByteRawIndexer.putRaw(idxB + 1, colorsG[classIdx]); uByteRawIndexer.putRaw(idxB + 2, colorsR[classIdx]); if(backgroundMask != null) { long idxMask = w * y + x; uByteRawIndexer2.putRaw(idxMask, classIdx == backgroundClass ? 0 : 1); } } } } if(resizeRequired){ Image im = data.getImage(imgName); BoundingBox bb = ImageToNDArray.getCropRegion(im, step.imageToNDArrayConfig()); int oH = (int) (bb.height() * im.height()); int oW = (int) (bb.width() * im.width()); int x1 = (int) (bb.x1() * im.width()); int y1 = (int) (bb.y1() * im.height()); Mat resized = new Mat(); org.bytedeco.opencv.global.opencv_imgproc.resize(drawOn, resized, new Size(oW, oH)); //Now that we've resized - need to apply to the original image... 
//Note that to use accumulateWeighted we need to use a float type - method doesn't support integer types Mat resizedFloat = new Mat(); resized.convertTo(resizedFloat, CvType.CV_32FC3); Mat asFloat = new Mat(); im.getAs(Mat.class).convertTo(asFloat, CvType.CV_32FC3); Mat subset = asFloat.apply(new Rect(x1, y1, oW, oH)); double opacity = step.opacity() == null ? DrawSegmentationStep.DEFAULT_OPACITY : step.opacity(); if(backgroundMask == null){ org.bytedeco.opencv.global.opencv_imgproc.accumulateWeighted(resizedFloat, subset, opacity); } else { Mat maskResized = new Mat(); org.bytedeco.opencv.global.opencv_imgproc.resize(backgroundMask, maskResized, new Size(oW, oH)); org.bytedeco.opencv.global.opencv_imgproc.accumulateWeighted(resizedFloat, subset, opacity, maskResized); } Mat out = new Mat(); asFloat.convertTo(out, CvType.CV_8UC3); drawOn = out; } String outputName = step.outputName(); if(outputName == null) outputName = DrawSegmentationStep.DEFAULT_OUTPUT_NAME; return Data.singleton(outputName, Image.create(drawOn)); } private void initColors(List<String> classColors, int max){ if (colorsB == null && classColors != null) { colorsB = new int[classColors.size()]; colorsG = new int[classColors.size()]; colorsR = new int[classColors.size()]; for (int i = 0; i < colorsB.length; i++) { Scalar c = ColorUtil.stringToColor(classColors.get(i)); colorsB[i] = (int) c.blue(); colorsG[i] = (int) c.green(); colorsR[i] = (int) c.red(); } } if(colorsB == null || colorsB.length < max){ //Generate some random colors, because we don't have any labels, or enough labels int start; if(colorsB == null){ colorsB = new int[max]; colorsG = new int[max]; colorsR = new int[max]; start = 0; } else { start = colorsB.length; colorsB = Arrays.copyOf(colorsB, max); colorsG = Arrays.copyOf(colorsG, max); colorsR = Arrays.copyOf(colorsR, max); } Random rng = new Random(12345); if(start > 0){ //Hack to advance RNG seed, so we get repeatability for( int i=0; i<start; i++){ rng.nextInt(255); rng.nextInt(255); rng.nextInt(255); } } for( int i=start; i < max; i++ ){ Scalar s = ColorUtil.randomColor(rng); colorsB[i] = (int) s.blue(); colorsG[i] = (int) s.green(); colorsR[i] = (int) s.red(); } } } private interface IntGetter { int get(); } }
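When drawing on an image, the runner above blends each class color with the underlying pixel as out = opacity * color + (1 - opacity) * pixel, applied per BGR channel. A minimal sketch of that blend with illustrative values (the class name and pixels are made up):

// Sketch: per-pixel alpha blend of a class color over an image pixel
public class MaskBlendSketch {
    public static void main(String[] args) {
        double opacity = 0.5;
        int[] imagePx = {200, 120, 40};        // BGR pixel from the image
        int[] classPx = {0, 0, 255};           // class color (red, in BGR order)
        int[] out = new int[3];
        for (int c = 0; c < 3; c++) {
            out[c] = (int) (opacity * classPx[c] + (1 - opacity) * imagePx[c]);
        }
        System.out.println(java.util.Arrays.toString(out)); // [100, 60, 147]
    }
}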
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/segmentation
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/segmentation/index/DrawSegmentationStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.segmentation.index; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class DrawSegmentationStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof DrawSegmentationStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run step: %s", pipelineStep); return new DrawSegmentationRunner((DrawSegmentationStep)pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/show/ShowImageRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.show; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.Image; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.util.DataUtils; import org.bytedeco.javacv.CanvasFrame; import org.bytedeco.javacv.Frame; import org.nd4j.common.base.Preconditions; import java.util.ArrayList; import java.util.Collections; import java.util.List; @CanRun(ShowImageStep.class) public class ShowImageRunner implements PipelineStepRunner { private static final int MIN_HEIGHT = 64; private static final int MIN_WIDTH = 64; private ShowImageStep step; private boolean initialized; private List<CanvasFrame> canvas; public ShowImageRunner(ShowImageStep step){ this.step = step; } @Override public synchronized void close() { initialized = false; canvas = null; } @Override public PipelineStep getPipelineStep() { return step; } @Override public synchronized Data exec(Context ctx, Data data) { String name = step.imageName(); if(name == null) { String errMultipleKeys = "Image field name was not provided and could not be inferred: multiple image fields exist: %s and %s"; String errNoKeys = "Image field name was not provided and could not be inferred: no image fields exist"; name = DataUtils.inferField(data, ValueType.IMAGE, true, errMultipleKeys, errNoKeys); } boolean allowMultiple = step.allowMultiple(); boolean isSingle = data.has(name) && data.type(name) == ValueType.IMAGE; boolean validList = data.has(name) && data.type(name) == ValueType.LIST && data.listType(name) == ValueType.IMAGE && (allowMultiple || data.getListImage(name).size() == 1); if(allowMultiple){ Preconditions.checkState(isSingle || validList, "Data does not have Image value or List<Image> for name \"%s\" - data keys = %s", name, data.keys()); } else { Preconditions.checkState(isSingle || validList, "Data does not have image value (or size 1 List<Image>, given ShowImagePipeline) for name \"%s\" - data keys = %s", name, data.keys()); } List<Image> l; if(isSingle) l = Collections.singletonList(data.getImage(name)); else l = data.getListImage(name); if(!initialized) init(); if(isSingle){ Image i = l.get(0); Frame f = i.getAs(Frame.class); canvas.get(0).showImage(f); if(step.width() == null || step.height() == null || step.width() == 0 || step.height() == 0){ canvas.get(0).setCanvasSize(Math.max(MIN_WIDTH, i.width()), Math.max(MIN_HEIGHT, i.height())); } } else { if(!initialized) init(); for( int i=0; i<l.size(); i++ ){ Image img = l.get(i); Frame f = 
img.getAs(Frame.class); if(canvas.size() <= i) canvas.add(newFrame(step.displayName() + "_" + i)); CanvasFrame cf = canvas.get(i); cf.showImage(f); if(step.width() == null || step.height() == null || step.width() == 0 || step.height() == 0){ cf.setCanvasSize(Math.max(MIN_WIDTH, img.width()), Math.max(MIN_HEIGHT, img.height())); } } } return data; } protected synchronized void init() { canvas = new ArrayList<>(); canvas.add(newFrame(step.displayName())); initialized = true; } protected CanvasFrame newFrame(String name){ CanvasFrame cf = new CanvasFrame(name); int w = (step.width() == null || step.width() == 0) ? MIN_WIDTH : step.width(); int h = (step.height() == null || step.height() == 0) ? MIN_HEIGHT : step.height(); cf.setCanvasSize(w, h); return cf; } }
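The display mechanism this runner relies on is JavaCV's CanvasFrame. A minimal standalone sketch of the same pattern, assuming JavaCV/OpenCV are on the classpath, a GUI environment is available, and an image exists at the hypothetical path below:

import org.bytedeco.javacv.CanvasFrame;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Mat;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imread;

public class ShowImageSketch {
    public static void main(String[] args) {
        Mat mat = imread("/path/to/image.png");           // hypothetical path
        Frame frame = new OpenCVFrameConverter.ToMat().convert(mat);
        CanvasFrame canvas = new CanvasFrame("Preview");  // window title
        // Enforce the same 64-pixel minimum window size the runner uses
        canvas.setCanvasSize(Math.max(64, frame.imageWidth), Math.max(64, frame.imageHeight));
        canvas.showImage(frame);
    }
}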
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/step/show/ShowImageStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.show; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class ShowImageStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep pipelineStep) { return pipelineStep instanceof ShowImageStep; } @Override public PipelineStepRunner create(PipelineStep pipelineStep) { Preconditions.checkState(canRun(pipelineStep), "Unable to run pipeline of type %s", pipelineStep); return new ShowImageRunner((ShowImageStep) pipelineStep); } }
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/util/ColorUtil.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.util; import org.bytedeco.opencv.opencv_core.Scalar; import org.nd4j.common.base.Preconditions; import java.awt.*; import java.util.Random; public class ColorUtil { public static final String INVALID_COLOR = "Invalid color: Must be in one of the following formats: hex/HTML - #788E87, " + "RGB - rgb(128,0,255), or a HTML color name such as \"green\" (https://en.wikipedia.org/wiki/Web_colors#HTML_color_names) - got \"%s\""; private ColorUtil() { } /** * Convert a color to a Scalar in one of 3 formats:<br> * hex/HTML - {@code #788E87}<br> * RGB - "rgb(128,0,255)"<br> * A HTML color name such as "green" (https://en.wikipedia.org/wiki/Web_colors#HTML_color_names)<br> * @param s Color name * @return Color */ public static Scalar stringToColor(String s) { if (s.startsWith("#")) { Color c = Color.decode(s); //Color.decode parses the leading '#' itself; stripping it first would fail return org.bytedeco.opencv.helper.opencv_core.RGB(c.getRed(), c.getGreen(), c.getBlue()); } else if (s.toLowerCase().startsWith("rgb(") && s.endsWith(")")) { String sub = s.substring(4, s.length() - 1); Preconditions.checkState(sub.matches("\\d+,\\d+,\\d+"), INVALID_COLOR, s); String[] split = sub.split(","); int r = Integer.parseInt(split[0]); int g = Integer.parseInt(split[1]); int b = Integer.parseInt(split[2]); return org.bytedeco.opencv.helper.opencv_core.RGB(r, g, b); } else { Scalar sc = getColorHTML(s); if (sc == null) { throw new UnsupportedOperationException(String.format(INVALID_COLOR, s)); } return sc; } } /** * Get a color from the 16 HTML color names (not case sensitive): * white, silver, gray, black, red, maroon, yellow, olive, lime, green, aqua, teal, blue, navy, fuchsia, purple. 
* Returns null for any color name not in this list. * @param name Color name * @return The color as a Scalar, or null if not found */ public static Scalar getColorHTML(String name) { switch (name.toLowerCase()) { case "white": return org.bytedeco.opencv.helper.opencv_core.RGB(255, 255, 255); case "silver": return org.bytedeco.opencv.helper.opencv_core.RGB(0xC0, 0xC0, 0xC0); case "gray": return org.bytedeco.opencv.helper.opencv_core.RGB(0x80, 0x80, 0x80); case "black": return org.bytedeco.opencv.helper.opencv_core.RGB(0x00, 0x00, 0x00); case "red": return org.bytedeco.opencv.helper.opencv_core.RGB(0xFF, 0x00, 0x00); case "maroon": return org.bytedeco.opencv.helper.opencv_core.RGB(0x80, 0x00, 0x00); case "yellow": return org.bytedeco.opencv.helper.opencv_core.RGB(0xFF, 0xFF, 0x00); case "olive": return org.bytedeco.opencv.helper.opencv_core.RGB(0x80, 0x80, 0x00); case "lime": return org.bytedeco.opencv.helper.opencv_core.RGB(0x00, 0xFF, 0x00); case "green": return org.bytedeco.opencv.helper.opencv_core.RGB(0x00, 0x80, 0x00); case "aqua": return org.bytedeco.opencv.helper.opencv_core.RGB(0x00, 0xFF, 0xFF); case "teal": return org.bytedeco.opencv.helper.opencv_core.RGB(0x00, 0x80, 0x80); case "blue": return org.bytedeco.opencv.helper.opencv_core.RGB(0x00, 0x00, 0xFF); case "navy": return org.bytedeco.opencv.helper.opencv_core.RGB(0x00, 0x00, 0x80); case "fuchsia": return org.bytedeco.opencv.helper.opencv_core.RGB(0xFF, 0x00, 0xFF); case "purple": return org.bytedeco.opencv.helper.opencv_core.RGB(0x80, 0x00, 0x80); } return null; } /** * Generate a random color using the specified RNG * * @param rng RNG to use * @return Random color */ public static Scalar randomColor(Random rng) { return org.bytedeco.opencv.helper.opencv_core.RGB(rng.nextInt(255), rng.nextInt(255), rng.nextInt(255)); } }
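Usage sketch for ColorUtil, exercising the three accepted color formats from the parsing logic above:

import ai.konduit.serving.data.image.util.ColorUtil;
import org.bytedeco.opencv.opencv_core.Scalar;

public class ColorUtilExample {
    public static void main(String[] args) {
        Scalar hex = ColorUtil.stringToColor("#788E87");        // hex/HTML form
        Scalar rgb = ColorUtil.stringToColor("rgb(128,0,255)"); // rgb(r,g,b) form
        Scalar named = ColorUtil.stringToColor("green");        // HTML color name
        // red()/green()/blue() read the components back, as the drawing code above does
        System.out.println(named.red() + "," + named.green() + "," + named.blue()); // 0.0,128.0,0.0
    }
}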
0
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image
java-sources/ai/konduit/serving/konduit-serving-image/0.3.0/ai/konduit/serving/data/image/util/ImageUtils.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.util; import ai.konduit.serving.data.image.convert.ImageToNDArray; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.convert.config.ImageNormalization; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.api.data.Image; import ai.konduit.serving.pipeline.api.data.NDArrayType; import ai.konduit.serving.pipeline.api.data.Point; import org.bytedeco.javacpp.*; import org.bytedeco.javacpp.indexer.*; import org.bytedeco.opencv.opencv_core.Mat; import org.nd4j.common.base.Preconditions; import org.nd4j.common.util.ArrayUtil; import org.nd4j.linalg.api.concurrency.AffinityManager; import org.nd4j.linalg.api.memory.pointers.PagedPointer; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.exception.ND4JIllegalStateException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.indexing.INDArrayIndex; import org.nd4j.linalg.indexing.NDArrayIndex; import org.nd4j.nativeblas.NativeOpsHolder; import java.nio.*; import java.util.function.IntToDoubleFunction; public class ImageUtils { private ImageUtils(){ } /** * Adapted from datavec's NativeImageLoader * @param image the input image * @param direct whether to use a direct (off-heap) indexer for the image data * @param ret the output ndarray to fill from the image */ public static void fillNDArray(Mat image, boolean direct, INDArray ret) { long rows = image.rows(); long cols = image.cols(); long channels = image.channels(); if (ret.length() != rows * cols * channels) { throw new ND4JIllegalStateException("INDArray provided to store image does not match image size: {channels: " + channels + ", rows: " + rows + ", columns: " + cols + "}"); } Indexer idx = image.createIndexer(direct); Pointer pointer = ret.data().pointer(); long[] stride = ret.stride(); boolean done = false; PagedPointer pagedPointer = new PagedPointer(pointer, rows * cols * channels, ret.data().offset() * Nd4j.sizeOfDataType(ret.data().dataType())); if (pointer instanceof FloatPointer) { FloatIndexer retidx = FloatIndexer.create(pagedPointer.asFloatPointer(), new long[] {channels, rows, cols}, new long[] {stride[0], stride[1], stride[2]}, direct); if (idx instanceof UByteIndexer) { UByteIndexer ubyteidx = (UByteIndexer) idx; for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, ubyteidx.get(i, j, k)); } } } done = true; } else if (idx instanceof UShortIndexer) { UShortIndexer ushortidx = (UShortIndexer) idx; for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, ushortidx.get(i, j, k)); } } } done = true; } else if (idx instanceof IntIndexer) { IntIndexer intidx = (IntIndexer) idx; for (long k = 
0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, intidx.get(i, j, k)); } } } done = true; } else if (idx instanceof FloatIndexer) { FloatIndexer floatidx = (FloatIndexer) idx; for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, floatidx.get(i, j, k)); } } } done = true; } retidx.release(); } else if (pointer instanceof DoublePointer) { DoubleIndexer retidx = DoubleIndexer.create(pagedPointer.asDoublePointer(), new long[] {channels, rows, cols}, new long[] {stride[0], stride[1], stride[2]}, direct); if (idx instanceof UByteIndexer) { UByteIndexer ubyteidx = (UByteIndexer) idx; for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, ubyteidx.get(i, j, k)); } } } done = true; } else if (idx instanceof UShortIndexer) { UShortIndexer ushortidx = (UShortIndexer) idx; for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, ushortidx.get(i, j, k)); } } } done = true; } else if (idx instanceof IntIndexer) { IntIndexer intidx = (IntIndexer) idx; for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, intidx.get(i, j, k)); } } } done = true; } else if (idx instanceof FloatIndexer) { FloatIndexer floatidx = (FloatIndexer) idx; for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { retidx.put(k, i, j, floatidx.get(i, j, k)); } } } done = true; } retidx.release(); } if (!done) { for (long k = 0; k < channels; k++) { for (long i = 0; i < rows; i++) { for (long j = 0; j < cols; j++) { if (ret.rank() == 3) { ret.putScalar(k, i, j, idx.getDouble(i, j, k)); } else if (ret.rank() == 4) { ret.putScalar(0, k, i, j, idx.getDouble(i, j, k)); } else if (ret.rank() == 2) { ret.putScalar(i, j, idx.getDouble(i, j)); } else throw new ND4JIllegalStateException("NativeImageLoader expects 2D, 3D or 4D output array, but " + ret.rank() + "D array was given"); } } } } idx.release(); image.data(); Nd4j.getAffinityManager().tagLocation(ret, AffinityManager.Location.HOST); } public static BoundingBox accountForCrop(Image image, BoundingBox bbox, ImageToNDArrayConfig config) { return accountForCrop(bbox, image.width(), image.height(), config); } public static BoundingBox accountForCrop(BoundingBox bbox, int width, int height, ImageToNDArrayConfig config) { if (config == null) return bbox; BoundingBox cropRegion = ImageToNDArray.getCropRegion(width, height, config); double cropWidth = cropRegion.width(); double cropHeight = cropRegion.height(); double x1 = cropRegion.x1() + cropWidth * bbox.x1(); double x2 = cropRegion.x1() + cropWidth * bbox.x2(); double y1 = cropRegion.y1() + cropHeight * bbox.y1(); double y2 = cropRegion.y1() + cropHeight * bbox.y2(); return BoundingBox.createXY(x1, x2, y1, y2, bbox.label(), bbox.probability()); } public static Point accountForCrop(Point relPoint, int width, int height, ImageToNDArrayConfig imageToNDArrayConfig) { if(imageToNDArrayConfig == null){ return relPoint.toAbsolute(width, height); } BoundingBox cropRegion = ImageToNDArray.getCropRegion(width, height, imageToNDArrayConfig); double cropWidth = cropRegion.width(); double cropHeight = cropRegion.height(); return Point.create( cropRegion.x1() + cropWidth * relPoint.x(), cropRegion.y1() + cropHeight * relPoint.y(), relPoint.label(), 
relPoint.probability() ).toAbsolute(width, height); } /** * Get a float normalizer based on the input * {@link ImageToNDArrayConfig} * @param config the input configuration * @param rgb whether the image layout is rgb or bgr * @return the {@link FloatNormalizer} based on the * given configuration */ public static FloatNormalizer getFloatNormalizer(ImageToNDArrayConfig config, boolean rgb) { FloatNormalizer f; ImageNormalization n = config.normalization(); if(n == null || n.type() == ImageNormalization.Type.NONE) { f = (x,c) -> x; //No-op } else { switch (config.normalization().type()) { case SCALE: float scale = (n.maxValue() == null ? 255.0f : n.maxValue().floatValue()) / 2.0f; f = (x,c) -> (x / scale - 1.0f); break; case SCALE_01: float scale01 = n.maxValue() == null ? 255.0f : n.maxValue().floatValue(); f = (x,c) -> (x / scale01); break; case SUBTRACT_MEAN: //TODO support grayscale Preconditions.checkState(n.meanRgb() != null, "Error during normalization: Normalization type is set to " + "SUBTRACT_MEAN but no meanRgb array was provided"); double[] mrgb = n.meanRgb(); float[] channelMeans = rgb ? ArrayUtil.toFloats(mrgb) : new float[]{(float) mrgb[2], (float) mrgb[1], (float) mrgb[0]}; f = (x,c) -> (x - channelMeans[c]); break; case STANDARDIZE: Preconditions.checkState(n.meanRgb() != null, "Error during normalization: Normalization type is set to " + "STANDARDIZE but no meanRgb array was provided"); Preconditions.checkState(n.stdRgb() != null, "Error during normalization: Normalization type is set to " + "STANDARDIZE but no stdRgb array was provided"); double[] mrgb2 = n.meanRgb(); double[] stdrgb = n.stdRgb(); float[] channelMeans2 = rgb ? ArrayUtil.toFloats(mrgb2) : new float[]{(float) mrgb2[2], (float) mrgb2[1], (float) mrgb2[0]}; float[] channelStd = rgb ? ArrayUtil.toFloats(stdrgb) : new float[]{(float) stdrgb[2], (float) stdrgb[1], (float) stdrgb[0]}; f = (x,c) -> ( (x-channelMeans2[c]) / channelStd[c]); break; case INCEPTION: float scale2 = n.maxValue() == null ? 255.0f : n.maxValue().floatValue(); f = (x,c) -> ( ((x/scale2) - 0.5f) * 2.0f ); break; case VGG_SUBTRACT_MEAN: double[] mrgbVgg = ImageNormalization.getVggMeanRgb(); float[] channelMeansVGG = rgb ? ArrayUtil.toFloats(mrgbVgg) : new float[]{(float) mrgbVgg[2], (float) mrgbVgg[1], (float) mrgbVgg[0]}; f = (x,c) -> (x - channelMeansVGG[c]); break; case IMAGE_NET: double[] imagenetMeanRgb = ImageNormalization.getImagenetMeanRgb(); double[] imageNetMeanStd = ImageNormalization.getImageNetStdRgb(); float[] imageNetNormalized = rgb ? ArrayUtil.toFloats(imagenetMeanRgb) : new float[]{(float) imagenetMeanRgb[2], (float) imagenetMeanRgb[1], (float) imagenetMeanRgb[0]}; float[] imageNetStdNormalized = rgb ? 
ArrayUtil.toFloats(imageNetMeanStd) : new float[]{(float) imageNetMeanStd[2], (float) imageNetMeanStd[1], (float) imageNetMeanStd[0]}; f = (x,c) -> (x - imageNetNormalized[c]) / imageNetStdNormalized[c]; break; default: throw new UnsupportedOperationException("Unsupported image normalization type: " + config.normalization().type()); } } return f; } /** * Cast the given {@link ByteBuffer} * from the fromType to the toType * @param from the input bytebuffer to cast * @param fromType the from type * @param toType the new type of the bytebuffer * @return the output bytebuffer */ public static ByteBuffer cast(ByteBuffer from, NDArrayType fromType, NDArrayType toType) { if (fromType == toType) return from; boolean direct = !Loader.getPlatform().startsWith("android"); IntToDoubleFunction f; int length; switch (fromType) { case DOUBLE: DoubleBuffer db = from.asDoubleBuffer(); length = db.limit(); f = db::get; break; case FLOAT: FloatBuffer fb = from.asFloatBuffer(); length = fb.limit(); f = fb::get; break; case INT64: LongBuffer lb = from.asLongBuffer(); length = lb.limit(); f = i -> (double) lb.get(); break; case INT32: IntBuffer ib = from.asIntBuffer(); length = ib.limit(); f = ib::get; break; case INT16: ShortBuffer sb = from.asShortBuffer(); length = sb.limit(); f = sb::get; break; case INT8: length = from.limit(); f = from::get; break; case FLOAT16: case BFLOAT16: case UINT64: case UINT32: case UINT16: case UINT8: case BOOL: case UTF8: default: throw new UnsupportedOperationException("Conversion from " + fromType + " not supported or not yet implemented"); } int bytesLength = toType.width() * length; ByteBuffer bb = direct ? ByteBuffer.allocateDirect(bytesLength).order(ByteOrder.LITTLE_ENDIAN) : ByteBuffer.allocate(bytesLength).order(ByteOrder.LITTLE_ENDIAN); switch (toType) { case DOUBLE: DoubleBuffer db = bb.asDoubleBuffer(); for (int i = 0; i < length; i++) db.put(f.applyAsDouble(i)); break; case FLOAT: FloatBuffer fb = bb.asFloatBuffer(); for (int i = 0; i < length; i++) fb.put((float) f.applyAsDouble(i)); break; case INT64: LongBuffer lb = bb.asLongBuffer(); for (int i = 0; i < length; i++) lb.put((long) f.applyAsDouble(i)); break; case INT32: IntBuffer ib = bb.asIntBuffer(); for (int i = 0; i < length; i++) ib.put((int) f.applyAsDouble(i)); break; case INT16: ShortBuffer sb = bb.asShortBuffer(); for (int i = 0; i < length; i++) sb.put((short) f.applyAsDouble(i)); break; case INT8: for (int i = 0; i < length; i++) bb.put((byte) f.applyAsDouble(i)); break; case UINT8: //TODO inefficient - x -> double -> int -> uint8 UByteIndexer idx_ui8 = UByteIndexer.create(bb); for( int i=0; i<length; i++ ) idx_ui8.put(i, (int)f.applyAsDouble(i)); break; case FLOAT16: HalfIndexer idx_f16 = HalfIndexer.create(bb.asShortBuffer()); for( int i=0; i<length; i++) idx_f16.put(i, (float)f.applyAsDouble(i)); break; case BFLOAT16: Bfloat16Indexer idx_bf16 = Bfloat16Indexer.create(bb.asShortBuffer()); for( int i=0; i<length; i++ ) idx_bf16.put(i, (float)f.applyAsDouble(i)); break; case UINT64: ULongIndexer idx_ui64 = ULongIndexer.create(bb.asLongBuffer()); for( int i=0; i<length; i++) idx_ui64.put(i, (long)f.applyAsDouble(i)); break; case UINT32: UIntIndexer idx_ui32 = UIntIndexer.create(bb.asIntBuffer()); for( int i=0; i<length; i++ ) idx_ui32.put(i, (int)f.applyAsDouble(i)); break; case UINT16: UShortIndexer idx_ui16 = UShortIndexer.create(bb.asShortBuffer()); for( int i=0; i<length; i++ ) idx_ui16.put(i, (int)f.applyAsDouble(i)); break; default: throw new UnsupportedOperationException("Conversion from " + fromType + 
" to " + toType + " not supported or not yet implemented"); } return bb; } public interface FloatNormalizer { float normalize(float f, int channel); } }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/ImageConfigModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-image-config") public class ImageConfigModuleInfo { private ImageConfigModuleInfo(){ } }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/KonduitServingImageConfigJsonMapping.java
package ai.konduit.serving.data.image;import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT public class KonduitServingImageConfigJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); l.add(new JsonSubType("FRAME_CAPTURE", ai.konduit.serving.data.image.step.capture.CameraFrameCaptureStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("IMAGE_TO_NDARRAY", ai.konduit.serving.data.image.step.ndarray.ImageToNDArrayStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("GRAY_SCALE", ai.konduit.serving.data.image.step.grayscale.GrayScaleStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("DRAW_POINTS", ai.konduit.serving.data.image.step.point.draw.DrawPointsStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("CROP_FIXED_GRID", ai.konduit.serving.data.image.step.grid.crop.CropFixedGridStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("DRAW_SEGMENTATION", ai.konduit.serving.data.image.step.segmentation.index.DrawSegmentationStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("DRAW_FACE_KEY_POINTS", ai.konduit.serving.data.image.step.face.DrawFaceKeyPointsStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("SHOW_IMAGE", ai.konduit.serving.data.image.step.show.ShowImageStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("IMAGE_CROP", ai.konduit.serving.data.image.step.crop.ImageCropStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("RELATIVE_TO_ABSOLUTE", ai.konduit.serving.data.image.step.point.convert.RelativeToAbsoluteStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("DRAW_BOUNDING_BOX", ai.konduit.serving.data.image.step.bb.draw.DrawBoundingBoxStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("IMAGE_RESIZE", ai.konduit.serving.data.image.step.resize.ImageResizeStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("CROP_GRID", ai.konduit.serving.data.image.step.grid.crop.CropGridStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("DRAW_FIXED_GRID", ai.konduit.serving.data.image.step.grid.draw.DrawFixedGridStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("EXTRACT_BOUNDING_BOX", ai.konduit.serving.data.image.step.bb.extract.ExtractBoundingBoxStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("PERSPECTIVE_TRANSFORM", ai.konduit.serving.data.image.step.point.perspective.convert.PerspectiveTransformStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("DRAW_GRID", ai.konduit.serving.data.image.step.grid.draw.DrawGridStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new JsonSubType("VIDEO_CAPTURE", ai.konduit.serving.data.image.step.capture.VideoFrameCaptureStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); l.add(new 
JsonSubType("DRAW_HEATMAP", ai.konduit.serving.data.image.step.point.heatmap.DrawHeatmapStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class)); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert/ImageToNDArrayConfig.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.convert; import ai.konduit.serving.data.image.convert.config.AspectRatioHandling; import ai.konduit.serving.data.image.convert.config.ImageNormalization; import ai.konduit.serving.data.image.convert.config.NDChannelLayout; import ai.konduit.serving.data.image.convert.config.NDFormat; import ai.konduit.serving.pipeline.api.data.NDArrayType; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import org.nd4j.shade.jackson.annotation.JsonProperty; /** * Configuration for converting {@link ai.konduit.serving.pipeline.api.data.Image}s to {@link ai.konduit.serving.pipeline.api.data.NDArray}s. * <p> * The following can be configured:<br> * <ul> * <li><b>height</b>: Output NDArray image height: leave null to convert to the same size as the input. Default: null</li> * <li><b>width</b>: Output NDArray image width: leave null to convert to the same size as the input. Default: null</li> * <li><b>datatype</b>: {@link NDArrayType} (data type) of the output array</li> * <li><b>includeMinibatchDim</b>: If true: the output array will be rank 4 with shape [1, c, h, w] or [1, h, w, c]. * If false: return rank 3 array with shape [c, h, w] or [h, w, c]. Default: true</li> * <li><b>aspectRatioHandling</b>: How should input images with different aspect ratio to the output height/width be handled? Default: CENTER_CROP</li> * <ul> * <li><b>CENTER_CROP</b>: Crop the larger dimension down to the correct aspect ratio (and then resize if necessary).</li> * <li><b>PAD</b>: Zero pad the smaller dimension to make the aspect ratio match the output (and then resize if necessary)</li> * <li><b>STRETCH</b>: Simply resize the image to the required aspect ratio, distorting the image if necessary</li> * </ul> * <li><b>format</b>: CHANNELS_FIRST (output shape: [1, c, h, w] or [c, h, w]) or CHANNELS_LAST (output shape: [1, h, w, c] or [h, w, c])</li> * <li><b>channels</b>: The layout for the returned array. Note input images will be converted if necessary. Default: RGB</li> * <ul> * <li><b>RGB</b>: 3 channels, ordered according to: red, green, blue - most common for TensorFlow, Keras, and some other libraries</li> * <li><b>BGR</b>: 3 channels, ordered according to: blue, green, red - the default for OpenCV, JavaCV, DL4J</li> * <li><b>RGBA</b>: 4 channels, ordered according to: red, green, blue, alpha</li> * <li><b>BGRA</b>: 4 channels, ordered according to: blue, green, red, alpha</li> * <li><b>GRAYSCALE</b>: 1 channel - grayscale</li> * </ul> * <li><b>normalization</b>: How the image should be normalized. 
See {@link ImageNormalization} - supports scaling ([0,1] range), * subtracting mean (out = (in-mean)), standardization (out = (in-mean)/stdev), inception ([-1, 1] range) and * VGG mean subtraction (fixed out = in - meanRgb, where meanRgb is hardcoded to [123.68, 116.779, 103.939]). * Default: simple scale normalization (SCALE, giving a [-1, 1] output range). * Note: If image normalization is null, or ImageNormalization.type == Type.NONE, no normalization is applied. * </li> * <li><b>listHandling</b>: Only applies in situations such as {@link ai.konduit.serving.data.image.step.ndarray.ImageToNDArrayStep}, * and only when {@code List<Image>} is passed in instead of {@code Image}. This setting determines what the output * should be. NONE: Error for {@code List<Image>} input (only single Images are allowed). BATCH: a single output * NDArray is returned, with the images batched along dimension 0. LIST_OUT: A {@code List<NDArray>} is returned * instead of a single {@code NDArray} - one entry for each entry in the input {@code List<Image>}. FIRST: * the first image only is converted and returned as a single {@code NDArray} - the remainder are discarded/ignored. * </li> * </ul> * * @author Alex Black */ @Data @Accessors(fluent = true) @NoArgsConstructor @Schema(description = "Configuration for converting an image into an n-dimensional array.") public class ImageToNDArrayConfig { /** * See {@link ImageToNDArrayConfig} - listHandling field */ @Schema(description = "An enum to specify how to handle a list of input images. <br><br>" + "NONE -> No list handling, i.e., simply convert a single image to an n-dimensional array (an error is raised if the input is a list of images), <br>" + "BATCH -> Convert a list of images to a single batched n-dimensional array (whose first axis is the image index), <br>" + "LIST_OUT -> Convert a list of images to a list of n-dimensional arrays, <br>" + "FIRST -> Convert the first image in the list of images to an n-dimensional array.") public enum ListHandling {NONE, BATCH, LIST_OUT, FIRST} @Schema(description = "Output array image height. Leave null to convert to the same size as the image height.") private Integer height; @Schema(description = "Output array image width. Leave null to convert to the same size as the image width.") private Integer width; @Schema(description = "Data type of the n-dimensional array.", defaultValue = "FLOAT") private NDArrayType dataType = NDArrayType.FLOAT; @Schema(description = "If true, the output array will contain an extra dimension for the minibatch number. This " + "will look like (1, Channels, Height, Width) instead of (Channels, Height, Width) for format == CHANNELS_FIRST " + "or (1, Height, Width, Channels) instead of (Height, Width, Channels) for format == CHANNELS_LAST.", defaultValue = "true") private boolean includeMinibatchDim = true; @Schema(description = "An enum specifying how to handle the situation where the input image and output NDArray have different aspect ratios. 
<br><br>" + "CENTER_CROP (crop larger dimension then resize if necessary), <br>" + "PAD (pad smaller dimension then resize if necessary), <br>" + "STRETCH (simply resize, distorting if necessary).", defaultValue = "CENTER_CROP") private AspectRatioHandling aspectRatioHandling = AspectRatioHandling.CENTER_CROP; @Schema(description = "The format to be used when converting an Image to an NDArray.", defaultValue = "CHANNELS_FIRST") private NDFormat format = NDFormat.CHANNELS_FIRST; @Schema(description = "An enum that represents the type (and order) of the color channels for an image after it has " + "been converted to an NDArray. For example, RGB vs. BGR etc", defaultValue = "RGB") private NDChannelLayout channelLayout = NDChannelLayout.RGB; @Schema(description = "Configuration that specifies the normalization type of an image array values.") private ImageNormalization normalization = new ImageNormalization(ImageNormalization.Type.SCALE); @Schema(description = "An enum to specify how to handle a list of input images.", defaultValue = "NONE") private ListHandling listHandling = ListHandling.NONE; public ImageToNDArrayConfig(@JsonProperty("height") Integer height, @JsonProperty("width") Integer width, @JsonProperty("dataType") NDArrayType dataType, @JsonProperty("includeMinibatchDim") boolean includeMinibatchDim, @JsonProperty("aspectRatioHandling") AspectRatioHandling aspectRatioHandling, @JsonProperty("format") NDFormat format, @JsonProperty("channelLayout") NDChannelLayout channelLayout, @JsonProperty("normalization") ImageNormalization normalization, @JsonProperty("listHandling") ListHandling listHandling){ this.height = height; this.width = width; this.dataType = dataType; this.includeMinibatchDim = includeMinibatchDim; this.aspectRatioHandling = aspectRatioHandling; this.format = format; this.channelLayout = channelLayout; this.normalization = normalization; this.listHandling = listHandling; } }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert/config/AspectRatioHandling.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.convert.config; import io.swagger.v3.oas.annotations.media.Schema; /** * How the situation where the input image and output image/NDArray have different aspect ratios should be handled.<br> * Used in {@link ai.konduit.serving.data.image.convert.ImageToNDArrayConfig} and {@link ai.konduit.serving.data.image.step.resize.ImageResizeStep} * See {@link ai.konduit.serving.data.image.convert.ImageToNDArrayConfig} for more details<br> * <ul> * <li>CENTER_CROP: Crop the larger dimension down to the correct aspect ratio (and then resize if necessary).</li> * <li>PAD: Zero pad the smaller dimension to make the aspect ratio match the output (and then resize if necessary)</li> * <li>STRETCH: Simply resize the image to the required aspect ratio, distorting the image if necessary</li> * </ul> */ @Schema(description = "An enum specifying how to handle the situation where the input image and output NDArray have different aspect ratios. <br><br>" + "CENTER_CROP -> Crop the larger dimension down to the correct aspect ratio (and then resize if necessary), <br>" + "PAD -> Zero pad the smaller dimension to make the aspect ratio match the output (and then resize if necessary), <br>" + "STRETCH -> Simply resize the image to the required aspect ratio, distorting the image if necessary") public enum AspectRatioHandling { CENTER_CROP, PAD, STRETCH }
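A rough sketch of the arithmetic behind the three policies, for a 1920x1080 input and a 224x224 target. This is an illustration of the documented behavior only; the actual crop/pad logic lives in ImageToNDArray:

public class AspectRatioSketch {
    public static void main(String[] args) {
        int inW = 1920, inH = 1080, outW = 224, outH = 224;
        // CENTER_CROP: crop the larger dimension down to the output aspect ratio
        // (1:1 here), keeping the center, then resize the crop to 224x224
        int cropW = Math.min(inW, inH * outW / outH);   // -> 1080
        int cropH = Math.min(inH, inW * outH / outW);   // -> 1080
        System.out.println("CENTER_CROP region: " + cropW + "x" + cropH);
        // PAD: zero-pad the smaller dimension up to the output aspect ratio
        // (a 1920x1920 canvas for a square target), then resize to 224x224
        int padW = Math.max(inW, inH * outW / outH);    // -> 1920
        int padH = Math.max(inH, inW * outH / outW);    // -> 1920
        System.out.println("PAD canvas: " + padW + "x" + padH);
        // STRETCH: resize 1920x1080 directly to 224x224, distorting the content
        System.out.println("STRETCH output: " + outW + "x" + outH);
    }
}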
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert/config/ImageNormalization.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.convert.config; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.experimental.Accessors; /** * * <ul> * <li><b>NONE</b>: No image normalization will be applied</li> * <li><b>SCALE</b>: Divide images by maxValue/2, or divide by 255/2 if maxValue is not specified, then subtract 1.0, * to give output in range [-1,1]. This is the default.</li> * <li><b>SCALE_01</b>: Divide images by maxValue, or divide by 255 if maxValue is not specified, to give output in range [0,1].</li> * <li><b>SUBTRACT_MEAN</b>: Subtract the channels by the provided meanRgb array, with values [meanRed, meanGreen, meanBlue]. * out = (in - mean) for each channel. Note that if the output format is in BGR format, the meanRgb value should * still be provided in RGB order</li> * <li><b>STANDARDIZE</b>: Subtract the channels by the provided meanRgb array, and then divide by stdRgb, where meanRgb * is [meanRed, meanGreen, meanBlue], and stdRgb is [standardDeviationRed, standardDeviationGreen, standardDeviationBlue]. * out = (in - mean)/std for each channel. Note that if the output format is in BGR format, the meanRgb and stdRgb * values should still be provided in RGB order.</li> * <li><b>INCEPTION</b>: Applies inception preprocessing for inference/evaluation as described here: <a href="https://github.com/tensorflow/models/blob/master/research/slim/preprocessing/inception_preprocessing.py">inception_preprocessing.py</a> * Specifically: preprocess_for_eval method: scale to [-1, 1] range. * In practice this is done by dividing by 255 (assuming pixels are in range 0 to 255) to give [0, 1] then subtracting * 0.5 and multiplying by 2 to give [-1, 1]. Note uses maxValue (like SCALE) if provided. * </li> * <li><b>VGG_SUBTRACT_MEAN</b>: As per <i>SUBTRACT_MEAN</i> but the fixed values [meanRed, meanGreen, meanBlue] = * [123.68, 116.779, 103.939]. Note the meanRgb array with these values need not be provided explicitly. 
If the * output format is BGR, these are appropriately reordered before applying to the channels.</li> * </ul> * */ @AllArgsConstructor @Data @Accessors(fluent = true) @Builder @Schema(description = "Configuration that specifies the normalization type of the image array values.") public class ImageNormalization { protected static final double[] VGG_MEAN_RGB = {123.68, 116.779, 103.939}; protected static final double[] IMAGE_NET_MEAN_RGB = {0.485, 0.456, 0.406}; protected static final double[] IMAGE_NET_STD_RGB = {0.229, 0.224, 0.225}; public static double[] getVggMeanRgb(){ return VGG_MEAN_RGB.clone(); } public static double[] getImagenetMeanRgb(){ return IMAGE_NET_MEAN_RGB.clone(); } public static double[] getImageNetStdRgb(){ return IMAGE_NET_STD_RGB.clone(); } @Schema(description = "An enum that specifies the normalization type of the image array values. <br><br>" + "NONE -> No image normalization will be applied, <br>" + "SCALE -> Divide images by maxValue/2, or divide by 255/2 if maxValue is not specified, then subtract 1.0, to give output in range [-1,1], <br>" + "SCALE_01 -> Divide images by maxValue, or divide by 255 if maxValue is not specified, to give output in range [0,1], <br>" + "SUBTRACT_MEAN -> Subtract the channels by the provided meanRgb array, with values [meanRed, meanGreen, meanBlue], " + "out = (in - mean) for each channel. Note that if the output format is in BGR format, the meanRgb value should " + "still be provided in RGB order. <br>" + "STANDARDIZE -> Subtract the channels by the provided meanRgb array, and then divide by stdRgb, where meanRgb " + "is [meanRed, meanGreen, meanBlue], and stdRgb is [standardDeviationRed, standardDeviationGreen, standardDeviationBlue]. " + "out = (in - mean)/std for each channel. Note that if the output format is in BGR format, the meanRgb and stdRgb " + "values should still be provided in RGB order. <br>" + "INCEPTION -> Applies inception preprocessing for inference/evaluation as described here: https://github.com/tensorflow/models/blob/master/research/slim/preprocessing/inception_preprocessing.py " + "Specifically, preprocess_for_eval method: scale to [-1, 1] range. " + "In practice this is done by dividing by 255 (assuming pixels are in range 0 to 255) to give [0, 1] then subtracting " + "0.5 and multiplying by 2 to give [-1, 1]. Note uses maxValue (like SCALE) if provided. <br>" + "VGG_SUBTRACT_MEAN -> As per SUBTRACT_MEAN but the fixed values [meanRed, meanGreen, meanBlue] = " + "[123.68, 116.779, 103.939]. Note the meanRgb array with these values need not be provided explicitly. If the " + "output format is BGR, these are appropriately reordered before applying to the channels. <br>" + "IMAGE_NET -> As per STANDARDIZE but with the standard ImageNet mean and standard deviation RGB values built in.") public enum Type { NONE, SCALE, SCALE_01, SUBTRACT_MEAN, STANDARDIZE, INCEPTION, VGG_SUBTRACT_MEAN, IMAGE_NET } @Schema(description = "An enum that specifies the type of normalization applied.", defaultValue = "SCALE") public Type type = Type.SCALE; @Schema(description = "Max value to divide each value in the image by. Used with SCALE and INCEPTION " + "normalization type.") private Double maxValue; @Schema(description = "An array of mean RGB values. Used with SUBTRACT_MEAN, STANDARDIZE and VGG_SUBTRACT_MEAN " + "normalization type.") private double[] meanRgb; @Schema(description = "The standard deviation of each color channel, in RGB order. Used with " + "STANDARDIZE normalization type.") private double[] stdRgb; public ImageNormalization(){ this(Type.SCALE); } public ImageNormalization(Type type){ this.type = type; } }
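Usage sketch for ImageNormalization via its Lombok @Builder, configuring STANDARDIZE with explicit per-channel statistics; the std values here are illustrative, not prescribed by the class:

import ai.konduit.serving.data.image.convert.config.ImageNormalization;

public class NormalizationExample {
    public static void main(String[] args) {
        ImageNormalization standardize = ImageNormalization.builder()
                .type(ImageNormalization.Type.STANDARDIZE)
                .meanRgb(new double[]{123.68, 116.779, 103.939}) // RGB order, even for BGR output
                .stdRgb(new double[]{58.4, 57.1, 57.4})          // illustrative std values
                .build();
        // Applied downstream as out = (in - mean) / std, per channel
        System.out.println(standardize.type());
    }
}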
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert/config/NDChannelLayout.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.convert.config; import io.swagger.v3.oas.annotations.media.Schema; /** * Represents the type (and order) of the channels for an image after it has been converted to an NDArray * <ul> * <li>RGB: 3 channels, ordered according to: red, green, blue - most common for TensorFlow, Keras, and some other libraries</li> * <li>BGR: 3 channels, ordered according to: blue, green, red - the default for OpenCV, JavaCV, DL4J</li> * <li>RGBA: 4 channels, ordered according to: red, green, blue, alpha</li> * <li>BGRA: 4 channels, ordered according to: blue, green, red, alpha</li> * <li>GRAYSCALE: 1 channel - grayscale</li> * </ul> */ @Schema(description = "An enum that represents the type (and order) of the channels for an image after it has been converted to an NDArray. <br><br>" + "RGB -> 3 channels, ordered according to: red, green, blue - most common for TensorFlow, Keras, and some other libraries, <br>" + "BGR -> 3 channels, ordered according to: blue, green, red - the default for OpenCV, JavaCV, DL4J, <br>" + "RGBA -> 4 channels, ordered according to: red, green, blue, alpha, <br>" + "BGRA -> 4 channels, ordered according to: blue, green, red, alpha, <br>" + "GRAYSCALE -> 1 channel - grayscale.") public enum NDChannelLayout { RGB, RGBA, BGR, BGRA, GRAYSCALE; public int numChannels() { switch (this) { case RGB: case BGR: return 3; case RGBA: case BGRA: return 4; case GRAYSCALE: return 1; default: throw new RuntimeException("Unknown enum value: " + this); } } }
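A quick usage sketch for numChannels(), grounded directly in the enum above:

import ai.konduit.serving.data.image.convert.config.NDChannelLayout;

public class ChannelLayoutExample {
    public static void main(String[] args) {
        // Prints: RGB -> 3, RGBA -> 4, BGR -> 3, BGRA -> 4, GRAYSCALE -> 1
        for (NDChannelLayout layout : NDChannelLayout.values()) {
            System.out.println(layout + " -> " + layout.numChannels() + " channel(s)");
        }
    }
}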
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/convert/config/NDFormat.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.convert.config; /** * The format to be used when converting an Image to an NDArray<br> * CHANNELS_FIRST (output shape: [1, c, h, w] or [c, h, w]) or CHANNELS_LAST (output shape: [1, h, w, c] or [h, w, c])<br> * See {@link ai.konduit.serving.data.image.convert.ImageToNDArrayConfig} */ import io.swagger.v3.oas.annotations.media.Schema; @Schema(description = "The format to be used when converting an Image to an NDArray. " + "CHANNELS_FIRST -> (output shape: [1, c, h, w] or [c, h, w]), " + "CHANNELS_LAST -> (output shape: [1, h, w, c] or [h, w, c]).") public enum NDFormat { CHANNELS_FIRST, CHANNELS_LAST }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/bb
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/bb/draw/DrawBoundingBoxStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.bb.draw; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.util.ColorConstants; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import java.util.Map; /** * * Scale: * <ul> * <li>NONE: No scaling</li> * <li>AT_LEAST: Scale up if necessary, so H >= resizeH and W >= resizeW</li> * <li>AT_MOST: Scale down if necessary, so H <= resizeH and W <= resizeW</li> * </ul> * */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("DRAW_BOUNDING_BOX") @Schema(description = "A pipeline step that configures how to draw a bounding box onto an image. The bounding box data that's to " + "be drawn is taken from the previous step's data instance.") public class DrawBoundingBoxStep implements PipelineStep { public static final String DEFAULT_COLOR = "lime"; @Schema(description = "A scaling policy enum, specifying how to scale the bounding box width and height. " + "NONE -> No scaling, AT_LEAST -> Scale up if necessary, so H >= resizeH and W >= resizeW, AT_MOST -> " + "Scale down if necessary, so H <= resizeH and W <= resizeW") public enum Scale { NONE, AT_LEAST, AT_MOST } @Schema(description = "Name of the input image key from the previous step. If set to null, it will try to find any image in the incoming data instance.") private String imageName; @Schema(description = "Name of the bounding boxes key from the previous step. If set to null, it will try to find any bounding box in the incoming data instance.") private String bboxName; @Schema(description = "If true, then draw the class label on top of the bounding box.") private boolean drawLabel; @Schema(description = "If true, then draw the class probability on top of the bounding box.") private boolean drawProbability; @Schema(description = "Specifies the color of different classes/labels that are drawn. " + ColorConstants.COLOR_DESCRIPTION) private Map<String, String> classColors; @Schema(description = "The default color to use in case a color for a label/class is not defined. 
" + ColorConstants.COLOR_DESCRIPTION, defaultValue = DEFAULT_COLOR) private String color; @Schema(description = "Line thickness to use to draw the bounding box (in pixels).", defaultValue = "1") private int lineThickness = 1; @Schema(description = "The scaling policy to use for scaling the bounding boxes.", defaultValue = "NONE") private Scale scale = Scale.NONE; @Schema(description = "Height threshold to be used with the scaling policy.") private int resizeH; @Schema(description = "Width threshold to be used with the scaling policy.") private int resizeW; @Schema(description = "Used to account for the fact that n-dimensional array from ImageToNDArrayConfig may be " + "used to crop images before passing to the network, when the image aspect ratio doesn't match the NDArray " + "aspect ratio. This allows the step to determine the subset of the image actually passed to the network." + "When specifying this on the command line, a comma separated list of fields with key=value for each field is the expected format. If specifying a normalization method, the same format applies. The normalization field must be specified in \" though. Please note that escaping the \" may also be necessary. ") private ImageToNDArrayConfig imageToNDArrayConfig; @Schema(description = "If true, the cropped region based on the image array is drawn.", defaultValue = "false") private boolean drawCropRegion = false; @Schema(description = "Color of the crop region. Only used if drawCropRegion = true.") private String cropRegionColor; /* Other things could add: - Upscale? (or minimum resolution, or always scale) - also aspect ratio part... - Text size - Text font - Line width */ }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/bb
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/bb/extract/ExtractBoundingBoxStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.bb.extract; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; /** * * ExtractBoundingBoxStep: Given one or more bounding boxes, and an input image, extract from the input image an image * that corresponds to the bounding box region.<br> * i.e., output, as images, the input image regions covered by the bounding boxes<br> * Note: supports both {@code BoundingBox} and {@code List<BoundingBox>} fields. If the input is a single value, * the output will be a single value; if the input is a list, the output will be a list.<br> * <br> * Note: If the aspect ratio field is set, the image cropping will increase the smaller dimension to ensure the cropped * image complies with the requested aspect ratio.<br> * <br> * Note: If resizeH and resizeW are specified, the cropped images will be resized to the specified size * */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("EXTRACT_BOUNDING_BOX") @Schema(description = "A pipeline step that extracts sub-images from an input image, based on the locations of input bounding boxes. " + "Returns List<Image> for the cropped image regions") public class ExtractBoundingBoxStep implements PipelineStep { @Schema(description = "Name of the input image key from the previous step. If set to null, it will try to find any image in the incoming data instance.") private String imageName; @Schema(description = "Name of the bounding boxes key from the previous step. 
If set to null, it will try to find any bounding box in the incoming data instance.") private String bboxName; @Schema(description = "Name of the output key that will contain, as images, the input image " + "regions covered by the bounding boxes.") private String outputName; @Schema(description = "If true, other data key and values from the previous step are kept and passed on to the next step as well.", defaultValue = "true") private boolean keepOtherFields = true; @Schema(description = "If set, the smaller dimensions will be increased to keep the aspect ratio correct (which may crop outside the image border).") private Double aspectRatio = null; @Schema(description = "If specified, the cropped images will be resized to the specified height.") private Integer resizeH; @Schema(description = "If specified, the cropped images will be resized to the specified width.") private Integer resizeW; @Schema(description = "Used to account for the fact that the n-dimensional array from ImageToNDArrayConfig may be " + "used to crop images before passing to the network, when the image aspect ratio doesn't match the NDArray " + "aspect ratio. This allows the step to determine the subset of the image actually passed to the network that " + "produced the bounding boxes.") private ImageToNDArrayConfig imageToNDArrayConfig; }
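A hypothetical ExtractBoundingBoxStep configuration sketch (key names are assumptions; the resize and aspect-ratio values are arbitrary):

import ai.konduit.serving.data.image.step.bb.extract.ExtractBoundingBoxStep;

public class ExtractBoundingBoxStepExample {
    public static void main(String[] args) {
        ExtractBoundingBoxStep step = new ExtractBoundingBoxStep()
                .imageName("image")        // assumed key of the input image
                .bboxName("faces")         // assumed key of the BoundingBox or List<BoundingBox>
                .outputName("crops")       // assumed output key for the cropped images
                .aspectRatio(1.0)          // pad crops to square, if needed
                .resizeH(96)
                .resizeW(96);
        System.out.println(step);
    }
}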
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/capture/CameraFrameCaptureStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.capture; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import org.nd4j.shade.jackson.annotation.JsonProperty; // TODO: not sure if it's relevant here but maybe we can later add a variable to specify the frames per second // or a way to specify the max number of inferences per second. This can potentially save us some compute. - SHAMS @Data @Accessors(fluent=true) @NoArgsConstructor @JsonName("FRAME_CAPTURE") @Schema(description = "A pipeline step that specifies an input that's taken from a camera feed.") public class CameraFrameCaptureStep implements PipelineStep { @Schema(description = "ID of the camera from which the input is taken. Each system camera is assigned an ID, " + "which is usually 0 for the first device, 1 for the second and so on...", defaultValue = "0") private int camera = 0; //TODO add other (more robust) ways to select camera @Schema(description = "Width of the incoming image frame. This will scale the original resolution width to the specified value.", defaultValue = "640") private int width = 640; @Schema(description = "Height of the incoming image frame. This will scale the original resolution height to the specified value.", defaultValue = "480") private int height = 480; @Schema(description = "Name of the output key that will contain and carry the image frame data to the later pipeline steps.", defaultValue = "image") private String outputKey = "image"; public CameraFrameCaptureStep(@JsonProperty("camera") int camera, @JsonProperty("width") int width, @JsonProperty("height") int height, @JsonProperty("outputKey") String outputKey){ this.camera = camera; this.width = width; this.height = height; this.outputKey = outputKey; } }
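Illustrative CameraFrameCaptureStep configuration (the device ID and frame size are arbitrary example values):

import ai.konduit.serving.data.image.step.capture.CameraFrameCaptureStep;

public class CameraFrameCaptureStepExample {
    public static void main(String[] args) {
        // Grab 1280x720 frames from the first system camera (device 0)
        CameraFrameCaptureStep step = new CameraFrameCaptureStep()
                .camera(0)
                .width(1280)
                .height(720)
                .outputKey("image");
        System.out.println(step);
    }
}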
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/capture/VideoFrameCaptureStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.capture; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import org.nd4j.shade.jackson.annotation.JsonProperty; /** * VideoFrameCaptureStep extracts a single frame from a video each time inference is called. * The video path is hardcoded. * * Note that at present this makes it only practically useful for testing/demo purposes. * Other options for loading the video will be specified at a later date: https://github.com/KonduitAI/konduit-serving/issues/350 */ @Data @Accessors(fluent=true) @NoArgsConstructor @JsonName("VIDEO_CAPTURE") @Schema(description = "A pipeline step that configures how to extract a single frame from a video each time inference is called." + " The video path is hardcoded, which at present makes this step mainly useful for testing/demo purposes.") public class VideoFrameCaptureStep implements PipelineStep { @Schema(description = "Location of the video file.") private String filePath; @Schema(description = "Name of the output key where the image frame will be located.", defaultValue = "image") private String outputKey = "image"; @Schema(description = "Loop the video when it reaches the end?") private boolean loop = true; @Schema(description = "Optional - Number of frames to skip between returned frames. If not set: No frames are skipped.<br> " + "Value 0 is equivalent to no skipping. Value 1: skip 1 frame between returned frames (i.e., return every 2nd frame). " + "Value 2: skip 2 frames between returned frames (i.e., return every 3rd frame), and so on.") private Integer skipFrames; public VideoFrameCaptureStep(@JsonProperty("filePath") String filePath, @JsonProperty("outputKey") String outputKey, @JsonProperty("loop") boolean loop, @JsonProperty("skipFrames") Integer skipFrames){ this.filePath = filePath; this.outputKey = outputKey; this.loop = loop; this.skipFrames = skipFrames; } }
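Illustrative VideoFrameCaptureStep configuration (the file path is hypothetical); skipFrames(2) returns every 3rd frame, per the description above:

import ai.konduit.serving.data.image.step.capture.VideoFrameCaptureStep;

public class VideoFrameCaptureStepExample {
    public static void main(String[] args) {
        VideoFrameCaptureStep step = new VideoFrameCaptureStep()
                .filePath("/tmp/demo.mp4")   // hypothetical local video path
                .outputKey("image")
                .loop(true)                  // restart the video at the end
                .skipFrames(2);              // skip 2 frames between returned frames
        System.out.println(step);
    }
}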
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/crop/ImageCropStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.crop; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.data.BoundingBox; import ai.konduit.serving.pipeline.api.data.Point; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.List; /** * Crop an image to the specified rectangular region. The crop region may be specified in one of two ways:<br> * (a) Via a bounding box, or<br> * (b) Via a {@code List<Point>} of length 2, containing the top-left and bottom-right crop locations.<br> * Furthermore, the bounding box and corner point coordinates may be specified in terms of either pixels or * "fraction of image". Note that if the crop region falls partly outside the input image region, black padding * will be added as necessary to keep the requested output size.<br> * Supports both {@code Image} and {@code List<Image>} inputs. * @author Alex Black */ @Data @Accessors(fluent = true) @JsonName("IMAGE_CROP") @NoArgsConstructor @Schema(description = "Crop an image to the specified rectangular region. The crop region may be specified in one of two ways:<br>" + "(a) Via a bounding box, or<br>" + "(b) Via a {@code List<Point>} of length 2, containing the top-left and bottom-right crop locations.<br>" + "These may be specified statically (i.e., fixed crop region) via \"cropBox\" or \"cropPoints\" property, or dynamically " + "via \"cropName\" (which may specify a BoundingBox or List<Point> in the input Data instance).<br>" + "Furthermore, the bounding box and corner point coordinates may be specified in terms of either pixels or" + "\"fraction of image\" - specified via the \"coordsArePixels\" property. Note that if the crop region falls partly " + "outside the input image region, black padding will be added as necessary to keep the requested output size.") public class ImageCropStep implements PipelineStep { @Schema(description = "Name of the Image or List<Image> field to crop") protected String imageName; @Schema(description = "Name of the input Data field used for dynamic cropping. 
May be a BoundingBox or List<Point>") protected String cropName; @Schema(description = "Static crop region defined as a List<Point>") protected List<Point> cropPoints; @Schema(description = "Static crop region defined as a BoundingBox") protected BoundingBox cropBox; @Schema(description = "Whether the crop region (BoundingBox / List<Point>) is specified in pixels, or as 'fraction of image'") protected boolean coordsArePixels = false; public ImageCropStep(@JsonProperty("imageName") String imageName, @JsonProperty("cropName") String cropName, @JsonProperty("cropPoints") List<Point> cropPoints, @JsonProperty("cropBox") BoundingBox cropBox, @JsonProperty("coordsArePixels") boolean coordsArePixels) { this.imageName = imageName; this.cropName = cropName; this.cropPoints = cropPoints; this.cropBox = cropBox; this.coordsArePixels = coordsArePixels; } }
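A static-crop sketch for ImageCropStep. This assumes a Point.create(x, y) factory on the pipeline API's Point interface; all values are arbitrary:

import ai.konduit.serving.data.image.step.crop.ImageCropStep;
import ai.konduit.serving.pipeline.api.data.Point;
import java.util.Arrays;

public class ImageCropStepExample {
    public static void main(String[] args) {
        // Static crop: top-left and bottom-right corners, as fractions of the image
        ImageCropStep step = new ImageCropStep()
                .imageName("image")  // assumed key of the Image or List<Image> field
                .cropPoints(Arrays.asList(Point.create(0.25, 0.25),   // assumed factory method
                                          Point.create(0.75, 0.75)))
                .coordsArePixels(false);
        System.out.println(step);
    }
}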
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/face/DrawFaceKeyPointsStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.face; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.util.ColorConstants; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.experimental.Accessors; /** * A step for drawing face keypoints. * Assumes the key point array is an NDArray with shape [numExamples][2*numPoints], where each entry alternates x and y * coordinates, in 0 to 1 scale. Other formats may be added in the future. */ @Builder @Data @Accessors(fluent = true) @AllArgsConstructor @JsonName("DRAW_FACE_KEY_POINTS") @Schema(description = "A step for drawing face keypoints. " + "Assumes the key point array is an NDArray with shape [numExamples][2*numPoints], where each entry alternates x and y " + "coordinates, in 0 to 1 scale. Other formats may be added in the future.") public class DrawFaceKeyPointsStep implements PipelineStep { public static final String DEFAULT_BOX_COLOR = "lime"; public static final String DEFAULT_POINT_COLOR = "red"; public static final String DEFAULT_OUTPUT_NAME = "image"; public enum Scale {NONE, AT_LEAST, AT_MOST} @Schema(description = "ImageToNDArrayConfig used to transform the image to an array") private ImageToNDArrayConfig imageToNDArrayConfig; @Schema(description = "Height value to resize to") private int resizeH; @Schema(description = "Width value to resize to") private int resizeW; @Schema(description = "Whether to draw a bounding box around the face. If true, a bounding box will be drawn around the face; if false, no bounding box will be drawn.") @Builder.Default private boolean drawFaceBox = true; @Schema(description = "Specifies the color of the bounding box around the face. " + ColorConstants.COLOR_DESCRIPTION, defaultValue = DEFAULT_BOX_COLOR) private String faceBoxColor; @Schema(description = "Specifies the color of face keypoints. 
" + ColorConstants.COLOR_DESCRIPTION, defaultValue = DEFAULT_POINT_COLOR) private String pointColor; @Schema(description = "Size of face key points", defaultValue = "1") @Builder.Default private int pointSize = 1; @Schema(description = "Scaling enum, which can be NONE, AT_LEAST, AT_MOST") @Builder.Default private Scale scale = Scale.NONE; @Schema(description = "Field name, which contain array of keypoints from previous step") private String landmarkArray; @Schema(description = "An optional field, specifying the name of the image to be drawn on") private String image; @Schema(description = "Name of the key of face keypoints from this step.", defaultValue = DEFAULT_OUTPUT_NAME) private String outputName; public DrawFaceKeyPointsStep() { this.scale = Scale.NONE; this.pointSize = 1; this.drawFaceBox = true; } }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grayscale/GrayScaleStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.grayscale; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; /** * Transforms an image to grayscale. * @author Adam Gibson */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("GRAY_SCALE") @Schema(description = "A pipeline step that converts the given input image into a grayscale image.") public class GrayScaleStep implements PipelineStep { @Schema(description = "Name of the input image key from the previous step. If set to null, it will try to find any image in the incoming data instance.") private String imageName; @Schema(description = "Number of output channels, defaults to 1") private int outputChannels = 1; }
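Minimal GrayScaleStep sketch (the key name is assumed):

import ai.konduit.serving.data.image.step.grayscale.GrayScaleStep;

public class GrayScaleStepExample {
    public static void main(String[] args) {
        // Convert to grayscale but keep 3 identical channels, so downstream
        // steps that expect a 3-channel image continue to work
        GrayScaleStep step = new GrayScaleStep()
                .imageName("image")
                .outputChannels(3);
        System.out.println(step);
    }
}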
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid/crop/CropFixedGridStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.grid.crop; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.data.Point; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; /** * As per {@link CropGridStep} but the x/y location values are hardcoded into the configuration, instead of coming * dynamically from the input Data instance * * @author Alex Black * @see CropGridStep */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("CROP_FIXED_GRID") @Schema(description = "This step is similar to the CropGridStep with the difference that the x/y location values are " + "hardcoded into the configuration, instead of coming dynamically from the input Data instance.") public class CropFixedGridStep implements PipelineStep { @Schema(description = "Name of the input image key from the previous step. If set to null, it will try to find any image in the incoming data instance.") private String imageName; @Schema(description = "A List<Point> (of length 4), the corners, in order: topLeft, topRight, bottomLeft, bottomRight") private List<Point> points; @Schema(description = "The number of grid segments between (topLeft and topRight) and (bottomLeft and bottomRight)") private int gridX; @Schema(description = "The number of grid segments between (topLeft and bottomLeft) and (topRight and bottomRight)") private int gridY; @Schema(description = "If true, the points are in pixel coordinates (0 to width-1) and (0 to height-1); if false, they " + "are 0.0 to 1.0 (fraction of image height/width)") private boolean coordsArePixels; @Schema(description = "Name of the output bounding boxes key.") private String boundingBoxName; @Schema(description = "If true, two lists are returned, containing the grid horizontal and vertical coordinates, respectively.") private boolean outputCoordinates; @Schema(description = "If true, other data key and values from the previous step are kept and passed on to the next step as well.") private boolean keepOtherFields; @Schema(description = "If set, the smaller dimensions will be increased to keep the aspect ratio correct (which may crop outside the image border).") private Double aspectRatio; @Schema(description = "Name of the key of all the cropped output images from this step.") private String outputName; @Tolerate public CropFixedGridStep points(Point... points) { return this.points(Arrays.asList(points)); } }
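CropFixedGridStep sketch, cropping a 3x2 grid from a fixed quadrilateral region. This assumes a Point.create(x, y) factory on the pipeline API's Point interface:

import ai.konduit.serving.data.image.step.grid.crop.CropFixedGridStep;
import ai.konduit.serving.pipeline.api.data.Point;

public class CropFixedGridStepExample {
    public static void main(String[] args) {
        CropFixedGridStep step = new CropFixedGridStep()
                .imageName("image")
                .points(Point.create(0.1, 0.1), Point.create(0.9, 0.1),   // topLeft, topRight (assumed factory)
                        Point.create(0.1, 0.9), Point.create(0.9, 0.9))   // bottomLeft, bottomRight
                .gridX(3)
                .gridY(2)
                .coordsArePixels(false)   // fractional coordinates
                .outputName("cells");
        System.out.println(step);
    }
}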
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid/crop/CropGridStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.grid.crop; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; /** * Crop sub images out of a larger image, based on a grid. The grid location is defined in terms of the x/y coordinates * of the corners (which comes from the input Data instance), and the number of segments within the grid in both directions.<br> * The 4 corner X coordinates come from {@code Data.getListDouble(xName)} and the 4 corner Y coordinates come from * {@code Data.getListDouble(yName)}.<br> * Note that the output depends on the configuration. * Always returned: {@code List<Image>} - the cropped images from the grid, in order: (row,col) = (0,0), (0, 1), ..., (0, C-1), ..., (R-1, C-1).<br> * Returned if {@code outputCoordinates=true}: two {@code List<Long>}s - the box coordinates (0,0), ..., (gridX-1, gridY-1)<br> * Returned if {@code boundingBoxName != null}: one {@code List<BoundingBox>} - the crop bounding boxes, (0,0), (0,1), ..., (gridX-1, gridY-1)<br> * See also {@link CropFixedGridStep}<br> * If {@code aspectRatio} is set, the smaller dimension will be increased to keep the aspect ratio correct. Note this may crop * outside the image border * @author Alex Black * @see CropFixedGridStep */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("CROP_GRID") @Schema(description = "A pipeline step that crops sub images out of a larger image, based on a grid. " + "The 4 corner coordinates are defined as points, and come from {@code Data.getListPoint(name)}, in the following order:" + "topLeft, topRight, bottomLeft, bottomRight<br>" + "gridX is the number of grid segments between (topLeft and topRight) and (bottomLeft and bottomRight).<br>" + "gridY is the number of grid segments between (topLeft and bottomLeft) and (topRight and bottomRight)<br>" + "The output contains a List<Image> of cropped images from the grid, in order: (row,col) = (0,0), (0, 1), ..., (0, C-1), ..., (R-1, C-1).") public class CropGridStep implements PipelineStep { public static final String DEFAULT_OUTPUT_NAME = "crops"; @Schema(description = "Name of the input image key from the previous step. 
If set to null, it will try to find any image in the incoming data instance.") private String imageName; @Schema(description = "Name of the List<Point> points specifying the corners, in order: topLeft, topRight, bottomLeft, bottomRight") private String pointsName; @Schema(description = "The number of grid segments between (topLeft and topRight) and (bottomLeft and bottomRight)") private int gridX; @Schema(description = "The number of grid segments between (topLeft and bottomLeft) and (topRight and bottomRight)") private int gridY; @Schema(description = "If true, the points are in pixel coordinates, not fractions from 0 to 1.") private boolean coordsArePixels; @Schema(description = "Name of the output bounding boxes key.") private String boundingBoxName; @Schema(description = "If true, two lists are returned, containing the grid horizontal and vertical coordinates, respectively.") private boolean outputCoordinates; @Schema(description = "If true, other data key and values from the previous step are kept and passed on to the next step as well.") private boolean keepOtherFields; @Schema(description = "If set, the smaller dimensions will be increased to keep the aspect ratio correct (which may crop outside the image border).") private Double aspectRatio; @Schema(description = "Name of the key of all the cropped output images from this step.", defaultValue = DEFAULT_OUTPUT_NAME) private String outputName; }
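CropGridStep sketch, with the 4 corners read dynamically from an (assumed) "corners" field of the input Data:

import ai.konduit.serving.data.image.step.grid.crop.CropGridStep;

public class CropGridStepExample {
    public static void main(String[] args) {
        CropGridStep step = new CropGridStep()
                .imageName("image")
                .pointsName("corners")        // assumed key of the List<Point> with the 4 corners
                .gridX(8)
                .gridY(8)
                .boundingBoxName("cellBoxes") // also return the crop boxes
                .outputName(CropGridStep.DEFAULT_OUTPUT_NAME);
        System.out.println(step);
    }
}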
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid/draw/DrawFixedGridStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.grid.draw; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.util.ColorConstants; import ai.konduit.serving.pipeline.api.data.Point; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; /** * As per {@link DrawGridStep} but the x/y location values are hardcoded into the configuration, instead of coming * dynamically from the input Data instance * * @author Alex Black */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("DRAW_FIXED_GRID") @Schema(description = "A pipeline step that draws a grid on an image. This is similar to DrawGridStep but the corner x/y" + " location values are hardcoded into the configuration (via points), instead of coming dynamically from the input Data instance.") public class DrawFixedGridStep implements PipelineStep { public static final String DEFAULT_COLOR = "lime"; @Schema(description = "Name of the input image key from the previous step. If set to null, it will try to find any image in the incoming data instance.") private String imageName; @Schema(description = "A List<Point> (of length 4), the corners, in order: topLeft, topRight, bottomLeft, bottomRight") private List<Point> points; @Schema(description = "The number of grid segments between (topLeft and topRight) and (bottomLeft and bottomRight)") private int gridX; @Schema(description = "The number of grid segments between (topLeft and bottomLeft) and (topRight and bottomRight)") private int gridY; @Schema(description = "If true, the points are in pixel coordinates (0 to width-1) and (0 to height-1); if false, they " + "are 0.0 to 1.0 (fraction of image height/width)") private boolean coordsArePixels; @Schema(description = "Color of the border. " + ColorConstants.COLOR_DESCRIPTION) private String borderColor; @Schema(description = "Color of the grid. If not set, the border color will be used. " + ColorConstants.COLOR_DESCRIPTION) private String gridColor; @Schema(description = "Line thickness to use to draw the border (in pixels).", defaultValue = "1") private int borderThickness = 1; @Schema(description = "Line thickness to use to draw the grid (in pixels). " + "If null, then the same value as the borderThickness is used") private Integer gridThickness; @Tolerate public DrawFixedGridStep points(Point... points) { return this.points(Arrays.asList(points)); } }
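DrawFixedGridStep sketch (assumes the Point.create(x, y) factory; the colors follow the formats that ColorConstants describes):

import ai.konduit.serving.data.image.step.grid.draw.DrawFixedGridStep;
import ai.konduit.serving.pipeline.api.data.Point;

public class DrawFixedGridStepExample {
    public static void main(String[] args) {
        DrawFixedGridStep step = new DrawFixedGridStep()
                .imageName("image")
                .points(Point.create(0.0, 0.0), Point.create(1.0, 0.0),   // topLeft, topRight
                        Point.create(0.0, 1.0), Point.create(1.0, 1.0))   // bottomLeft, bottomRight
                .gridX(4)
                .gridY(4)
                .borderColor("rgb(0,255,0)")
                .gridColor("#788E87")
                .borderThickness(2);
        System.out.println(step);
    }
}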
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/grid/draw/DrawGridStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.grid.draw; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.util.ColorConstants; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; /** * Draw a grid on the specified image, based on the x/y coordinates of the corners, and the number of segments within * the grid in both directions.<br> * The 4 corner coordinates are defined as points, and come from {@code Data.getListPoint(name)}, in the following order: * topLeft, topRight, bottomLeft, bottomRight<br> * gridX is the number of grid segments between (topLeft and topRight) and (bottomLeft and bottomRight).<br> * gridY is the number of grid segments between (topLeft and bottomLeft) and (topRight and bottomRight)<br> * The colors and line thicknesses can be configured. * <p> * See also {@link DrawFixedGridStep} * * @author Alex Black */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("DRAW_GRID") @Schema(description = "Draw a grid on the specified image, based on the x/y coordinates of the corners, and the number of segments within " + "the grid in both directions.<br>" + "The 4 corner coordinates are defined as points, and come from {@code Data.getListPoint(name)}, in the following order:" + "topLeft, topRight, bottomLeft, bottomRight<br>" + "gridX is the number of grid segments between (topLeft and topRight) and (bottomLeft and bottomRight).<br>" + "gridY is the number of grid segments between (topLeft and bottomLeft) and (topRight and bottomRight)<br>" + "The colors and line thicknesses can be configured.") public class DrawGridStep implements PipelineStep { public static final String DEFAULT_COLOR = "lime"; @Schema(description = "Name of the input image key from the previous step. If set to null, it will try to find any image in the incoming data instance.") private String imageName; @Schema(description = "Name of the List<Point> points specifying the corners, in order: topLeft, topRight, bottomLeft, bottomRight") private String pointsName; @Schema(description = "The number of grid segments between (topLeft and topRight) and (bottomLeft and bottomRight)") private int gridX; @Schema(description = "The number of grid segments between (topLeft and bottomLeft) and (topRight and bottomRight)") private int gridY; @Schema(description = "If true, the lists are in pixels coordinates, not from 0 to 1.") private boolean coordsArePixels; @Schema(description = "Color of the border. " + ColorConstants.COLOR_DESCRIPTION) private String borderColor; @Schema(description = "Color of the grid. 
" + ColorConstants.COLOR_DESCRIPTION) private String gridColor; @Schema(description = "Line thickness to use to draw the border (in pixels).", defaultValue = "1") private int borderThickness = 1; @Schema(description = "Line thickness to use to draw the border (in pixels). " + "If null then the same value as the borderThickness is used") private Integer gridThickness; }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/ndarray/ImageToNDArrayStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.ndarray; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.Arrays; import java.util.List; /** * A PipelineStep for converting {@link ai.konduit.serving.pipeline.api.data.Image}s to {@link ai.konduit.serving.pipeline.api.data.NDArray}s. * The exact way that images are converted is highly configurable (formats, channels, output sizes, normalization, etc) - * see {@link ImageToNDArrayConfig} for more details<br> * <br> * The following configuration is available: * <ul> * <li><b>config</b>: the {@link ImageToNDArrayConfig} configuration for how conversion should be performed</li> * <li><b>keys</b>: may be null. If non-null: These are the names of images in the Data instance to convert</li> * <li><b>outputNames</b>: may be null. If non-null: the input images are renamed to this in the output Data instance after conversion to NDArray</li> * <li><b>keepOtherValues</b>: True by default. If true: copy all the other (non-converted/non-image) entries in the input Data to the output Data</li> * <li><b>metadata</b>: False by default. If true: include metadata about the images in the output Data - for example if/how it was cropped, * and the original input size.</li> * <li><b>metadataKey</b>: Sets the key that the metadata will be stored under. Default: {@link #DEFAULT_METADATA_KEY}. Not relevant if * metadata == false</li> * </ul> * <p> * Note that metadata will have the following format:<br> * If a single image is converted, the metadata Data instance will have a nested Data instance * i.e.: * <pre> * {@code * Data meta = myData.get( "@ImageToNDArrayStepMetadata"); * }}</pre> * * @author Alex Black */ @Data @Accessors(fluent = true) @NoArgsConstructor @JsonName("IMAGE_TO_NDARRAY") @Schema(description = "A PipelineStep for converting images to n-dimensional arrays. 
" + "The exact way that images are converted is highly configurable (formats, channels, output sizes, " + "normalization, etc).") public class ImageToNDArrayStep implements PipelineStep { public static final String DEFAULT_METADATA_KEY = "@ImageToNDArrayStepMetadata"; public static final String META_INNAME_KEY = "in_name"; public static final String META_OUTNAME_KEY = "out_name"; public static final String META_IMG_H = "image_height"; public static final String META_IMG_W = "image_width"; public static final String META_CROP_REGION = "crop_region"; @Schema(description = "Configuration for how conversion should be performed.") private ImageToNDArrayConfig config; @Schema(description = "May be null. If non-null, these are the names of images in the Data instance to convert.") private List<String> keys; @Schema(description = "May be null. If non-null, the input images are renamed to this in the output Data instance after conversion to n-dimensional array.") private List<String> outputNames; @Schema(description = "True by default. If true, copy all the other (non-converted/non-image) entries in the input data to the output data", defaultValue = "true") private boolean keepOtherValues = true; @Schema(description = "False by default. If true, include metadata about the images in the output data. For example, if/how it was cropped, " + "and the original input size.") private boolean metadata; @Schema(description = "Sets the key that the metadata will be stored under. Not relevant if metadata == false.", defaultValue = DEFAULT_METADATA_KEY) private String metadataKey = DEFAULT_METADATA_KEY; public ImageToNDArrayStep(@JsonProperty("config") ImageToNDArrayConfig config, @JsonProperty("keys") List<String> keys, @JsonProperty("outputNames") List<String> outputNames, @JsonProperty("keepOtherValues") boolean keepOtherValues, @JsonProperty("metadata") boolean metadata, @JsonProperty("metadataKey") String metadataKey) { this.config = config; this.keys = keys; this.outputNames = outputNames; this.keepOtherValues = keepOtherValues; this.metadata = metadata; this.metadataKey = metadataKey; } @Tolerate public ImageToNDArrayStep outputNames(String... outputNames) { return this.outputNames(Arrays.asList(outputNames)); } @Tolerate public ImageToNDArrayStep keys(String... keys) { return this.keys(Arrays.asList(keys)); } }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point/convert/RelativeToAbsoluteStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.convert; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AccessLevel; import lombok.Data; import lombok.Getter; import lombok.Setter; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; /** * For a given set of {@code Point}, {@code List<Point>}, {@code BoundingBox} or {@code List<BoundingBox>} that are * defined in relative terms (i.e., all values 0.0 to 1.0 in terms of "fraction of image height/width"), convert these * to absolute values (i.e., pixels) using:<br> * (a) An input image, as specified via the imageName configuration, OR<br> * (b) An image height, as specified by imageH and imageW configuration<br> * <br> * Note that an ImageToNDArrayConfig can be provided, to account for the fact that the original image may have been * cropped before being passed into a network that produced the Points or BoundingBoxes.<br> * If the imageToNDArrayConfig field is null, it is assumed no cropping has occurred. * * @author Alex Black */ @Schema(description = "For a given set of {@code Point}, {@code List<Point>}, {@code BoundingBox} or {@code List<BoundingBox>} that are " + "defined in relative terms (i.e., all values 0.0 to 1.0 in terms of \"fraction of image height/width\"), convert these " + "to absolute values (i.e., pixels) using:<br>" + "(a) An input image, as specified via the imageName configuration, OR<br>" + "(b) An image height, as specified by imageH and imageW configuration<br>" + "<br>" + "Note that an ImageToNDArrayConfig can be provided, to account for the fact that the original image may have been " + "cropped before being passed into a network that produced the Points or BoundingBoxes.<br>" + "If the imageToNDArrayConfig field is null, it is assumed no cropping has occurred.") @Data @Accessors(fluent = true) @JsonName("RELATIVE_TO_ABSOLUTE") public class RelativeToAbsoluteStep implements PipelineStep { @Schema(description = "Optional - the name of the field in the input Data containing the Image to use (to determine H/W)") protected String imageName; @Schema(description = "If imageName is not specified - height of the input image to use") protected Integer imageH; @Schema(description = "If imageName is not specified - width of the input image to use") protected Integer imageW; @Schema(description = "Optional - used to account for the fact that the image may have been cropped before being passed" + " into a network that produced the points/bounding box. 
This allows for them to be offset, so the boxes/coordinates" + " are specified in terms of the original image, not the cropped image") protected ImageToNDArrayConfig imageToNDArrayConfig; @Schema(description = "Optional - the name of the Point, List<Point>, BoundingBox or List<BoundingBox> fields to convert." + " If not set, the step will convert any/all fields of those types") protected List<String> toConvert; @Tolerate public RelativeToAbsoluteStep toConvert(String... toConvert){ return toConvert(Arrays.asList(toConvert)); } }
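RelativeToAbsoluteStep sketch using a fixed frame size instead of an input image (the field name is assumed):

import ai.konduit.serving.data.image.step.point.convert.RelativeToAbsoluteStep;

public class RelativeToAbsoluteStepExample {
    public static void main(String[] args) {
        // Convert relative (0..1) points/boxes to pixels for a 1280x720 frame
        RelativeToAbsoluteStep step = new RelativeToAbsoluteStep()
                .imageH(720)
                .imageW(1280)
                .toConvert("detections");   // assumed field name; omit to convert all such fields
        System.out.println(step);
    }
}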
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point/draw/DrawPointsStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.draw; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.util.ColorConstants; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; import java.util.Map; /** * Draw 2D points<br> * Configuration:<br> * <ul> * <li><b>classColors</b>: Optional: A list of colors to use for each class. Must be in one of the following formats: hex/HTML - "#788E87", * RGB - "rgb(128,0,255)", or one of the 16 HTML color names such as \"green\" (https://en.wikipedia.org/wiki/Web_colors#HTML_color_names). * If no colors are specified, or not enough colors are specified, random colors are used instead (note: consistent between runs). * Colors are mapped to named classes in alphabetical order, i.e. first color to class A, second color to class B, etc...</li> * <li><b>points</b>: Names of the points to be drawn. Accepts both single points and lists of points. * <li><b>radius</b>: Point radius on drawn image. </li> * <li><b>image</b>: Optional. Name of the image to be drawn on</li> * <li><b>width</b>: Must be provided when <b>image</b> isn't set. Used to resolve position of points with relative addressing (dimensions between 0 and 1)</li> * <li><b>height</b>: Must be provided when <b>image</b> isn't set. Used to resolve position of points with relative addressing (dimensions between 0 and 1)</li> * <li><b>outputName</b>: Name of the output image</li> * </ul> * * @author Paul Dubs */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("DRAW_POINTS") @Schema(description = "A pipeline step that configures how to draw 2D points on an image.") public class DrawPointsStep implements PipelineStep { public static final String DEFAULT_OUTPUT_NAME = "image"; public static final String DEFAULT_NO_POINT_COLOR = "lime"; @Schema(description = "Color to use for points that have no class label", defaultValue = DEFAULT_NO_POINT_COLOR) private String noClassColor; @Schema(description = "This is an optional field which specifies the mapping of colors to use for each class. " + ColorConstants.COLOR_DESCRIPTION) private Map<String, String> classColors; @Schema(description = "Name of the input data fields containing the points to be drawn. Accepts both single points and lists of points. Accepts both relative and absolute addressed points.") private List<String> points; @Schema(description = "Optional. Point radius on drawn image. 
Default = 5px") private Integer radius; @Schema(description = "An optional field, specifying the name of the image to be drawn on") private String image; @Schema(description = "Must be provided when \"image\" isn't set. Used to resolve position of points with relative addressing (dimensions between 0 and 1)") private Integer width; @Schema(description = "Must be provided when \"image\" isn't set. Used to resolve position of points with relative addressing (dimensions between 0 and 1)") private Integer height; @Schema(description = "Used to account for the fact that n-dimensional array from ImageToNDArrayConfig may be " + "used to crop images before passing to the network, when the image aspect ratio doesn't match the NDArray " + "aspect ratio. This allows the step to determine the subset of the image actually passed to the network.") private ImageToNDArrayConfig imageToNDArrayConfig; @Schema(description = "Name of the output image", defaultValue = DEFAULT_OUTPUT_NAME) private String outputName; @Tolerate public DrawPointsStep points(String... points) { return this.points(Arrays.asList(points)); } }
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point/heatmap/DrawHeatmapStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.heatmap; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.step.segmentation.index.DrawSegmentationStep; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.Singular; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; /** * Draw a heatmap using 2D points.<br> * Heat will accumulate over the lifecycle of this step.<br> * Configuration:<br> * <ul> * <li><b>points</b>: Names of the points to be used for the heatmap. Accepts both single points and lists of points. * <li><b>radius</b>: Size of area influenced by a point.</li> * <li><b>fadingFactor</b>: Optional. Value between 0 and 1. 0: No Fade, 1: Instant fade; default: 0.9</li> * <li><b>image</b>: Optional. Name of the image to be drawn on</li> * <li><b>opacity</b>: Optional. Opacity of the heatmap. Between 0 and 1. 0: Fully transparent, 1: Fully opaque; default: 0.5</li> * <li><b>width</b>: Must be provided when <b>image</b> isn't set. Used to resolve position of points with relative addressing (dimensions between 0 and 1)</li> * <li><b>height</b>: Must be provided when <b>image</b> isn't set. Used to resolve position of points with relative addressing (dimensions between 0 and 1)</li> * <li><b>outputName</b>: Name of the output image</li> * </ul> * * @author Paul Dubs */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("DRAW_HEATMAP") @Schema(description = "A pipeline step that configures how to draw a 2D heatmap on an image.") public class DrawHeatmapStep implements PipelineStep { public static final String DEFAULT_OUTPUT_NAME = "image"; @Schema(description = "Name of the input data fields containing the points used for the heatmap. Accepts both single points and lists of points. Accepts both relative and absolute addressed points.") @Singular private List<String> points; @Schema(description = "Size of area influenced by a point") private Integer radius; @Schema(description = "Fading factor. 0: no fade, 1: instant fade") private Double fadingFactor; @Schema(description = "An optional field, specifying the name of the image to draw on") private String image; @Schema(description = "Opacity of the heatmap. Between 0 and 1. 0: Fully transparent, 1: Fully opaque. Default: 0.5") private Double opacity; @Schema(description = "Must be provided when \"image\" isn't set. 
Used to resolve position of points with relative addressing (dimensions between 0 and 1)") private Integer width; @Schema(description = "Must be provided when \"image\" isn't set. Used to resolve position of points with relative addressing (dimensions between 0 and 1)") private Integer height; @Schema(description = "Name of the output image", defaultValue = DEFAULT_OUTPUT_NAME) private String outputName; @Schema(description = "True by default. If true, copy all the other (non-converted/non-image) entries in the input data to the output data", defaultValue = "true") private boolean keepOtherValues = true; @Schema(description = "Used to account for the fact that n-dimensional array from ImageToNDArrayConfig may be " + "used to crop images before passing to the network, when the image aspect ratio doesn't match the NDArray " + "aspect ratio. This allows the step to determine the subset of the image actually passed to the network.") private ImageToNDArrayConfig imageToNDArrayConfig; @Tolerate public DrawHeatmapStep points(String... points) { return this.points(Arrays.asList(points)); } }
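DrawHeatmapStep sketch. No background image is set, so width/height must be given; all values are illustrative:

import ai.konduit.serving.data.image.step.point.heatmap.DrawHeatmapStep;

public class DrawHeatmapStepExample {
    public static void main(String[] args) {
        // Accumulate heat at detected positions over the life of the step
        DrawHeatmapStep step = new DrawHeatmapStep()
                .points("personPositions")   // assumed key of the points field
                .radius(20)
                .fadingFactor(0.9)           // older heat fades out quickly
                .opacity(0.5)
                .width(1280)                 // required because no image is set
                .height(720)
                .outputName(DrawHeatmapStep.DEFAULT_OUTPUT_NAME);
        System.out.println(step);
    }
}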
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point/perspective
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/point/perspective/convert/PerspectiveTransformStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.point.perspective.convert; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.data.Point; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.*; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; /** * PerspectiveTransformStep: Applies a perspective transformation to Images, Points and Bounding Boxes.<br> * The transformation is defined either statically with the Step definition or dynamically with a list of 4 points in an * input. If both are defined, an IllegalStateException will be thrown.<br> * If only the source points are defined, the transformation will result in mapping those points to a rectangle. <br> * If you want to apply a more specific transformation, you can also provide the target points yourself.<br> * Note: supports both single values and lists. If the input is a single value, * the output will be a single value; if the input is a list, the output will be a list.<br> */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("PERSPECTIVE_TRANSFORM") public class PerspectiveTransformStep implements PipelineStep { /** * If null: just find any Points, Bounding Boxes and Images */ @Schema(description = "A list of names of the input fields to process. May be points, bounding boxes, images, or lists of these. If input names are not set, the fields of these types will be inferred automatically.") private List<String> inputNames; @Schema(description = "A list of names of the output fields. If not set, the output has the same name as the input field.") private List<String> outputNames; /** * When you provide source points as an input, they must be provided as a list of 4 points [topLeft, topRight, bottomLeft, bottomRight] */ @Schema(description = "List of 4 source points [topLeft, topRight, bottomLeft, bottomRight]") private String sourcePointsName; /** * When you provide target points as an input, they must be provided as a list of 4 points [topLeft, topRight, bottomLeft, bottomRight] */ @Schema(description = "List of 4 target points [topLeft, topRight, bottomLeft, bottomRight]") private String targetPointsName; /** * When a referenceImage is provided, the transform will be adjusted to ensure the entire transformed image fits into the output image (up to 4096x4096)<br> * Can also reference a list of images, in which case <b>only the first</b> image is used as the reference.<br> * The adjustment is applied to all inputs of the step. 
*/ @Schema(description = "When a referenceImage is provided, the transform will be adjusted to ensure the entire transformed image fits into the output image (up to 4096x4096)." + "Can also reference a list of images, in which case <b>only the first</b> image is used as the reference." + "The adjustment is applied to all inputs of the step.") private String referenceImage; /** * When a reference width and height are provided, the transform will be adjusted to make sure the entire area fits into the output image (up to 4096x4096)<br> * The adjustment is applied to all inputs of the step. */ @Schema(description = "When a reference width and height are provided, the transform will be adjusted to make sure the entire area fits into the output image (up to 4096x4096)<br> The adjustment is applied to all inputs of the step.") private Integer referenceWidth; /** * When a reference width and height are provided, the transform will be adjusted to make sure the entire area fits into the output image (up to 4096x4096)<br> * The adjustment is applied to all inputs of the step. */ @Schema(description = "When a reference width and height are provided, the transform will be adjusted to make sure the entire area fits into the output image (up to 4096x4096)<br> The adjustment is applied to all inputs of the step.") private Integer referenceHeight; /** * takes exactly 4 points [topLeft, topRight, bottomLeft, bottomRight] */ @Schema(description = "Takes exactly 4 source points [topLeft, topRight, bottomLeft, bottomRight]" ) private List<Point> sourcePoints; /** * takes exactly 4 points [topLeft, topRight, bottomLeft, bottomRight] */ @Schema(description = "Takes exactly 4 target points [topLeft, topRight, bottomLeft, bottomRight]" ) private List<Point> targetPoints; @Schema(description = "If true, other data key and values from the previous step are kept and passed on to the next step as well.", defaultValue = "true") private boolean keepOtherFields = true; @Tolerate public PerspectiveTransformStep inputNames(String... inputNames) { return this.inputNames(Arrays.asList(inputNames)); } @Tolerate public PerspectiveTransformStep outputNames(String... outputNames) { return this.outputNames(Arrays.asList(outputNames)); } @Tolerate public PerspectiveTransformStep sourcePoints(Point... sourcePoints) { return this.sourcePoints(Arrays.asList(sourcePoints)); } @Tolerate public PerspectiveTransformStep targetPoints(Point... targetPoints) { return this.targetPoints(Arrays.asList(targetPoints)); } }
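A minimal usage sketch of the fluent builder above, with statically-defined source points mapped to a rectangle. The Point.create(x, y) factory on the Point interface is assumed here; the fluent setters and the varargs sourcePoints(...) overload come from the class itself:

// Hypothetical field name "image"; Point.create(x, y) is an assumed factory method.
PipelineStep perspective = new PerspectiveTransformStep()
        .inputNames("image")
        .sourcePoints(
                Point.create(10, 10),    // topLeft
                Point.create(600, 20),   // topRight
                Point.create(5, 400),    // bottomLeft
                Point.create(620, 410)); // bottomRight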
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/resize/ImageResizeStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.resize; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.config.AspectRatioHandling; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.Arrays; import java.util.Collections; import java.util.List; /** * Resize an image - scaling up or down as needed to comply with the specified output height/width. * Usually, both {@code height} and {@code width} are specified. However, if only one is specified, the other value is * calculated based on the aspect ratio of the input image. * When both {@code height} and {@code width} are specified, and the aspect ratio of the input doesn't match * the aspect ratio of the output, (for example, 100x200 in, 200x200 out) the {@code aspectRatioHandling} setting (see * {@link AspectRatioHandling}) is used to determine how to handle this situation.<br> * Note that the names of the input Data fields to resize may or may not be specified: * If no value is provided for {@code inputNames} configuration: All input {@code Image} and {@code List<Image>} fields * in the input Data instance will be resized, regardless of name.<br> * If {@code inputNames} is specified: Only those fields with those names will be resized.<br> * <br> * <br> * Example 1: Scaling to 300x300, center cropping if needed: {@code new ImageResizeStep().height(300).width(300).aspectRatioHandling(AspectRatioHandling.CENTER_CROP)}<br> * Example 2: Scaling to height of 256 (any output width, maintaining original aspect ratio): {@code new ImageResizeStep().height(256)}<br> * * @author Alex Black */ @Data @Accessors(fluent = true) @JsonName("IMAGE_RESIZE") @NoArgsConstructor @Schema(description = "A pipeline step that resizes an image, scaling up or down as needed to comply with the " + "specified output height/width. Usually, both <height> and <width> are specified. However, if only one is specified, " + "the other value is calculated based on the aspect ratio of the input image. When both <height> and <width> are specified, " + "and the aspect ratio of the input doesn't match the aspect ratio of the output, (for example, 100x200 in, 200x200 out) " + "the <aspectRatioHandling> setting is used to determine how to handle this situation. Note that the names of the input data fields " + "to resize may or may not be specified. If no value is provided for <inputNames> configuration, all input image fields " + "in the input Data instance will be resized, regardless of name. If <inputNames> is specified, only those fields with those " + "names will be resized.") public class ImageResizeStep implements PipelineStep { @Schema(description = "Names of the input keys whose values contain images from the previous step.") protected List<String> inputNames; @Schema(description = "Resize height.") protected Integer height; @Schema(description = "Resize width.") protected Integer width; @Schema(description = "An enum to define how to handle the aspect ratio when the aspect ratio doesn't match with " + "that of the input image.", defaultValue = "STRETCH") protected AspectRatioHandling aspectRatioHandling = AspectRatioHandling.STRETCH; /** * Set the input name - i.e., the name of the {@code Image} or {@code List<Image>} field to be processed. * If any names have been previously set, they will be discarded when calling this method * * @param name Input {@code Image} or {@code List<Image>} name * @return This instance */ public ImageResizeStep inputName(String name) { this.inputNames = Collections.singletonList(name); return this; } public ImageResizeStep(@JsonProperty("inputNames") List<String> inputNames, @JsonProperty("height") Integer height, @JsonProperty("width") Integer width, @JsonProperty("aspectRatioHandling") AspectRatioHandling aspectRatioHandling) { this.inputNames = inputNames; this.height = height; this.width = width; this.aspectRatioHandling = aspectRatioHandling; } @Tolerate public ImageResizeStep inputNames(String... inputNames) { return this.inputNames(Arrays.asList(inputNames)); } }
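The two examples from the Javadoc above, written out as configuration code:

// Example 1: scale to 300x300, center-cropping when the input aspect ratio differs.
PipelineStep resizeCrop = new ImageResizeStep()
        .height(300)
        .width(300)
        .aspectRatioHandling(AspectRatioHandling.CENTER_CROP);

// Example 2: scale to a height of 256; the width follows from the input's aspect ratio.
PipelineStep resizeToHeight = new ImageResizeStep().height(256);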
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/segmentation
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/segmentation/index/DrawSegmentationStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.segmentation.index; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.data.image.convert.ImageToNDArrayConfig; import ai.konduit.serving.data.image.util.ColorConstants; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; /** * Draw segmentation mask, optionally on an image.<br> * Configuration:<br> * <ul> * <li><b>classColors</b>: Optional: A list of colors to use for each class. Must be in one of the following formats: hex/HTML - "#788E87", * RGB - "rgb(128,0,255)", or one of 16 HTML color names such as \"green\" (https://en.wikipedia.org/wiki/Web_colors#HTML_color_names). * If no colors are specified, or not enough colors are specified, random colors are used instead (not consistent between runs)</li> * <li><b>segmentArray</b>: Name of the NDArray with the class indices, 0 to numClasses-1. Shape [1, height, width]</li> * <li><b>image</b>: Optional. Name of the image to draw the segmentation classes on to. If not provided, the segmentation classes are drawn * onto a black background image</li> * <li><b>outputName</b>: Name of the output image</li> * <li><b>opacity</b>: Optional. Only used when "image" configuration is set. The opacity, between 0.0 and 1.0, of the mask to draw on the image. * Default value of 0.5 if not set. Value of 0.0 is fully transparent, 1.0 is fully opaque.</li> * <li><b>backgroundClass</b>: Optional. If set: Don't draw this class. If not set: all classes will be drawn</li> * </ul> * * @author Alex Black */ @Data @Accessors(fluent = true) @AllArgsConstructor @NoArgsConstructor @JsonName("DRAW_SEGMENTATION") @Schema(description = "A pipeline step that configures how to draw a segmentation mask, optionally on an image.") public class DrawSegmentationStep implements PipelineStep { public static final String DEFAULT_OUTPUT_NAME = "image"; public static final double DEFAULT_OPACITY = 0.5; @Schema(description = "This is an optional field which specifies the list of colors to use for each class. " + ColorConstants.COLOR_DESCRIPTION) private List<String> classColors; @Schema(description = "Name of the NDArray with the class indices, 0 to numClasses-1. Shape [1, height, width].") private String segmentArray; @Schema(description = "An optional field, specifying the name of the image to draw the segmentation classes " + "on to. If not provided, the segmentation classes are drawn onto a black background image.") private String image; @Schema(description = "Name of the output image", defaultValue = DEFAULT_OUTPUT_NAME) private String outputName; @Schema(description = "An optional field that is only used when the <image> key name is set. This specifies the opacity, " + "between 0.0 and 1.0, of the mask to draw on the image. Default value of 0.5 is used if it's not set. " + "Value of 0.0 is fully transparent, 1.0 is fully opaque.", defaultValue = "0.5") private Double opacity; @Schema(description = "An optional field, specifying a class that's not to be drawn. If not set, all classes will be drawn") private Integer backgroundClass; @Schema(description = "Used to account for the fact that the n-dimensional array from ImageToNDArrayConfig may be " + "used to crop images before passing to the network, when the image aspect ratio doesn't match the NDArray " + "aspect ratio. This allows the step to determine the subset of the image actually passed to the network that " + "produced the segmentation prediction to be drawn.") private ImageToNDArrayConfig imageToNDArrayConfig; @Tolerate public DrawSegmentationStep classColors(String... classColors) { return this.classColors(Arrays.asList(classColors)); } }
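A usage sketch of the configuration above; the field names "segment" and "image" are illustrative, and the fluent setters are generated by Lombok's @Accessors(fluent = true):

PipelineStep draw = new DrawSegmentationStep()
        .segmentArray("segment")       // NDArray of class indices, shape [1, height, width]
        .image("image")                // optional; omit to draw on a black background
        .outputName("annotated")
        .opacity(0.7)                  // only relevant when an image is set
        .backgroundClass(0)            // class 0 is not drawn
        .classColors("green", "#788E87", "rgb(128,0,255)");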
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/step/show/ShowImageStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.step.show; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import org.nd4j.shade.jackson.annotation.JsonProperty; @Data @Accessors(fluent = true) @NoArgsConstructor @JsonName("SHOW_IMAGE") @Schema(description = "A pipeline step that configures how to show/render an image from a previous step in an application " + "frame. Usually only used for testing and debugging locally, not when serving from HTTP/GRPC etc endpoints") public class ShowImageStep implements PipelineStep { @Schema(description = "Name of the incoming input image key.", defaultValue = "image") private String imageName = "image"; @Schema(description = "Image display name.", defaultValue = "Image") private String displayName = "Image"; @Schema(description = "Width of the displayed image frame. If null: same size as the image is used") private Integer width; @Schema(description = "Height of the displayed image frame. If null: same size as the image is used") private Integer height; @Schema(description = "Allow multiple images to be shown.") private boolean allowMultiple = false; public ShowImageStep(@JsonProperty("imageName") String imageName, @JsonProperty("displayName") String displayName, @JsonProperty("width") Integer width, @JsonProperty("height") Integer height, @JsonProperty("allowMultiple") boolean allowMultiple){ this.imageName = imageName; this.displayName = displayName; this.width = width; this.height = height; this.allowMultiple = allowMultiple; } }
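A minimal local-debugging sketch using the fluent setters above:

PipelineStep show = new ShowImageStep()
        .imageName("image")
        .displayName("Preview")
        .width(640)      // frame size; leave null to match the image
        .height(480);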
0
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image
java-sources/ai/konduit/serving/konduit-serving-image-config/0.3.0/ai/konduit/serving/data/image/util/ColorConstants.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.image.util; public class ColorConstants { public static final String COLOR_DESCRIPTION = "The color can be a hex/HTML string like " + "\"#788E87\", an RGB value like \"rgb(128,0,255)\", or it can be from a set of predefined HTML color names: " + "[white, silver, gray, black, red, maroon, yellow, olive, lime, green, aqua, teal, blue, navy, fuchsia, purple]"; public static final String INVALID_COLOR = "Invalid color: Must be in one of the following formats: hex/HTML - #788E87, " + "RGB - rgb(128,0,255), or an HTML color name such as \"green\" (https://en.wikipedia.org/wiki/Web_colors#HTML_color_names) - got \"%s\""; }
0
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols/kafka/KonduitServingKafkaJsonMapping.java
package ai.konduit.serving.vertx.protocols.kafka; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT
public class KonduitServingKafkaJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols/kafka/VertxKafkaModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.vertx.protocols.kafka; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-kafka") public class VertxKafkaModuleInfo { private VertxKafkaModuleInfo() {} }
0
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols/kafka
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols/kafka/api/InferenceKafkaApi.java
package ai.konduit.serving.vertx.protocols.kafka.api; public class InferenceKafkaApi { }
0
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols/kafka
java-sources/ai/konduit/serving/konduit-serving-kafka/0.3.0/ai/konduit/serving/vertx/protocols/kafka/verticle/InferenceVerticleKafka.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.vertx.protocols.kafka.verticle; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.settings.constants.Constants; import ai.konduit.serving.pipeline.settings.constants.EnvironmentConstants; import ai.konduit.serving.vertx.config.KafkaConfiguration; import ai.konduit.serving.vertx.verticle.InferenceVerticle; import com.google.common.base.Strings; import io.vertx.core.AsyncResult; import io.vertx.core.Promise; import io.vertx.core.buffer.Buffer; import io.vertx.core.http.HttpServerOptions; import io.vertx.core.http.HttpVersion; import io.vertx.core.impl.ContextInternal; import io.vertx.core.json.JsonObject; import io.vertx.core.net.PemKeyCertOptions; import io.vertx.core.net.SelfSignedCertificate; import io.vertx.kafka.client.consumer.KafkaConsumer; import io.vertx.kafka.client.consumer.KafkaConsumerRecord; import io.vertx.kafka.client.producer.KafkaProducer; import io.vertx.kafka.client.producer.KafkaProducerRecord; import io.vertx.kafka.client.producer.RecordMetadata; import io.vertx.kafka.client.serialization.BufferSerializer; import io.vertx.kafka.client.serialization.JsonObjectSerializer; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.StringSerializer; import java.io.File; import java.sql.Date; import java.time.Instant; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import static ai.konduit.serving.pipeline.settings.KonduitSettings.*; @Slf4j public class InferenceVerticleKafka extends InferenceVerticle { @Override public void start(Promise<Void> startPromise) throws Exception { vertx.executeBlocking(handler -> { try { initialize(); handler.complete(); } catch (Exception exception) { handler.fail(exception); startPromise.fail(exception); } }, resultHandler -> { if (resultHandler.failed()) { if (resultHandler.cause() != null) startPromise.fail(resultHandler.cause()); else { startPromise.fail("Failed to start. Unknown cause."); } } else { int port; String portEnvValue = System.getenv(EnvironmentConstants.KONDUIT_SERVING_PORT); if (portEnvValue != null) { try { port = Integer.parseInt(portEnvValue); } catch (NumberFormatException exception) { log.error("Environment variable \"{}={}\" isn't a valid port number.", EnvironmentConstants.KONDUIT_SERVING_PORT, portEnvValue); startPromise.fail(exception); return; } } else { port = inferenceConfiguration.port(); } if (port < 0 || port > 0xFFFF) { startPromise.fail(new Exception("Valid port range is 0 <= port <= 65535. The given port was " + port)); return; } KafkaConfiguration kafkaConfiguration = inferenceConfiguration.kafkaConfiguration(); Map<String, String> configConsumer = new HashMap<>(); configConsumer.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, String.format("%s:%s", inferenceConfiguration.host(), port)); configConsumer.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, getKafkaConsumerKeyDeserializerClass(kafkaConfiguration != null ? kafkaConfiguration.consumerKeyDeserializerClass() : null)); configConsumer.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, getKafkaConsumerValueDeserializerClass(kafkaConfiguration != null ? kafkaConfiguration.consumerValueDeserializerClass() : null)); configConsumer.put(ConsumerConfig.GROUP_ID_CONFIG, getConsumerGroupId(kafkaConfiguration != null ? kafkaConfiguration.consumerGroupId() : null)); configConsumer.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, getConsumerAutoOffsetReset(kafkaConfiguration != null ? kafkaConfiguration.consumerAutoOffsetReset() : null)); configConsumer.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, getConsumerAutoCommit(kafkaConfiguration != null ? kafkaConfiguration.consumerAutoCommit() : null)); String producerValueSerializerClass = getKafkaProducerValueSerializerClass(kafkaConfiguration != null ? kafkaConfiguration.producerValueSerializerClass() : null); Map<String, String> configProducer = new HashMap<>(); configProducer.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, String.format("%s:%s", inferenceConfiguration.host(), port)); configProducer.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, getKafkaProducerKeySerializerClass(kafkaConfiguration != null ? kafkaConfiguration.producerKeySerializerClass() : null)); configProducer.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, producerValueSerializerClass); configProducer.put(ProducerConfig.ACKS_CONFIG, getProducerAcks(kafkaConfiguration != null ? kafkaConfiguration.producerAcks() : null)); KafkaConsumer consumer = KafkaConsumer.create(vertx, configConsumer); KafkaProducer producer = KafkaProducer.create(vertx, configProducer); consumer.handler( recordIn -> { KafkaConsumerRecord castedRecordIn = (KafkaConsumerRecord) recordIn; Object input = castedRecordIn.value(); log.debug("Processing input from topic: {} at {}. " + "Headers={}, Key={}, " + "Value={}, Partition={}, " + "Offset={}", castedRecordIn.topic(), Date.from(Instant.ofEpochMilli(castedRecordIn.timestamp())), castedRecordIn.headers(), castedRecordIn.key(), input, castedRecordIn.partition(), castedRecordIn.offset()); Data output; if(input instanceof Buffer) { output = pipelineExecutor.exec(Data.fromBytes(((Buffer) input).getBytes())); } else if(input instanceof JsonObject) { output = pipelineExecutor.exec(Data.fromJson(((JsonObject) input).encode())); } else if(input instanceof String) { output = pipelineExecutor.exec(Data.fromJson((String) input)); } else { throw new IllegalStateException("No conversion format exists for input value class type: " + input.getClass().getCanonicalName()); } String producerTopicName = getProducerTopicName(kafkaConfiguration != null ? 
kafkaConfiguration.producerTopicName() : null); KafkaProducerRecord recordOut; if(producerValueSerializerClass.equals(BufferSerializer.class.getCanonicalName())) { recordOut = KafkaProducerRecord.create(producerTopicName, Buffer.buffer(output.asBytes())); } else if(producerValueSerializerClass.equals(JsonObjectSerializer.class.getCanonicalName())) { recordOut = KafkaProducerRecord.create(producerTopicName, new JsonObject(output.toJson())); } else if(producerValueSerializerClass.equals(StringSerializer.class.getCanonicalName())) { recordOut = KafkaProducerRecord.create(producerTopicName, output.toJson()); } else { throw new IllegalStateException("No conversion format exists for output value class type: " + producerValueSerializerClass); } producer.send(recordOut, recordOutHandler -> { AsyncResult<RecordMetadata> castedRecordOutHandler = (AsyncResult<RecordMetadata>) recordOutHandler; if (castedRecordOutHandler.succeeded()) { log.debug("Sent output to topic: {} at {}. " + "Headers={}, Key={}, " + "Value={}, Partition={}, " + "Offset={}", recordOut.topic(), Date.from(Instant.ofEpochMilli(recordOut.timestamp())), recordOut.headers(), recordOut.key(), recordOut.value(), recordOut.partition(), castedRecordOutHandler.result().getOffset()); } else { log.error("Failed to send output to topic: {} at {}. " + "Headers={}, Key={}, " + "Value={}, Partition={}", recordOut.topic(), Date.from(Instant.ofEpochMilli(recordOut.timestamp())), recordOut.headers(), recordOut.key(), recordOut.value(), recordOut.partition(), castedRecordOutHandler.cause()); } }); } ); String consumerTopicName = getConsumerTopicName(kafkaConfiguration != null ? kafkaConfiguration.consumerTopicName() : null); consumer.subscribe(consumerTopicName, subscribeHandler -> { AsyncResult<Void> castedSubscribeHandler = (AsyncResult<Void>) subscribeHandler; if (castedSubscribeHandler.succeeded()) { log.info("Subscribed to topic: {}", consumerTopicName); if(getStartHttpServerForKafka(kafkaConfiguration != null ? kafkaConfiguration.startHttpServerForKafka() : Constants.DEFAULT_START_HTTP_SERVER_FOR_KAFKA)) { String httpHost = getHttpKafkaHost(kafkaConfiguration != null ? kafkaConfiguration.httpKafkaHost() : Constants.DEFAULT_HTTP_KAFKA_HOST); int httpPort = getHttpKafkaPort(kafkaConfiguration != null ? kafkaConfiguration. 
httpKafkaPort(): Constants.DEFAULT_HTTP_KAFKA_PORT); log.info("Starting HTTP server for kafka on host {} and port {}", httpHost, httpPort); HttpServerOptions httpServerOptions = new HttpServerOptions() .setPort(httpPort) .setHost(httpHost) .setSsl(false) .setSslHandshakeTimeout(0) .setCompressionSupported(true) .setTcpKeepAlive(true) .setTcpNoDelay(true) .setAlpnVersions(Arrays.asList(HttpVersion.HTTP_1_0,HttpVersion.HTTP_1_1)) .setUseAlpn(false); boolean useSsl = inferenceConfiguration.useSsl(); String sslKeyPath = inferenceConfiguration.sslKeyPath(); String sslCertificatePath = inferenceConfiguration.sslCertificatePath(); if (useSsl) { if (Strings.isNullOrEmpty(sslKeyPath) || Strings.isNullOrEmpty(sslCertificatePath)) { if (Strings.isNullOrEmpty(sslKeyPath)) { log.warn("No pem key file specified for SSL."); } if (Strings.isNullOrEmpty(sslCertificatePath)) { log.warn("No pem certificate file specified for SSL."); } log.info("Using an auto generated self signed pem key and certificate with SSL."); httpServerOptions.setKeyCertOptions(SelfSignedCertificate.create().keyCertOptions()); } else { sslKeyPath = new File(sslKeyPath).getAbsolutePath(); sslCertificatePath = new File(sslCertificatePath).getAbsolutePath(); log.info("Using SSL with PEM Key: {} and certificate {}.", sslKeyPath, sslCertificatePath); httpServerOptions.setPemKeyCertOptions(new PemKeyCertOptions().setKeyPath(sslKeyPath).setCertPath(sslCertificatePath)); } } vertx.createHttpServer(httpServerOptions) .requestHandler(httpHandler -> { if (httpHandler.path().equals("/health")) { httpHandler.response().end("Kafka server running"); } else { httpHandler.response().setStatusCode(404).end("Route not implemented"); } }) .exceptionHandler(throwable -> log.error("Error occurred during http request.", throwable)) .listen(httpPort, httpHost, handler -> { if (handler.failed()) { startPromise.fail(handler.cause()); } else { int actualPort = handler.result().actualPort(); if(inferenceConfiguration.kafkaConfiguration() == null) { inferenceConfiguration.kafkaConfiguration(new KafkaConfiguration()); } inferenceConfiguration.kafkaConfiguration().httpKafkaPort(actualPort); try { ((ContextInternal) context).getDeployment() .deploymentOptions() .setConfig(new JsonObject(inferenceConfiguration.toJson())); long pid = getPid(); saveInspectionDataIfRequired(pid); log.info("HTTP server for kafka is listening on host: '{}'", inferenceConfiguration.host()); log.info("HTTP server for kafka started on port {}", actualPort); startPromise.complete(); } catch (Throwable throwable) { startPromise.fail(throwable); } } }); } else { long pid = getPid(); saveInspectionDataIfRequired(pid); startPromise.complete(); } } else { log.error("Could not subscribe to topic: {}", consumerTopicName, castedSubscribeHandler.cause()); startPromise.fail(castedSubscribeHandler.cause()); } }); } }); } }
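A client-side sketch of how a message reaches this verticle, using the same io.vertx.kafka.client API the verticle itself uses. The bootstrap address and the topic name "inference-in" are illustrative; the actual topic is resolved by getConsumerTopicName(...) from the KafkaConfiguration:

Map<String, String> config = new HashMap<>();
config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");   // illustrative address
config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName());
config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName());
KafkaProducer<String, String> producer = KafkaProducer.create(vertx, config);
// String payloads are parsed via Data.fromJson(...) by the consumer handler above.
producer.send(KafkaProducerRecord.create("inference-in", Data.singleton("key", "value").toJson()));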
0
java-sources/ai/konduit/serving/konduit-serving-meta/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-meta/0.3.0/ai/konduit/serving/meta/StaticConfigGenerator.java
/* * ****************************************************************************** * * * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * See the NOTICE file distributed with this work for additional * * information regarding copyright ownership. * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.meta; import ai.konduit.serving.util.ObjectMappers; import org.apache.commons.io.FileUtils; import org.nd4j.common.io.ClassPathResource; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Map; import java.util.stream.Collectors; public class StaticConfigGenerator { public static void main(String[] args) throws IOException { if (args.length == 0 || args[0].isEmpty()) { System.err.println("'outputPath' is undefined or empty. Usage: <mainClass> <outputPath>"); System.exit(1); } else { String outputPath = args[0]; File jsonMappingResource = new ClassPathResource("META-INF/konduit-serving/JsonNameMapping").getFile(); Map<String, String> outputConfigMap = Arrays.stream(FileUtils.readFileToString(jsonMappingResource, StandardCharsets.UTF_8).split("\n")) .map(line -> line.split(",")) .filter(splits -> splits[2].equals("ai.konduit.serving.pipeline.api.step.PipelineStep")) .collect(Collectors.toMap(splits -> splits[0], splits -> { try { return ObjectMappers.toJson(Class.forName(splits[1]).getConstructor().newInstance()); } catch (Exception exception) { System.err.format("Unable to create config for: %s%n%s%n", splits[1], exception); System.exit(1); return null; } })); FileUtils.writeStringToFile(new File(outputPath), ObjectMappers.toJson(outputConfigMap), StandardCharsets.UTF_8); } } }
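The expected input format and invocation, inferred from the code above (the mapping entry shown is hypothetical):

// Each line of META-INF/konduit-serving/JsonNameMapping has the form: jsonName,className,interfaceName
// e.g. (hypothetical entry):
//   IMAGE_RESIZE,ai.konduit.serving.data.image.step.resize.ImageResizeStep,ai.konduit.serving.pipeline.api.step.PipelineStep
// Only rows whose interface is PipelineStep are kept; each class is instantiated via its no-arg
// constructor and its default configuration is written as JSON, keyed by jsonName:
//   java -cp <konduit-serving-classpath> ai.konduit.serving.meta.StaticConfigGenerator /tmp/step-defaults.json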
0
java-sources/ai/konduit/serving/konduit-serving-mqtt/0.3.0/ai/konduit/serving/vertx/protocols
java-sources/ai/konduit/serving/konduit-serving-mqtt/0.3.0/ai/konduit/serving/vertx/protocols/mqtt/KonduitServingMqttJsonMapping.java
package ai.konduit.serving.vertx.protocols.mqtt; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT
public class KonduitServingMqttJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-mqtt/0.3.0/ai/konduit/serving/vertx/protocols
java-sources/ai/konduit/serving/konduit-serving-mqtt/0.3.0/ai/konduit/serving/vertx/protocols/mqtt/VertxMqttModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.vertx.protocols.mqtt; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-mqtt") public class VertxMqttModuleInfo { private VertxMqttModuleInfo(){ } }
0
java-sources/ai/konduit/serving/konduit-serving-mqtt/0.3.0/ai/konduit/serving/vertx/protocols/mqtt
java-sources/ai/konduit/serving/konduit-serving-mqtt/0.3.0/ai/konduit/serving/vertx/protocols/mqtt/verticle/InferenceVerticleMqtt.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.vertx.protocols.mqtt.verticle; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.settings.constants.EnvironmentConstants; import ai.konduit.serving.vertx.verticle.InferenceVerticle; import com.google.common.base.Strings; import io.netty.handler.codec.mqtt.MqttQoS; import io.vertx.core.Promise; import io.vertx.core.buffer.Buffer; import io.vertx.core.impl.ContextInternal; import io.vertx.core.json.JsonObject; import io.vertx.core.net.PemKeyCertOptions; import io.vertx.core.net.SelfSignedCertificate; import io.vertx.mqtt.MqttServer; import io.vertx.mqtt.MqttServerOptions; import lombok.extern.slf4j.Slf4j; import java.io.File; import java.nio.charset.StandardCharsets; import java.util.stream.Collectors; @Slf4j public class InferenceVerticleMqtt extends InferenceVerticle { @Override public void start(Promise<Void> startPromise) { vertx.executeBlocking(handler -> { try { initialize(); handler.complete(); } catch (Exception exception) { handler.fail(exception); startPromise.fail(exception); } }, resultHandler -> { if (resultHandler.failed()) { if (resultHandler.cause() != null) startPromise.fail(resultHandler.cause()); else { startPromise.fail("Failed to start. Unknown cause."); } } else { int port; String portEnvValue = System.getenv(EnvironmentConstants.KONDUIT_SERVING_PORT); if (portEnvValue != null) { try { port = Integer.parseInt(portEnvValue); } catch (NumberFormatException exception) { log.error("Environment variable \"{}={}\" isn't a valid port number.", EnvironmentConstants.KONDUIT_SERVING_PORT, portEnvValue); startPromise.fail(exception); return; } } else { port = inferenceConfiguration.port(); } if (port < 0 || port > 0xFFFF) { startPromise.fail(new Exception("Valid port range is 0 <= port <= 65535. The given port was " + port)); return; } MqttServerOptions mqttServerOptions = new MqttServerOptions() .setHost(inferenceConfiguration.host()) .setPort(port); boolean useSsl = inferenceConfiguration.useSsl(); String sslKeyPath = inferenceConfiguration.sslKeyPath(); String sslCertificatePath = inferenceConfiguration.sslCertificatePath(); if (useSsl) { if (Strings.isNullOrEmpty(sslKeyPath) || Strings.isNullOrEmpty(sslCertificatePath)) { if (Strings.isNullOrEmpty(sslKeyPath)) { log.warn("No pem key file specified for SSL."); } if (Strings.isNullOrEmpty(sslCertificatePath)) { log.warn("No pem certificate file specified for SSL."); } log.info("Using an auto generated self signed pem key and certificate with SSL."); mqttServerOptions.setKeyCertOptions(SelfSignedCertificate.create().keyCertOptions()); } else { sslKeyPath = new File(sslKeyPath).getAbsolutePath(); sslCertificatePath = new File(sslCertificatePath).getAbsolutePath(); log.info("Using SSL with PEM Key: {} and certificate {}.", sslKeyPath, sslCertificatePath); mqttServerOptions.setPemKeyCertOptions(new PemKeyCertOptions().setKeyPath(sslKeyPath).setCertPath(sslCertificatePath)); } } MqttServer mqttServer = MqttServer.create(vertx, mqttServerOptions); mqttServer.endpointHandler( endpoint -> { log.info("MQTT client [{}] request to connect, clean session = {}", endpoint.clientIdentifier() , endpoint.isCleanSession()); if (endpoint.auth() != null) { log.info("[username = {}, password = {}]", endpoint.auth().getUsername(), endpoint.auth().getPassword()); } if (endpoint.will() != null) { log.info("[will topic = {} msg = {} QoS = {} isRetain = {}]", endpoint.will().getWillTopic(), endpoint.will().getWillMessageBytes(), endpoint.will().getWillQos(), endpoint.will().isWillRetain()); } log.info("[keep alive timeout = {}]", endpoint.keepAliveTimeSeconds()); endpoint.accept(false) .disconnectHandler(v -> log.info("Received disconnect from client")) .subscribeHandler(handler -> endpoint.subscribeAcknowledge(handler.messageId(), handler.topicSubscriptions().stream().map(subscription -> { log.info("Subscription for {} with QoS {}", subscription.topicName(), subscription.qualityOfService()); return subscription.qualityOfService(); }).collect(Collectors.toList()))) .unsubscribeHandler(handler -> { handler.topics().forEach(topic -> log.info("Unsubscription for {}", topic)); endpoint.unsubscribeAcknowledge(handler.messageId()); }) .publishHandler(message -> { String topicName = message.topicName(); String messageString = message.payload().toString(StandardCharsets.UTF_8); log.info("Just received message [{}] with QoS [{}] in topic [{}]", messageString, message.qosLevel(), topicName); if (message.qosLevel() == MqttQoS.AT_LEAST_ONCE) { endpoint.publishAcknowledge(message.messageId()); } else if (message.qosLevel() == MqttQoS.EXACTLY_ONCE) { endpoint.publishReceived(message.messageId()); } try { String output = pipelineExecutor.exec(Data.fromJson(messageString)).toJson(); String outputTopic = topicName + "-out"; log.debug("Publishing message: {} to topic: {}", output, outputTopic); endpoint.publish(outputTopic, Buffer.buffer(output), MqttQoS.EXACTLY_ONCE, false, false); log.debug("Message published to topic: {}", outputTopic); } catch (Throwable throwable) { log.error("Unable to publish data due to the following error", throwable); endpoint.publish(topicName + "-out", Buffer.buffer(new JsonObject() .put("errorMessage", throwable.getMessage()) .encodePrettily()), MqttQoS.EXACTLY_ONCE, false, false); } }) .publishReleaseHandler(endpoint::publishComplete) 
.publishAcknowledgeHandler(messageId -> log.info("Received ack for message = {}", messageId)) .publishReceivedHandler(endpoint::publishRelease) .publishCompletionHandler(messageId -> log.info("Received ack for message = {}", messageId)) .pingHandler(v -> log.info("Ping received from client")); }) .listen(handler -> { if (handler.failed()) { startPromise.fail(handler.cause()); } else { int actualPort = handler.result().actualPort(); inferenceConfiguration.port(actualPort); try { ((ContextInternal) context).getDeployment() .deploymentOptions() .setConfig(new JsonObject(inferenceConfiguration.toJson())); long pid = getPid(); saveInspectionDataIfRequired(pid); log.info("MQTT server listening on host: '{}'", inferenceConfiguration.host()); log.info("MQTT server started on port {}", actualPort); startPromise.complete(); } catch (Throwable throwable) { startPromise.fail(throwable); } } }); } }); } }
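A client-side sketch against this verticle using io.vertx.mqtt.MqttClient (a separate Vert.x artifact, not shown in this module). The host, port, and topic name "inference" are illustrative; per the publish handler above, the pipeline output arrives on "<topic>-out":

MqttClient client = MqttClient.create(Vertx.vertx());
client.connect(1883, "localhost", connectResult -> {          // illustrative host/port
    client.publishHandler(reply ->
            System.out.println("Reply on " + reply.topicName() + ": " + reply.payload()));
    client.subscribe("inference-out", MqttQoS.EXACTLY_ONCE.value());
    client.publish("inference", Buffer.buffer("{\"key\" : \"value\"}"),
            MqttQoS.AT_LEAST_ONCE, false, false);             // payload is parsed via Data.fromJson(...)
});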
0
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j/KonduitServingNd4jJsonMapping.java
package ai.konduit.serving.data.nd4j; import ai.konduit.serving.pipeline.api.serde.JsonSubType; import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping; import java.util.ArrayList; import java.util.List; //GENERATED CLASS DO NOT EDIT
public class KonduitServingNd4jJsonMapping implements JsonSubTypesMapping { @Override public List<JsonSubType> getSubTypesMapping() { List<JsonSubType> l = new ArrayList<>(); return l; } }
0
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j/Nd4jModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.nd4j; import ai.konduit.serving.annotation.module.*; @ModuleInfo("konduit-serving-nd4j") @RequiresDependenciesAny({ //Requires ND4J native + one of the classifiers
@Requires(requires = Req.ALL, value = {@Dependency(gId = "org.nd4j", aId = "nd4j-native", ver = "1.0.0-SNAPSHOT"), @Dependency(gId = "org.nd4j", aId = "nd4j-native", ver = "1.0.0-SNAPSHOT", classifier = { "linux-x86_64", "linux-x86_64-avx2", "linux-x86_64-avx512", "linux-ppc64le", "linux-arm64", "linux-armhf", "windows-x86_64", "windows-x86_64-avx2", "macosx-x86_64", "macosx-x86_64-avx2"}, cReq = Req.ANY) //TODO 2020/05/29 AB - Not including android classifiers here as we don't yet support that for KS
}), //OR it requires CUDA 10.0, 10.1 or 10.2 + one of the classifiers
@Requires(requires = Req.ALL, value = {@Dependency(gId = "org.nd4j", aId = "nd4j-cuda-10.0", ver = "1.0.0-SNAPSHOT"), @Dependency(gId = "org.nd4j", aId = "nd4j-cuda-10.0", ver = "1.0.0-SNAPSHOT", classifier = {"linux-x86_64", "linux-ppc64le", "linux-arm64","windows-x86_64"}), @Dependency(gId = "org.bytedeco", aId = "cuda", ver = "10.2-7.6-1.5.3", classifier = {"linux-x86_64", "linux-ppc64le", "windows-x86_64"}), @Dependency(gId = "org.bytedeco", aId = "cuda", ver = "10.0-7.4-1.5", classifier = {"linux-x86_64", "linux-ppc64le", "windows-x86_64"})}), @Requires(requires = Req.ALL, value = {@Dependency(gId = "org.nd4j", aId = "nd4j-cuda-10.1", ver = "1.0.0-SNAPSHOT"), @Dependency(gId = "org.nd4j", aId = "nd4j-cuda-10.1", ver = "1.0.0-SNAPSHOT", classifier = {"linux-x86_64", "linux-ppc64le", /*"linux-arm64",*/ "windows-x86_64"}), //Note 1.0.0-SNAPSHOT was only released for linux-arm64 for CUDA 10.0
@Dependency(gId = "org.bytedeco", aId = "cuda", ver = "10.1-7.6-1.5.2", classifier = {"linux-x86_64", "linux-ppc64le", "windows-x86_64"})}), @Requires(requires = Req.ALL, value = {@Dependency(gId = "org.nd4j", aId = "nd4j-cuda-10.2", ver = "1.0.0-SNAPSHOT"), @Dependency(gId = "org.nd4j", aId = "nd4j-cuda-10.2", ver = "1.0.0-SNAPSHOT", classifier = {"linux-x86_64", "linux-ppc64le", /*"linux-arm64",*/ "windows-x86_64"}), @Dependency(gId = "org.bytedeco", aId = "cuda", ver = "10.2-7.6-1.5.3", classifier = {"linux-x86_64", "linux-ppc64le", "windows-x86_64"})}) }) public class Nd4jModuleInfo { private Nd4jModuleInfo(){ } }
0
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j/data/ND4JNDArray.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.nd4j.data; import ai.konduit.serving.data.nd4j.util.ND4JUtil; import ai.konduit.serving.pipeline.api.data.NDArrayType; import ai.konduit.serving.pipeline.impl.data.ndarray.BaseNDArray; import org.nd4j.linalg.api.ndarray.INDArray; public class ND4JNDArray extends BaseNDArray<INDArray> { public ND4JNDArray(INDArray array) { super(array); } @Override public NDArrayType type() { return ND4JUtil.typeNd4jToNDArrayType(array.dataType()); } @Override public long[] shape() { return array.shape(); } @Override public long size(int dimension) { return array.size(dimension); } @Override public int rank() { return array.rank(); } }
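A small sketch of the wrapper in use:

INDArray nd4j = Nd4j.ones(DataType.FLOAT, 3, 4);
ND4JNDArray wrapped = new ND4JNDArray(nd4j);
wrapped.type();                       // the konduit float type, via ND4JUtil's data-type mapping
Arrays.toString(wrapped.shape());     // [3, 4]
wrapped.rank();                       // 2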
0
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j/format/ND4JConverters.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.nd4j.format; import ai.konduit.serving.data.nd4j.util.ND4JUtil; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.NDArrayType; import ai.konduit.serving.pipeline.api.format.NDArrayConverter; import ai.konduit.serving.pipeline.api.format.NDArrayFormat; import ai.konduit.serving.pipeline.impl.data.ndarray.SerializedNDArray; import lombok.AllArgsConstructor; import org.nd4j.common.base.Preconditions; import org.nd4j.common.util.ArrayUtil; import org.nd4j.linalg.api.buffer.DataBuffer; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.shape.Shape; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.indexing.NDArrayIndex; import java.nio.ByteBuffer; public class ND4JConverters { @AllArgsConstructor public static abstract class BaseFromNd4jArrConverter<T> implements NDArrayConverter { private final Class<T> cTo; @Override public boolean canConvert(NDArray from, NDArrayFormat to) { return canConvert(from, to.formatType()); } @Override public boolean canConvert(NDArray from, Class<?> to) { boolean ret = INDArray.class.isAssignableFrom(from.get().getClass()) && to.isAssignableFrom(cTo); return ret; } @Override public <T> T convert(NDArray from, NDArrayFormat<T> to) { throw new UnsupportedOperationException("Not yet implemented"); } @Override public <U> U convert(NDArray from, Class<U> to) { INDArray arr = (INDArray) from.get(); return (U)convert(arr); } public abstract T convert(INDArray from); } public static class ArrToFloat1Converter extends BaseFromNd4jArrConverter<float[]> { public ArrToFloat1Converter() { super(float[].class); } @Override public float[] convert(INDArray from) { return from.toFloatVector(); } } public static class ArrToFloat2Converter extends BaseFromNd4jArrConverter<float[][]> { public ArrToFloat2Converter() { super(float[][].class); } @Override public float[][] convert(INDArray from) { return from.toFloatMatrix(); } } public static class ArrToFloat3Converter extends BaseFromNd4jArrConverter<float[][][]> { public ArrToFloat3Converter() { super(float[][][].class); } @Override public float[][][] convert(INDArray from) { Preconditions.checkState(from.rank() == 3, "Can only convert rank 3 arrays to float[][][], got array with shape %s", from.shape()); float[][][] out = new float[(int)from.size(0)][0][0]; for( int i = 0; i < out.length; i++) { out[i] = from.get(NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all()).toFloatMatrix(); } return out; } } public static class ArrToFloat4Converter extends BaseFromNd4jArrConverter<float[][][][]> { public ArrToFloat4Converter() { super(float[][][][].class); } @Override public float[][][][] convert(INDArray from) { Preconditions.checkState(from.rank() == 4, "Can only convert rank 4 arrays to float[][][][], got array with shape %s", from.shape()); float[][][][] out = new float[(int)from.size(0)][(int)from.size(1)][0][0]; for( int i=0; i<out.length; i++){ for( int j = 0; j < out[0].length; j++) { out[i][j] = from.get(NDArrayIndex.point(i), NDArrayIndex.point(j), NDArrayIndex.all(), NDArrayIndex.all()).toFloatMatrix(); } } return out; } } public static class ArrToFloat5Converter extends BaseFromNd4jArrConverter<float[][][][][]> { public ArrToFloat5Converter() { super(float[][][][][].class); } @Override public float[][][][][] convert(INDArray from) { Preconditions.checkState(from.rank() == 5, "Can only convert rank 5 arrays to float[][][][][], got array with shape %s", from.shape()); float[][][][][] out = new float[(int)from.size(0)][(int)from.size(1)][(int)from.size(2)][0][0]; for( int i = 0; i < out.length; i++) { for( int j = 0; j < out[0].length; j++) { for( int k = 0; k < out[i][j].length; k++) out[i][j][k] = from.get(NDArrayIndex.point(i), NDArrayIndex.point(j), NDArrayIndex.point(k), NDArrayIndex.all()).toFloatMatrix(); } } return out; } } @AllArgsConstructor public static abstract class BaseToNd4jArrConverter<T> implements NDArrayConverter { private final Class<T> clazz; @Override public boolean canConvert(NDArray from, NDArrayFormat to) { return clazz.isAssignableFrom(from.get().getClass()); //Basically: return from.get() instanceof clazz
} @Override public boolean canConvert(NDArray from, Class<?> to) { return clazz.isAssignableFrom(from.get().getClass()) && INDArray.class.isAssignableFrom(to); } @Override public <U> U convert(NDArray from, Class<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert NDArray to %s", to); T t = (T) from.get(); INDArray out = convert(t); return (U)out; } @Override public <U> U convert(NDArray from, NDArrayFormat<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert to format: %s", to); T t = (T) from.get(); INDArray arr = convert(t); return (U)arr; } public abstract INDArray convert(T from); } public static class Float1ToArrConverter extends BaseToNd4jArrConverter<float[]> { public Float1ToArrConverter() { super(float[].class); } @Override public INDArray convert(float[] from) { return Nd4j.createFromArray(from); } } public static class Float2ToArrConverter extends BaseToNd4jArrConverter<float[][]> { public Float2ToArrConverter() { super(float[][].class); } @Override public INDArray convert(float[][] from) { return Nd4j.createFromArray(from); } } public static class Float3ToArrConverter extends BaseToNd4jArrConverter<float[][][]> { public Float3ToArrConverter() { super(float[][][].class); } @Override public INDArray convert(float[][][] from) { return Nd4j.createFromArray(from); } } public static class Float4ToArrConverter extends BaseToNd4jArrConverter<float[][][][]> { public Float4ToArrConverter() { super(float[][][][].class); } @Override public INDArray convert(float[][][][] from) { return Nd4j.createFromArray(from); } } public static class Float5ToArrConverter extends BaseToNd4jArrConverter<float[][][][][]> { public Float5ToArrConverter() { super(float[][][][][].class); } @Override public INDArray convert(float[][][][][] array) { Preconditions.checkNotNull(array, "Cannot create INDArray from null Java array"); ArrayUtil.assertNotRagged(array); if(array.length == 0 || array[0].length == 0 || array[0][0].length == 0 || array[0][0][0].length == 0 || array[0][0][0][0].length == 0) return Nd4j.empty(DataType.FLOAT); long[] shape = new long[]{array.length, array[0].length, array[0][0].length, array[0][0][0].length, array[0][0][0][0].length}; return Nd4j.create(flatten(array), shape, ArrayUtil.calcStrides(shape), 'c', DataType.FLOAT); } } public static float[] flatten(float[][][][][] arr) { float[] ret = new float[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length * arr[0][0][0][0].length]; int count = 0; for(int i = 0; i < arr.length; ++i) { for(int j = 0; j < arr[0].length; ++j) { for(int k = 0; k < arr[0][0].length; ++k) { for(int l = 0; l < arr[0][0][0].length; ++l) { System.arraycopy(arr[i][j][k][l], 0, ret, count, arr[0][0][0][0].length); count += arr[0][0][0][0].length; } } } } return ret; } ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@AllArgsConstructor public static class SerializedToNd4jArrConverter implements NDArrayConverter { @Override public boolean canConvert(NDArray from, NDArrayFormat to) { return canConvert(from, to.formatType()); } @Override public boolean canConvert(NDArray from, Class<?> to) { return SerializedNDArray.class.isAssignableFrom(from.get().getClass()) && INDArray.class.isAssignableFrom(to); } @Override public <U> U convert(NDArray from, Class<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert NDArray to %s", to); SerializedNDArray t = (SerializedNDArray) from.get(); INDArray out = convert(t); return (U)out; } @Override public <U> U convert(NDArray from, NDArrayFormat<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert to format: %s", to); SerializedNDArray f = (SerializedNDArray) from.get(); INDArray arr = convert(f); return (U)arr; } public INDArray convert(SerializedNDArray from){ DataType dt = ND4JUtil.typeNDArrayTypeToNd4j(from.getType()); long[] shape = from.getShape(); long length = ArrayUtil.prodLong(shape); ByteBuffer bb = from.getBuffer(); bb.rewind(); DataBuffer db = Nd4j.createBuffer(bb, dt, (int)length, 0); INDArray arr = Nd4j.create(db, shape); return arr; } } @AllArgsConstructor public static class Nd4jToSerializedConverter implements NDArrayConverter { @Override public boolean canConvert(NDArray from, NDArrayFormat to) { return canConvert(from, to.formatType()); } @Override public boolean canConvert(NDArray from, Class<?> to) { return INDArray.class.isAssignableFrom(from.get().getClass()) && SerializedNDArray.class.isAssignableFrom(to); } @Override public <U> U convert(NDArray from, Class<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert SerializedNDArray to %s", to); INDArray f = (INDArray) from.get(); SerializedNDArray t = convert(f); return (U)t; } @Override public <U> U convert(NDArray from, NDArrayFormat<U> to) { Preconditions.checkState(canConvert(from, to), "Unable to convert to format: %s", to); INDArray f = (INDArray) from.get(); SerializedNDArray t = convert(f); return (U)t; } public SerializedNDArray convert(INDArray from){ if(from.isView() || from.ordering() != 'c' || !Shape.hasDefaultStridesForShape(from)) from = from.dup('c'); NDArrayType type = ND4JUtil.typeNd4jToNDArrayType(from.dataType()); long[] shape = from.shape(); ByteBuffer bb = from.data().asNio(); return new SerializedNDArray(type, shape, bb); } } }
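A round-trip sketch through the serialized form, using the two converters defined at the end of this class:

INDArray original = Nd4j.linspace(1, 6, 6).reshape(2, 3);
SerializedNDArray serialized = new ND4JConverters.Nd4jToSerializedConverter().convert(original);
INDArray restored = new ND4JConverters.SerializedToNd4jArrConverter().convert(serialized);
// restored should equal original: same data type, shape, and buffer contents.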
0
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j/format/ND4JNDArrayFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.nd4j.format; import ai.konduit.serving.data.nd4j.data.ND4JNDArray; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.data.ValueType; import ai.konduit.serving.pipeline.api.format.NDArrayFactory; import ai.konduit.serving.pipeline.impl.data.wrappers.ListValue; import com.google.common.primitives.Doubles; import com.google.common.primitives.Floats; import com.google.common.primitives.Longs; import org.nd4j.common.base.Preconditions; import org.nd4j.common.primitives.Pair; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import java.util.*; public class ND4JNDArrayFactory implements NDArrayFactory { @Override public Set<Class<?>> supportedTypes() { Set<Class<?>> s = new HashSet<>(); s.add(INDArray.class); //TODO do we want to allow creating INDArray from float[], float[][] etc? // Probably not, given we can convert behind the scenes easily if needed... return s; } @Override public boolean canCreateFrom(Object o) { return o instanceof INDArray || o instanceof ListValue; } @Override public NDArray create(Object o) { Preconditions.checkState(canCreateFrom(o), "Unable to create ND4J NDArray from object of %s", o.getClass()); INDArray a; if(o instanceof INDArray){ a = (INDArray)o; } else if (o instanceof ListValue) { List<Long> shape = new ArrayList<>(); List<Float> data = new ArrayList<>(); ListValue listValue = (ListValue) o; getData(listValue, data); getShape(listValue, shape); a = Nd4j.create(Floats.toArray(data), Longs.toArray(shape)); } else { throw new IllegalStateException(); } //TODO add all the other java types! 
return new ND4JNDArray(a); } private void getData(ListValue listValue, List<Float> data) { for (Object object: listValue.get()) { if(listValue.elementType() == ValueType.LIST) { if(object instanceof Pair) { Pair pair = (Pair) object; getData(new ListValue((List) pair.getKey(), ValueType.LIST), data); } else { if(object instanceof Double || object instanceof Long) { data.add(Float.valueOf(String.valueOf(object))); } else { throw new IllegalStateException(String.format("Can't convert type %s to an NDArray", object.getClass().getCanonicalName())); } } } else if(listValue.elementType() == ValueType.DOUBLE || listValue.elementType() == ValueType.INT64) { data.add(Float.valueOf(String.valueOf(object))); } else { throw new IllegalStateException(String.format("Can't convert type %s to an NDArray", listValue.elementType())); } } } private void getShape(ListValue listValue, List<Long> shape) { if(listValue.get() == null || listValue.get().isEmpty()) { throw new IllegalStateException("Empty or zero sized arrays are not accepted!"); } else { shape.add((long) listValue.get().size()); if (listValue.elementType() == ValueType.LIST) { if(listValue.get().get(0) instanceof Pair) { Pair pair = (Pair) listValue.get().get(0); ListValue listValueInternal = new ListValue((List) pair.getKey(), ValueType.LIST); getShape(listValueInternal, shape); } } } } }
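A minimal usage sketch for this factory; the array contents are illustrative:

import ai.konduit.serving.pipeline.api.data.NDArray;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class FactoryUsageSketch {
    public static void main(String[] args) {
        ND4JNDArrayFactory factory = new ND4JNDArrayFactory();
        INDArray arr = Nd4j.createFromArray(new float[]{1f, 2f, 3f});
        if (factory.canCreateFrom(arr)) {
            NDArray wrapped = factory.create(arr);  //wraps the INDArray as an ND4JNDArray
            System.out.println(wrapped.get());      //the underlying INDArray
        }
    }
}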
0
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j
java-sources/ai/konduit/serving/konduit-serving-nd4j/0.3.0/ai/konduit/serving/data/nd4j/util/ND4JUtil.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.data.nd4j.util; import ai.konduit.serving.pipeline.api.data.NDArrayType; import lombok.NonNull; import org.nd4j.linalg.api.buffer.DataType; public class ND4JUtil { private ND4JUtil(){ } public static NDArrayType typeNd4jToNDArrayType(@NonNull DataType dataType) { switch (dataType){ case DOUBLE: return NDArrayType.DOUBLE; case FLOAT: return NDArrayType.FLOAT; case HALF: return NDArrayType.FLOAT16; case LONG: return NDArrayType.INT64; case INT: return NDArrayType.INT32; case SHORT: return NDArrayType.INT16; case BYTE: return NDArrayType.INT8; case UBYTE: return NDArrayType.UINT8; case BOOL: return NDArrayType.BOOL; case UTF8: return NDArrayType.UTF8; case BFLOAT16: return NDArrayType.BFLOAT16; case UINT16: return NDArrayType.UINT16; case UINT32: return NDArrayType.UINT32; case UINT64: return NDArrayType.UINT64; case COMPRESSED: case UNKNOWN: default: throw new UnsupportedOperationException("Unknown or not supported type: " + dataType); } } public static DataType typeNDArrayTypeToNd4j(@NonNull NDArrayType type) { switch (type){ case DOUBLE: return DataType.DOUBLE; case FLOAT: return DataType.FLOAT; case FLOAT16: return DataType.FLOAT16; case BFLOAT16: return DataType.BFLOAT16; case INT64: return DataType.INT64; case INT32: return DataType.INT32; case INT16: return DataType.INT16; case INT8: return DataType.INT8; case UINT64: return DataType.UINT64; case UINT32: return DataType.UINT32; case UINT16: return DataType.UINT16; case UINT8: return DataType.UINT8; case BOOL: return DataType.BOOL; case UTF8: return DataType.UTF8; default: throw new UnsupportedOperationException("Unable to convert datatype to ND4J datatype: " + type); } } }
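The two mappings above are inverses for every supported type, so a round trip should always return the original; a quick sketch:

import ai.konduit.serving.pipeline.api.data.NDArrayType;
import org.nd4j.linalg.api.buffer.DataType;

public class TypeMappingSketch {
    public static void main(String[] args) {
        for (DataType dt : new DataType[]{DataType.FLOAT, DataType.INT32, DataType.BOOL}) {
            NDArrayType t = ND4JUtil.typeNd4jToNDArrayType(dt);
            DataType back = ND4JUtil.typeNDArrayTypeToNd4j(t);
            System.out.println(dt + " -> " + t + " -> " + back);  //back == dt
        }
    }
}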
0
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow/step/KonduitServingNd4jTensorflowJsonMapping.java
package ai.konduit.serving.models.nd4j.tensorflow.step;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingNd4jTensorflowJsonMapping implements JsonSubTypesMapping {
    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow/step/Nd4jTensorFlowPipelineStepRunnerFactory.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.nd4j.tensorflow.step; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class Nd4jTensorFlowPipelineStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof Nd4jTensorFlowStep; } @Override public Nd4jTensorFlowRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run step of type: %s", step.getClass()); return new Nd4jTensorFlowRunner((Nd4jTensorFlowStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow/step/Nd4jTensorFlowRunner.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.nd4j.tensorflow.step; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.protocol.URIResolver; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import lombok.NonNull; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.nd4j.common.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.tensorflow.conversion.graphrunner.GraphRunner; import java.io.File; import java.util.LinkedHashMap; import java.util.Map; @Slf4j @CanRun(Nd4jTensorFlowStep.class) public class Nd4jTensorFlowRunner implements PipelineStepRunner { private final Nd4jTensorFlowStep step; private GraphRunner sess; @SneakyThrows public Nd4jTensorFlowRunner(@NonNull Nd4jTensorFlowStep step) { this.step = step; File origFile = URIResolver.getFile(step.modelUri()); Preconditions.checkState(origFile.exists(), "Model file does not exist: " + step.modelUri()); sess = GraphRunner.builder() .inputNames(step.inputNames()) .graphPath(origFile) .outputNames(step.outputNames()). build(); } @Override public void close() { sess.close(); } @Override public PipelineStep getPipelineStep() { return step; } @Override public Data exec(Context ctx, Data data) { Preconditions.checkState(step.inputNames() != null, "TensorFlowStep input array names are not set (null)"); Map<String,INDArray> inputData = new LinkedHashMap<>(); for(String key : data.keys()) { NDArray ndArray = data.getNDArray(key); INDArray arr = ndArray.getAs(INDArray.class); inputData.put(key,arr); } Map<String, INDArray> graphOutput = this.sess.run(inputData); Data out = Data.empty(); for (Map.Entry<String, INDArray> entry : graphOutput.entrySet()) { out.put(entry.getKey(), NDArray.create(entry.getValue())); } return out; } }
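A minimal sketch of wiring this runner up by hand. The model path and tensor names are placeholders that must match the actual frozen graph, and a real pipeline would supply the Context (this runner's exec does not use it):

import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.NDArray;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class TfRunnerSketch {
    public static void main(String[] args) {
        Nd4jTensorFlowStep step = new Nd4jTensorFlowStep()
                .modelUri("file:///path/to/frozen_graph.pb")  //placeholder
                .inputNames("input")                          //placeholder tensor name
                .outputNames("output");                       //placeholder tensor name
        Nd4jTensorFlowRunner runner = new Nd4jTensorFlowPipelineStepRunnerFactory().create(step);
        Data in = Data.singleton("input", NDArray.create(Nd4j.rand(DataType.FLOAT, 1, 4)));
        Data out = runner.exec(null, in);                     //Context is unused by this runner
        System.out.println(out.getNDArray("output").getAs(INDArray.class));
        runner.close();
    }
}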
0
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow/0.3.0/ai/konduit/serving/models/nd4j/tensorflow/step/Nd4jTensorflowModuleInfo.java
package ai.konduit.serving.models.nd4j.tensorflow.step; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-nd4j-tensorflow") //TODO AB 2020/05/29 Need to add required dependencies - CPU/GPU public class Nd4jTensorflowModuleInfo { private Nd4jTensorflowModuleInfo(){} }
0
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow-config/0.3.0/ai/konduit/serving/models/nd4j/tensorflow
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow-config/0.3.0/ai/konduit/serving/models/nd4j/tensorflow/step/KonduitServingNd4jTensorflowConfigJsonMapping.java
package ai.konduit.serving.models.nd4j.tensorflow.step;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingNd4jTensorflowConfigJsonMapping implements JsonSubTypesMapping {
    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        l.add(new JsonSubType("ND4JTENSORFLOW", ai.konduit.serving.models.nd4j.tensorflow.step.Nd4jTensorFlowStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow-config/0.3.0/ai/konduit/serving/models/nd4j/tensorflow
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow-config/0.3.0/ai/konduit/serving/models/nd4j/tensorflow/step/Nd4jTensorFlowStep.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.nd4j.tensorflow.step; import ai.konduit.serving.annotation.json.JsonName; import ai.konduit.serving.pipeline.api.step.PipelineStep; import io.swagger.v3.oas.annotations.media.Schema; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.experimental.Accessors; import lombok.experimental.Tolerate; import java.util.Arrays; import java.util.List; @Data @NoArgsConstructor @AllArgsConstructor @Accessors(fluent = true) @JsonName("ND4JTENSORFLOW") @Schema(description = "A pipeline step that configures a TensorFlow model that is to be executed based on nd4j graph runner.") public class Nd4jTensorFlowStep implements PipelineStep { @Schema(description = "A list of names of the input placeholders.") private List<String> inputNames; @Schema(description = "A list of names of the output arrays - i.e., what should be predicted.") private List<String> outputNames; /* Uncomment when it's needed. The main thing is to not have INDArray in step configurations which breaks the design * @Schema(description = "A map of constants") * private Map<String, INDArray> constants = new HashMap<>(); */ @Schema(description = "Uniform Resource Identifier of model") private String modelUri; @Tolerate public Nd4jTensorFlowStep inputNames(String... inputNames) { return this.inputNames(Arrays.asList(inputNames)); } @Tolerate public Nd4jTensorFlowStep outputNames(String... outputNames) { return this.outputNames(Arrays.asList(outputNames)); } }
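Because the step is a plain bean with fluent accessors and a registered JSON name, building and serializing one takes a couple of lines. A sketch; the JSON shape in the comment is an assumption based on the ND4JTENSORFLOW mapping above, and toJson() assumes PipelineStep inherits the TextConfig defaults:

public class StepConfigSketch {
    public static void main(String[] args) {
        Nd4jTensorFlowStep step = new Nd4jTensorFlowStep()
                .modelUri("file:///path/to/frozen_graph.pb")  //placeholder
                .inputNames("in0", "in1")                     //@Tolerate varargs overload
                .outputNames("out");
        System.out.println(step.toJson());
        //illustrative output: {"@type":"ND4JTENSORFLOW","inputNames":["in0","in1"],"outputNames":["out"],...}
    }
}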
0
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow-config/0.3.0/ai/konduit/serving/models/nd4j/tensorflow
java-sources/ai/konduit/serving/konduit-serving-nd4j-tensorflow-config/0.3.0/ai/konduit/serving/models/nd4j/tensorflow/step/Nd4jTensorflowConfigModuleInfo.java
package ai.konduit.serving.models.nd4j.tensorflow.step; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-nd4j-tensorflow-config") //TODO AB 2020/05/29 Need to add required dependencies - CPU/GPU public class Nd4jTensorflowConfigModuleInfo { private Nd4jTensorflowConfigModuleInfo(){} }
0
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx/KonduitServingOnnxJsonMapping.java
package ai.konduit.serving.models.onnx;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingOnnxJsonMapping implements JsonSubTypesMapping {
    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx/ONNXModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.onnx; import ai.konduit.serving.annotation.module.InheritRequiredDependencies; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-onnx") @InheritRequiredDependencies("konduit-serving-nd4j") public class ONNXModuleInfo { private ONNXModuleInfo(){ } }
0
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx/step/ONNXPipelineStepRunnerFactory.java
/* * * * ****************************************************************************** * * * Copyright (c) 2020 Konduit AI. * * * * * * This program and the accompanying materials are made available under the * * * terms of the Apache License, Version 2.0 which is available at * * * https://www.apache.org/licenses/LICENSE-2.0. * * * * * * Unless required by applicable law or agreed to in writing, software * * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * * License for the specific language governing permissions and limitations * * * under the License. * * * * * * SPDX-License-Identifier: Apache-2.0 * * ***************************************************************************** * * */ package ai.konduit.serving.models.onnx.step; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import ai.konduit.serving.pipeline.api.step.PipelineStepRunnerFactory; import org.nd4j.common.base.Preconditions; public class ONNXPipelineStepRunnerFactory implements PipelineStepRunnerFactory { @Override public boolean canRun(PipelineStep step) { return step instanceof ONNXStep; } @Override public PipelineStepRunner create(PipelineStep step) { Preconditions.checkState(canRun(step), "Unable to run pipeline step: %s", step.getClass()); return new ONNXRunner((ONNXStep) step); } }
0
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx/step/ONNXRunner.java
/* * * * ****************************************************************************** * * * Copyright (c) 2020 Konduit AI. * * * * * * This program and the accompanying materials are made available under the * * * terms of the Apache License, Version 2.0 which is available at * * * https://www.apache.org/licenses/LICENSE-2.0. * * * * * * Unless required by applicable law or agreed to in writing, software * * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * * License for the specific language governing permissions and limitations * * * under the License. * * * * * * SPDX-License-Identifier: Apache-2.0 * * ***************************************************************************** * * */ package ai.konduit.serving.models.onnx.step; import ai.konduit.serving.annotation.runner.CanRun; import ai.konduit.serving.models.onnx.utils.ONNXUtils; import ai.konduit.serving.pipeline.api.context.Context; import ai.konduit.serving.pipeline.api.data.Data; import ai.konduit.serving.pipeline.api.data.NDArray; import ai.konduit.serving.pipeline.api.step.PipelineStep; import ai.konduit.serving.pipeline.api.step.PipelineStepRunner; import lombok.extern.slf4j.Slf4j; import org.bytedeco.javacpp.*; import org.bytedeco.onnxruntime.*; import org.nd4j.common.base.Preconditions; import org.nd4j.linalg.api.buffer.DataBuffer; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import java.util.LinkedHashMap; import java.util.Map; import java.util.UUID; import static ai.konduit.serving.models.onnx.utils.ONNXUtils.getDataBuffer; import static ai.konduit.serving.models.onnx.utils.ONNXUtils.getTensor; import static org.bytedeco.onnxruntime.global.onnxruntime.*; @Slf4j @CanRun({ONNXStep.class}) public class ONNXRunner implements PipelineStepRunner { private ONNXStep onnxStep; private Session session; private RunOptions runOptions; private MemoryInfo memoryInfo; private AllocatorWithDefaultOptions allocator; private SessionOptions sessionOptions; private static Env env; private Pointer bp; public ONNXRunner(ONNXStep onnxStep) { this.onnxStep = onnxStep; if(env == null) { env = new Env(ONNXUtils.getOnnxLogLevelFromLogger(log), new BytePointer("konduit-serving-onnx-session-" + UUID.randomUUID().toString())); env.retainReference(); } sessionOptions = new SessionOptions(); sessionOptions.SetGraphOptimizationLevel(ORT_ENABLE_EXTENDED); sessionOptions.SetIntraOpNumThreads(1); sessionOptions.retainReference(); allocator = new AllocatorWithDefaultOptions(); allocator.retainReference(); bp = Loader.getPlatform().toLowerCase().startsWith("windows") ? new CharPointer(onnxStep.modelUri()) : new BytePointer(onnxStep.modelUri()); runOptions = new RunOptions(); memoryInfo = MemoryInfo.CreateCpu(OrtArenaAllocator, OrtMemTypeDefault); session = new Session(env, bp, sessionOptions); //retain the session reference to prevent pre emptive release of the session. 
session.retainReference(); } @Override public void close() { if(session != null) { session.close(); } sessionOptions.releaseReference(); allocator.releaseReference(); runOptions.releaseReference(); } @Override public PipelineStep getPipelineStep() { return onnxStep; } @Override public Data exec(Context ctx, Data data) { Data ret = Data.empty(); long numInputNodes = session.GetInputCount(); long numOutputNodes = session.GetOutputCount(); PointerPointer<BytePointer> inputNodeNames = new PointerPointer<>(numInputNodes); PointerPointer<BytePointer> outputNodeNames = new PointerPointer<>(numOutputNodes); Value inputVal = new Value(numInputNodes); for (int i = 0; i < numInputNodes; i++) { BytePointer inputName = session.GetInputName(i, allocator.asOrtAllocator()); inputNodeNames.put(i, inputName); INDArray arr = data.getNDArray(inputName.getString()).getAs(INDArray.class); Value inputTensor = getTensor(arr, memoryInfo); Preconditions.checkState(inputTensor.IsTensor(),"Input must be a tensor."); inputVal.position(i).put(inputTensor); } //reset position after iterating inputVal.position(0); for (int i = 0; i < numOutputNodes; i++) { BytePointer outputName = session.GetOutputName(i, allocator.asOrtAllocator()); outputNodeNames.put(i, outputName); } ValueVector outputVector = session.Run( runOptions, inputNodeNames, inputVal, numInputNodes, outputNodeNames, numOutputNodes); Map<String, INDArray> output = new LinkedHashMap<>(); for (int i = 0; i < numOutputNodes; i++) { Value outValue = outputVector.get(i); DataBuffer buffer = getDataBuffer(outValue); output.put((outputNodeNames.get(BytePointer.class, i)).getString(), Nd4j.create(buffer)); } Preconditions.checkNotNull(output,"Output must not be null!"); for(String outputName : onnxStep.outputNames()) { Preconditions.checkNotNull(output.get(outputName),"Output name " + outputName + " not found in output!"); ret.put(outputName, NDArray.create(output.get(outputName))); } return ret; } }
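A minimal sketch of using this runner directly; the model path and node names are placeholders and must match the graph being loaded (note that modelUri is passed straight to the ONNX Runtime session here, so a plain filesystem path is expected):

import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.data.NDArray;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.factory.Nd4j;

import java.util.Arrays;

public class OnnxRunnerSketch {
    public static void main(String[] args) {
        ONNXStep step = new ONNXStep(
                "/path/to/model.onnx",      //placeholder
                Arrays.asList("input"),     //placeholder input node name
                Arrays.asList("output"));   //placeholder output node name
        PipelineStepRunner runner = new ONNXPipelineStepRunnerFactory().create(step);
        Data out = runner.exec(null, Data.singleton("input",
                NDArray.create(Nd4j.rand(DataType.FLOAT, 1, 3, 224, 224))));
        System.out.println(Arrays.toString(
                out.getNDArray("output").getAs(org.nd4j.linalg.api.ndarray.INDArray.class).shape()));
        runner.close();
    }
}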
0
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx
java-sources/ai/konduit/serving/konduit-serving-onnx/0.3.0/ai/konduit/serving/models/onnx/utils/ONNXUtils.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.models.onnx.utils;

import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.indexer.*;
import org.bytedeco.onnxruntime.MemoryInfo;
import org.bytedeco.onnxruntime.Value;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.slf4j.Logger;

import static org.bytedeco.onnxruntime.global.onnxruntime.*;
import static org.nd4j.linalg.api.buffer.DataType.*;

public class ONNXUtils {

    /**
     * Validate that the given array has the expected data type.
     *
     * @param expected the data type the array is required to have
     * @param array    the array to validate
     */
    public static void validateType(DataType expected, INDArray array) {
        if (!array.dataType().equals(expected))
            throw new RuntimeException("INDArray data type (" + array.dataType() + ") does not match required ONNX data type (" + expected + ")");
    }

    /**
     * Return the {@link DataType} for the given onnx tensor element data type
     *
     * @param dataType the onnx tensor element data type
     * @return the equivalent nd4j data type
     */
    public static DataType dataTypeForOnnxType(int dataType) {
        if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT) {
            return FLOAT;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8) {
            return INT8;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_DOUBLE) {
            return DOUBLE;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_BOOL) {
            return BOOL;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8) {
            return UINT8;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT16) {
            return UINT16;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_INT16) {
            return INT16;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_INT32) {
            return INT32;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_INT64) {
            return INT64;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16) {
            return FLOAT16;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT32) {
            return UINT32;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT64) {
            return UINT64;
        } else if(dataType == ONNX_TENSOR_ELEMENT_DATA_TYPE_BFLOAT16) {
            return BFLOAT16;
        } else
            throw new IllegalArgumentException("Illegal data type " + dataType);
    }

    /**
     * Return the onnx tensor element data type for the given {@link DataType}
     *
     * @param dataType the nd4j data type
     * @return the equivalent onnx tensor element data type
     */
    public static int onnxTypeForDataType(DataType dataType) {
        if(dataType == FLOAT) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT;
        } else if(dataType == INT8) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8;
        } else if(dataType == DOUBLE) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_DOUBLE;
        } else if(dataType == BOOL) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_BOOL;
        } else if(dataType == UINT8) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8;
        } else if(dataType == UINT16) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT16;
        } else if(dataType == INT16) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_INT16;
        } else if(dataType == INT32) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_INT32;
        } else if(dataType == INT64) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_INT64;
        } else if(dataType == FLOAT16) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16;
        } else if(dataType == UINT32) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT32;
        } else if(dataType == UINT64) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT64;
        } else if(dataType == BFLOAT16) {
            return ONNX_TENSOR_ELEMENT_DATA_TYPE_BFLOAT16;
        } else
            throw new IllegalArgumentException("Illegal data type " + dataType);
    }

    /**
     * Convert an onnx {@link Value} into an {@link INDArray}
     *
     * @param value the value to convert
     * @return the equivalent INDArray
     */
    public static INDArray getArray(Value value) {
        //Note: the tensor element type (not the general ONNX type of the value) determines the nd4j data type
        DataType dataType = dataTypeForOnnxType(value.GetTensorTypeAndShapeInfo().GetElementType());
        LongPointer shape = value.GetTensorTypeAndShapeInfo().GetShape();
        long[] shapeConvert;
        if(shape != null) {
            shapeConvert = new long[(int) value.GetTensorTypeAndShapeInfo().GetDimensionsCount()];
            shape.get(shapeConvert);
        } else {
            shapeConvert = new long[]{1};
        }

        DataBuffer getBuffer = getDataBuffer(value);
        Preconditions.checkState(dataType.equals(getBuffer.dataType()), "Data type must be equivalent as specified by the onnx metadata.");
        return Nd4j.create(getBuffer, shapeConvert, Nd4j.getStrides(shapeConvert), 0);
    }

    /**
     * Get the onnx log level relative to the given slf4j logger.
     * Trace or debug will return ORT_LOGGING_LEVEL_VERBOSE
     * Info will return ORT_LOGGING_LEVEL_INFO
     * Warn returns ORT_LOGGING_LEVEL_WARNING
     * Error returns ORT_LOGGING_LEVEL_ERROR
     *
     * The default is info
     *
     * @param logger the slf4j logger to get the onnx log level for
     * @return the onnx logging level
     */
    public static int getOnnxLogLevelFromLogger(Logger logger) {
        if(logger.isTraceEnabled() || logger.isDebugEnabled()) {
            return ORT_LOGGING_LEVEL_VERBOSE;
        } else if(logger.isInfoEnabled()) {
            return ORT_LOGGING_LEVEL_INFO;
        } else if(logger.isWarnEnabled()) {
            return ORT_LOGGING_LEVEL_WARNING;
        } else if(logger.isErrorEnabled()) {
            return ORT_LOGGING_LEVEL_ERROR;
        }

        return ORT_LOGGING_LEVEL_INFO;
    }

    /**
     * Get an onnx tensor from an ndarray.
     *
     * @param ndArray    the ndarray to get the value from
     * @param memoryInfo the {@link MemoryInfo} to use.
     *                   Can be created with:
     *                   MemoryInfo memoryInfo = MemoryInfo.CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
     * @return the onnx tensor wrapping the ndarray's buffer (no copy is made)
     */
    public static Value getTensor(INDArray ndArray, MemoryInfo memoryInfo) {
        Pointer inputTensorValues = ndArray.data().pointer();
        long sizeInBytes = ndArray.length() * ndArray.data().getElementSize();

        /*
         * Native signature for reference:
         * static Value CreateTensor(const OrtMemoryInfo* info, void* p_data, size_t p_data_byte_count, const int64_t* shape, size_t shape_len,
         *                           ONNXTensorElementDataType type)
         */
        LongPointer dims = new LongPointer(ndArray.shape());
        Value ret = Value.CreateTensor(
                memoryInfo.asOrtMemoryInfo(),
                inputTensorValues,
                sizeInBytes,
                dims,
                ndArray.rank(),
                onnxTypeForDataType(ndArray.dataType()));
        return ret;
    }

    /**
     * Get the data buffer from the given value
     *
     * @param tens the value to get the data buffer from
     * @return the equivalent data buffer
     */
    public static DataBuffer getDataBuffer(Value tens) {
        try (PointerScope scope = new PointerScope()) {
            DataBuffer buffer = null;
            int type = tens.GetTensorTypeAndShapeInfo().GetElementType();
            long size = tens.GetTensorTypeAndShapeInfo().GetElementCount();
            switch (type) {
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT:
                    FloatPointer pFloat = tens.GetTensorMutableDataFloat().capacity(size);
                    FloatIndexer floatIndexer = FloatIndexer.create(pFloat);
                    buffer = Nd4j.createBuffer(pFloat, DataType.FLOAT, size, floatIndexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8:
                    BytePointer pUint8 = tens.GetTensorMutableDataUByte().capacity(size);
                    Indexer uint8Indexer = ByteIndexer.create(pUint8);
                    buffer = Nd4j.createBuffer(pUint8, DataType.UINT8, size, uint8Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8:
                    BytePointer pInt8 = tens.GetTensorMutableDataByte().capacity(size);
                    Indexer int8Indexer = ByteIndexer.create(pInt8);
                    buffer = Nd4j.createBuffer(pInt8, DataType.INT8, size, int8Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT16:
                    ShortPointer pUint16 = tens.GetTensorMutableDataUShort().capacity(size);
                    Indexer uint16Indexer = ShortIndexer.create(pUint16);
                    buffer = Nd4j.createBuffer(pUint16, DataType.UINT16, size, uint16Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT16:
                    ShortPointer pInt16 = tens.GetTensorMutableDataShort().capacity(size);
                    Indexer int16Indexer = ShortIndexer.create(pInt16);
                    buffer = Nd4j.createBuffer(pInt16, INT16, size, int16Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT32:
                    IntPointer pInt32 = tens.GetTensorMutableDataInt().capacity(size);
                    Indexer int32Indexer = IntIndexer.create(pInt32);
                    buffer = Nd4j.createBuffer(pInt32, DataType.INT32, size, int32Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT64:
                    LongPointer pInt64 = tens.GetTensorMutableDataLong().capacity(size);
                    Indexer int64Indexer = LongIndexer.create(pInt64);
                    buffer = Nd4j.createBuffer(pInt64, DataType.INT64, size, int64Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING:
                    BytePointer pString = tens.GetTensorMutableDataByte().capacity(size);
                    Indexer stringIndexer = ByteIndexer.create(pString);
                    buffer = Nd4j.createBuffer(pString, DataType.INT8, size, stringIndexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_BOOL:
                    BoolPointer pBool = tens.GetTensorMutableDataBool().capacity(size);
                    //Converting from JavaCPP Bool to Boolean here - C++ bool type size is not defined, could cause problems on some platforms
                    Indexer boolIndexer = BooleanIndexer.create(new BooleanPointer(pBool));
                    buffer = Nd4j.createBuffer(pBool, DataType.BOOL, size, boolIndexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16:
                    ShortPointer pFloat16 = tens.GetTensorMutableDataShort().capacity(size);
                    Indexer float16Indexer = ShortIndexer.create(pFloat16);
                    buffer = Nd4j.createBuffer(pFloat16, DataType.FLOAT16, size, float16Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_DOUBLE:
                    DoublePointer pDouble = tens.GetTensorMutableDataDouble().capacity(size);
                    Indexer doubleIndexer = DoubleIndexer.create(pDouble);
                    buffer = Nd4j.createBuffer(pDouble, DataType.DOUBLE, size, doubleIndexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT32:
                    IntPointer pUint32 = tens.GetTensorMutableDataUInt().capacity(size);
                    Indexer uint32Indexer = IntIndexer.create(pUint32);
                    buffer = Nd4j.createBuffer(pUint32, DataType.UINT32, size, uint32Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT64:
                    LongPointer pUint64 = tens.GetTensorMutableDataULong().capacity(size);
                    Indexer uint64Indexer = LongIndexer.create(pUint64);
                    buffer = Nd4j.createBuffer(pUint64, DataType.UINT64, size, uint64Indexer);
                    break;
                case ONNX_TENSOR_ELEMENT_DATA_TYPE_BFLOAT16:
                    ShortPointer pBfloat16 = tens.GetTensorMutableDataShort().capacity(size);
                    Indexer bfloat16Indexer = ShortIndexer.create(pBfloat16);
                    buffer = Nd4j.createBuffer(pBfloat16, DataType.BFLOAT16, size, bfloat16Indexer);
                    break;
                default:
                    throw new RuntimeException("Unsupported data type encountered");
            }

            return buffer;
        }
    }
}
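A sketch of the tensor round trip these helpers provide. Note that getTensor wraps the INDArray's existing buffer rather than copying it, so the source array must remain alive while the Value is in use:

import org.bytedeco.onnxruntime.MemoryInfo;
import org.bytedeco.onnxruntime.Value;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

import static org.bytedeco.onnxruntime.global.onnxruntime.*;

public class OnnxTensorSketch {
    public static void main(String[] args) {
        INDArray arr = Nd4j.rand(DataType.FLOAT, 2, 2);
        MemoryInfo memoryInfo = MemoryInfo.CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
        Value tensor = ONNXUtils.getTensor(arr, memoryInfo);  //zero-copy wrap of arr's buffer
        INDArray back = ONNXUtils.getArray(tensor);           //read back through the data buffer
        System.out.println(arr.equals(back));                 //expected: true
    }
}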
0
java-sources/ai/konduit/serving/konduit-serving-onnx-config/0.3.0/ai/konduit/serving/models/onnx
java-sources/ai/konduit/serving/konduit-serving-onnx-config/0.3.0/ai/konduit/serving/models/onnx/step/KonduitServingOnnxConfigJsonMapping.java
package ai.konduit.serving.models.onnx.step;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingOnnxConfigJsonMapping implements JsonSubTypesMapping {
    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        l.add(new JsonSubType("ONNX", ai.konduit.serving.models.onnx.step.ONNXStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-onnx-config/0.3.0/ai/konduit/serving/models/onnx
java-sources/ai/konduit/serving/konduit-serving-onnx-config/0.3.0/ai/konduit/serving/models/onnx/step/ONNXConfigModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.models.onnx.step; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-onnx-config") public class ONNXConfigModuleInfo { private ONNXConfigModuleInfo(){ } }
0
java-sources/ai/konduit/serving/konduit-serving-onnx-config/0.3.0/ai/konduit/serving/models/onnx
java-sources/ai/konduit/serving/konduit-serving-onnx-config/0.3.0/ai/konduit/serving/models/onnx/step/ONNXStep.java
/*
 * ******************************************************************************
 * *
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * See the NOTICE file distributed with this work for additional
 * * information regarding copyright ownership.
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.models.onnx.step;

import ai.konduit.serving.annotation.json.JsonName;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import lombok.experimental.Tolerate;
import org.nd4j.shade.jackson.annotation.JsonProperty;

import java.util.Arrays;
import java.util.List;

@Data
@NoArgsConstructor
@Accessors(fluent = true)
@JsonName("ONNX")
@Schema(description = "A pipeline step that configures an ONNX model that is to be executed.")
public class ONNXStep implements PipelineStep {

    @Schema(description = "Specifies the location of a saved model file.")
    private String modelUri;

    @Schema(description = "A list of names of the input placeholders (for a computation graph with multiple inputs, " +
            "values from the input data keys are mapped to the corresponding computation graph inputs).")
    private List<String> inputNames;

    @Schema(description = "A list of names of the output placeholders (for a computation graph with multiple outputs, " +
            "the values of these output keys are mapped from the computation graph outputs to the output data keys).")
    private List<String> outputNames;

    @Schema(description = "Optional, usually unnecessary. Specifies a class used to customize how the model is loaded, " +
            "for cases where the default model loading behaviour is not sufficient.")
    private String loaderClass;

    public ONNXStep(@JsonProperty("modelUri") String modelUri, @JsonProperty("inputNames") List<String> inputNames,
                    @JsonProperty("outputNames") List<String> outputNames){
        this.modelUri = modelUri;
        this.inputNames = inputNames;
        this.outputNames = outputNames;
    }

    @Tolerate
    public ONNXStep inputNames(String... inputNames) {
        return this.inputNames(Arrays.asList(inputNames));
    }

    @Tolerate
    public ONNXStep outputNames(String... outputNames) {
        return this.outputNames(Arrays.asList(outputNames));
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/KonduitServingPipelineJsonMapping.java
package ai.konduit.serving.pipeline;

import ai.konduit.serving.pipeline.api.serde.JsonSubType;
import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;

import java.util.ArrayList;
import java.util.List;

//GENERATED CLASS DO NOT EDIT
public class KonduitServingPipelineJsonMapping implements JsonSubTypesMapping {
    @Override
    public List<JsonSubType> getSubTypesMapping() {
        List<JsonSubType> l = new ArrayList<>();
        l.add(new JsonSubType("SSD_TO_BBOX", ai.konduit.serving.pipeline.impl.step.ml.ssd.SSDToBoundingBoxStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        l.add(new JsonSubType("SWITCH_OUTPUT", ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchOutput.class, ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep.class));
        l.add(new JsonSubType("ANY", ai.konduit.serving.pipeline.impl.pipeline.graph.AnyStep.class, ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep.class));
        l.add(new JsonSubType("SWITCH", ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchStep.class, ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep.class));
        l.add(new JsonSubType("INT_SWITCH", ai.konduit.serving.pipeline.impl.pipeline.graph.switchfn.DataIntSwitchFn.class, ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchFn.class));
        l.add(new JsonSubType("YOLO_BBOX", ai.konduit.serving.pipeline.impl.step.bbox.yolo.YoloToBoundingBoxStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        l.add(new JsonSubType("LOGGING", ai.konduit.serving.pipeline.impl.step.logging.LoggingStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        l.add(new JsonSubType("MERGE", ai.konduit.serving.pipeline.impl.pipeline.graph.MergeStep.class, ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep.class));
        l.add(new JsonSubType("REGRESSION_OUTPUT", ai.konduit.serving.pipeline.impl.step.ml.regression.RegressionOutputStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        l.add(new JsonSubType("TIME_LOOP_TRIGGER", ai.konduit.serving.pipeline.impl.pipeline.loop.TimeLoopTrigger.class, ai.konduit.serving.pipeline.api.pipeline.Trigger.class));
        l.add(new JsonSubType("SIMPLE_LOOP_TRIGGER", ai.konduit.serving.pipeline.impl.pipeline.loop.SimpleLoopTrigger.class, ai.konduit.serving.pipeline.api.pipeline.Trigger.class));
        l.add(new JsonSubType("CLASSIFIER_OUTPUT", ai.konduit.serving.pipeline.impl.step.ml.classifier.ClassifierOutputStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        l.add(new JsonSubType("BOUNDING_BOX_FILTER", ai.konduit.serving.pipeline.impl.step.bbox.filter.BoundingBoxFilterStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        l.add(new JsonSubType("STRING_SWITCH", ai.konduit.serving.pipeline.impl.pipeline.graph.switchfn.DataStringSwitchFn.class, ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchFn.class));
        l.add(new JsonSubType("BOUNDING_BOX_TO_POINT", ai.konduit.serving.pipeline.impl.step.bbox.point.BoundingBoxToPointStep.class, ai.konduit.serving.pipeline.api.step.PipelineStep.class));
        return l;
    }
}
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/PipelineModuleInfo.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline; import ai.konduit.serving.annotation.module.ModuleInfo; @ModuleInfo("konduit-serving-pipeline") public class PipelineModuleInfo { private PipelineModuleInfo() { } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/BaseModelPipelineStep.java
/* ******************************************************************************
 * Copyright (c) 2022 Konduit K.K.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package ai.konduit.serving.pipeline.api;

import ai.konduit.serving.pipeline.api.step.PipelineStep;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.SuperBuilder;

@AllArgsConstructor
@SuperBuilder
@Data
public abstract class BaseModelPipelineStep<T extends Configuration> implements PipelineStep {

    @Schema(description = "Uniform Resource Identifier of a model.")
    private String modelUri;

    @Schema(description = "Generic configuration object for the model.")
    private T config;
}
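A hypothetical subclass sketch showing how a concrete model step would extend this base; the class name and field are invented for illustration:

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.SuperBuilder;

//Hypothetical example - not part of the module
@Data
@EqualsAndHashCode(callSuper = true)
@SuperBuilder
public class MyModelStep extends BaseModelPipelineStep<Configuration> {
    @Schema(description = "Batch size to use at inference time.")
    private int batchSize;
}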
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/Configuration.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.api; import java.util.Map; import java.util.Set; public interface Configuration extends TextConfig { Set<String> keys(); Map<String,Object> asMap(); Object get(String key); Object getOrDefault(String key, Object defaultValue); }
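A minimal map-backed implementation sketch of this interface (hypothetical, for illustration):

import java.util.Collections;
import java.util.Map;
import java.util.Set;

//Hypothetical example - not part of the module
public class MapConfiguration implements Configuration {
    private final Map<String, Object> map;

    public MapConfiguration(Map<String, Object> map) {
        this.map = map;
    }

    @Override
    public Set<String> keys() {
        return Collections.unmodifiableSet(map.keySet());
    }

    @Override
    public Map<String, Object> asMap() {
        return Collections.unmodifiableMap(map);
    }

    @Override
    public Object get(String key) {
        return map.get(key);
    }

    @Override
    public Object getOrDefault(String key, Object defaultValue) {
        return map.getOrDefault(key, defaultValue);
    }
}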
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/TextConfig.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.api; import ai.konduit.serving.pipeline.util.ObjectMappers; /** * TextConfig is an interface for any configuration in Konduit Serving that should be convertable to/from JSON and YAML * This interface does two things: * (a) Adds default toJson() and toYaml() methods to the class, using Jackson * (b) Is used in testing to provide coverage tracking for to/from JSON/YAML testing * * @author Alex Black */ public interface TextConfig { /** * Convert a configuration to a JSON string * * @return convert this object to a string */ default String toJson() { return ObjectMappers.toJson(this); } /** * Convert a configuration to a YAML string * * @return the yaml representation of this configuration */ default String toYaml() { return ObjectMappers.toYaml(this); } }
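Usage is just the two default methods on any configuration object. A sketch, assuming the step classes above inherit TextConfig through PipelineStep:

public class TextConfigSketch {
    public static void main(String[] args) {
        ai.konduit.serving.models.nd4j.tensorflow.step.Nd4jTensorFlowStep step =
                new ai.konduit.serving.models.nd4j.tensorflow.step.Nd4jTensorFlowStep()
                        .inputNames("in")
                        .outputNames("out");
        System.out.println(step.toJson());  //Jackson-based JSON via ObjectMappers
        System.out.println(step.toYaml());  //YAML via ObjectMappers
    }
}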
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/Context.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.api.context; /** * Context is used for features such as profiling and metrics. * * See {@link Profiler} and {@link Metrics} for more details * * @author Alex Black */ public interface Context { Metrics metrics(); Profiler profiler(); }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/Counter.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.api.context; public interface Counter { void increment(); void increment(double value); }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/Gauge.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.api.context; public interface Gauge { double value(); }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/Metrics.java
/* * ****************************************************************************** * * Copyright (c) 2022 Konduit K.K. * * * * This program and the accompanying materials are made available under the * * terms of the Apache License, Version 2.0 which is available at * * https://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * SPDX-License-Identifier: Apache-2.0 * ***************************************************************************** */ package ai.konduit.serving.pipeline.api.context; /** * Metrics are used to record values, for debugging and visualization. * * Instances of the Metrics interface are available within a PipelineStepRunner's exec method via the {@link Context#metrics()} * method. * * @author Alex Black */ public interface Metrics { Counter counter(String id); Timer timer(String id); Gauge gauge(String id, double number); }
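Inside a PipelineStepRunner, the Metrics instance comes from the Context passed to exec. A hypothetical pass-through runner sketch recording a counter and a gauge per call; the metric ids are arbitrary examples, and Data.keys() returning a sized collection is an assumption:

import ai.konduit.serving.pipeline.api.context.Context;
import ai.konduit.serving.pipeline.api.data.Data;
import ai.konduit.serving.pipeline.api.step.PipelineStep;
import ai.konduit.serving.pipeline.api.step.PipelineStepRunner;

//Hypothetical example - not part of the module
public class MetricsDemoRunner implements PipelineStepRunner {
    private final PipelineStep step;

    public MetricsDemoRunner(PipelineStep step) {
        this.step = step;
    }

    @Override
    public void close() { }

    @Override
    public PipelineStep getPipelineStep() {
        return step;
    }

    @Override
    public Data exec(Context ctx, Data data) {
        ctx.metrics().counter("requests").increment();           //one increment per exec call
        ctx.metrics().gauge("input.keys", data.keys().size());   //record the number of input keys; assumes keys() is sized
        return data;                                             //pass-through step
    }
}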
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/NoOpProfiler.java
/* ****************************************************************************** * Copyright (c) 2022 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package ai.konduit.serving.pipeline.api.context; public class NoOpProfiler implements Profiler { @Override public boolean profilerEnabled() { return false; } @Override public void eventStart(String key) { } @Override public void eventEnd(String key) { } @Override public void flushBlocking() { } @Override public void closeAll() { } }
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/PipelineCounter.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.api.context;

import ai.konduit.serving.pipeline.registry.MicrometerRegistry;

/**
 * A Counter implementation backed by a Micrometer counter, registered with the shared
 * {@link MicrometerRegistry}.
 */
public class PipelineCounter implements Counter {

    private final io.micrometer.core.instrument.Counter mmCounter;

    public PipelineCounter(String id) {
        mmCounter = io.micrometer.core.instrument.Counter.builder(id)
                .register(MicrometerRegistry.getRegistry());
    }

    @Override
    public void increment() {
        mmCounter.increment();
    }

    @Override
    public void increment(double value) {
        mmCounter.increment(value);
    }
}
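A hypothetical usage sketch, assuming MicrometerRegistry.getRegistry() returns a standard Micrometer MeterRegistry: increments made through a PipelineCounter are visible through the shared registry, since registering a counter under an existing meter id returns the existing meter. The class name and metric id are invented.

package ai.konduit.serving.pipeline.api.context;

import ai.konduit.serving.pipeline.registry.MicrometerRegistry;

// Hypothetical sketch (not part of the repo): reading a PipelineCounter's
// accumulated value back through the shared Micrometer registry.
public class PipelineCounterExample {

    public static void main(String[] args) {
        Counter c = new PipelineCounter("demo.requests");
        c.increment();       // +1
        c.increment(4.0);    // +4
        double total = MicrometerRegistry.getRegistry()
                .counter("demo.requests")
                .count();    // same meter id -> same counter: 5.0
        System.out.println(total);
    }
}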
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/PipelineGauge.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.api.context;

import ai.konduit.serving.pipeline.registry.MicrometerRegistry;
import org.nd4j.common.primitives.AtomicDouble;

/**
 * A Gauge implementation backed by a Micrometer gauge. Micrometer samples gauges lazily
 * via a (state object, value function) pair, so the current value is held in a mutable
 * {@link AtomicDouble} that {@link #set(double)} updates in place.
 */
public class PipelineGauge implements Gauge {

    private final io.micrometer.core.instrument.Gauge mmGauge;
    private final AtomicDouble d;

    public PipelineGauge(String id, double value) {
        d = new AtomicDouble(value);
        mmGauge = io.micrometer.core.instrument.Gauge
                .builder(id, d, AtomicDouble::get)
                .register(MicrometerRegistry.getRegistry());
    }

    /**
     * Set the current value of the gauge
     */
    public void set(double set) {
        d.set(set);
    }

    @Override
    public double value() {
        return mmGauge.value();
    }
}
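A hypothetical usage sketch showing why the mutable AtomicDouble holder matters: Micrometer samples the gauge through the holder at read time, so set(double) changes the reported value in place without any re-registration. The class name and gauge id are invented.

package ai.konduit.serving.pipeline.api.context;

// Hypothetical sketch (not part of the repo): updating a PipelineGauge in
// place via its AtomicDouble holder. The gauge id below is invented.
public class PipelineGaugeExample {

    public static void main(String[] args) {
        PipelineGauge g = new PipelineGauge("demo.queueDepth", 3.0);
        System.out.println(g.value());  // 3.0 - sampled from the AtomicDouble
        g.set(7.0);                     // update the holder in place
        System.out.println(g.value());  // 7.0 - same meter, new value
    }
}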
0
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api
java-sources/ai/konduit/serving/konduit-serving-pipeline/0.3.0/ai/konduit/serving/pipeline/api/context/PipelineMetrics.java
/*
 * ******************************************************************************
 * * Copyright (c) 2022 Konduit K.K.
 * *
 * * This program and the accompanying materials are made available under the
 * * terms of the Apache License, Version 2.0 which is available at
 * * https://www.apache.org/licenses/LICENSE-2.0.
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * * License for the specific language governing permissions and limitations
 * * under the License.
 * *
 * * SPDX-License-Identifier: Apache-2.0
 * *****************************************************************************
 */

package ai.konduit.serving.pipeline.api.context;

import lombok.Setter;

import java.util.HashMap;
import java.util.Map;

/**
 * A Metrics implementation for pipelines. Metric ids are namespaced as
 * "pipelineName.instanceName.stepName.id"; gauges are cached so that repeated calls with
 * the same id update the existing gauge rather than registering a new meter.
 */
public class PipelineMetrics implements Metrics {

    private final String pipelineName;
    @Setter
    private String instanceName = "default";
    @Setter
    private String stepName = "default";
    private final Map<String, PipelineGauge> gaugeMap = new HashMap<>();

    public PipelineMetrics(String name) {
        this.pipelineName = name;
    }

    private String assembleId(String id) {
        return pipelineName + "." + instanceName + "." + stepName + "." + id;
    }

    @Override
    public Counter counter(String id) {
        return new PipelineCounter(assembleId(id));
    }

    @Override
    public Timer timer(String id) {
        return new PipelineTimer(assembleId(id));
    }

    @Override
    public Gauge gauge(String id, double number) {
        String s = assembleId(id);
        PipelineGauge pg = gaugeMap.computeIfAbsent(s, k -> new PipelineGauge(k, number));
        pg.set(number);
        return pg;
    }
}
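A hypothetical usage sketch of the gauge caching behavior: repeated gauge(id, value) calls with the same id reuse one cached PipelineGauge via computeIfAbsent and simply update its value, while counters are constructed on each call. The pipeline name and metric ids are invented.

package ai.konduit.serving.pipeline.api.context;

// Hypothetical sketch (not part of the repo): demonstrating PipelineMetrics'
// id namespacing and gauge caching. All names below are invented.
public class PipelineMetricsExample {

    public static void main(String[] args) {
        PipelineMetrics metrics = new PipelineMetrics("demoPipeline");
        metrics.gauge("batchSize", 16);            // registers demoPipeline.default.default.batchSize
        Gauge g = metrics.gauge("batchSize", 32);  // same cached gauge, value updated
        System.out.println(g.value());             // 32.0
        metrics.counter("requests").increment();   // a new PipelineCounter per call
    }
}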