index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking/CvKalmanFilter.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.tracking;
import static ai.kognition.pilecv4j.image.CvMat.TRACK_MEMORY_LEAKS;
import java.io.PrintStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.StringJoiner;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.video.KalmanFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.kognition.pilecv4j.image.Closer;
import ai.kognition.pilecv4j.image.CvMat;
import ai.kognition.pilecv4j.image.Utils;
/**
* This represents a standard Kalman filter. The transition, control, and measurement matrices can be modified ({@link CvKalmanFilter#setTransitionMatrix(Mat)},
* {@link CvKalmanFilter#setControlMatrix(Mat)}, {@link CvKalmanFilter#setMeasurementMatrix(Mat)}, respectively) to get extended functionality.
*
* @see <a href="https://en.wikipedia.org/wiki/Kalman_filter">wikipedia: Kalman filter</a>
*/
public class CvKalmanFilter extends KalmanFilter implements AutoCloseable {
    private static final Logger LOGGER = LoggerFactory.getLogger(CvKalmanFilter.class);

    // Reflective handle to OpenCV's private JNI method KalmanFilter.delete(long). It is the
    // only way to deterministically release the native object from close() rather than
    // waiting for OpenCV's own finalizer.
    private static final Method nativeDelete;

    protected boolean deletedAlready = false;
    protected boolean skipOnceForDelete = false;
    // Captured at construction when TRACK_MEMORY_LEAKS is enabled; reported on leak or double-close.
    protected final RuntimeException initTrace;
    // Stack trace of the first close(); used to diagnose a subsequent double-close.
    protected RuntimeException deleteTrace = null;

    /** Dimensionality of the state vector. */
    public final int dynamicParameters;
    /** Dimensionality of the measurement vector. */
    public final int measureParameters;
    /** Dimensionality of the control vector; 0 means "no control". */
    public final int controlParameters;
    /** Numerical primitive type underlying all {@link Mat}s generated by this filter. */
    public final KalmanDataType dataType;

    static {
        CvMat.initOpenCv();
        try {
            nativeDelete = KalmanFilter.class.getDeclaredMethod("delete", long.class);
            nativeDelete.setAccessible(true);
        } catch(final NoSuchMethodException | SecurityException e) {
            throw new RuntimeException("Got an exception trying to access " + KalmanFilter.class.getSimpleName() +
                ".delete. Either the security model is too restrictive or the version of OpenCv can't be supported.", e);
        }
    }

    /** The numeric types supported by the OpenCV Kalman filter, paired with their Java primitive classes. */
    public enum KalmanDataType {
        CV_32F(CvType.CV_32F, float.class), CV_64F(CvType.CV_64F, double.class);

        /** The {@link CvType} constant for this data type. */
        public final int cvType;
        /** The corresponding Java primitive class ({@code float.class} or {@code double.class}). */
        public final Class<? extends Number> javaType;

        KalmanDataType(final int cvType, final Class<? extends Number> javaType) {
            this.cvType = cvType;
            this.javaType = javaType;
        }
    }

    /**
     * Constructs a filter with no control vector (controlParameters = 0).
     *
     * @param dynamicParameters Dimensionality of the state. Must be greater than 0.
     * @param measureParameters Dimensionality of the measurement. Must be greater than 0.
     * @param type Numerical primitive type underlying all {@link Mat} generated by the Kalman Filter.
     */
    public CvKalmanFilter(final int dynamicParameters, final int measureParameters, final KalmanDataType type) {
        this(dynamicParameters, measureParameters, 0, type);
    }

    /**
     * TODO change this to a builder pattern.
     *
     * @param dynamicParameters Dimensionality of the state. Must be greater than 0.
     * @param measureParameters Dimensionality of the measurement. Must be greater than 0.
     * @param controlParameters Dimensionality of the control vector. Default value is 0.
     * @param type Numerical primitive type underlying all {@link Mat} generated by the Kalman Filter.
     */
    public CvKalmanFilter(final int dynamicParameters, final int measureParameters, final int controlParameters, final KalmanDataType type) {
        super(dynamicParameters, measureParameters, controlParameters, type.cvType);
        this.dynamicParameters = dynamicParameters;
        this.measureParameters = measureParameters;
        this.controlParameters = controlParameters;
        this.dataType = type;
        initTrace = TRACK_MEMORY_LEAKS ? new RuntimeException("Here's where I was instantiated: ") : null;
    }

    /**
     * Updates the predicted state from the measurement and returns the posteriori state (as in {@link CvKalmanFilter#getCorrectedState()}).
     *
     * @return a new mat to be managed by the caller.
     *
     * @see CvKalmanFilter#getCorrectedState()
     */
    @Override
    public CvMat correct(final Mat measurement) {
        return CvMat.move(super.correct(measurement));
    }

    /**
     * Computes the predicted state given the control vector.
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    @Override
    public CvMat predict(final Mat control) {
        return CvMat.move(super.predict(control));
    }

    /**
     * Computes the predicted state.
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    @Override
    public CvMat predict() {
        return CvMat.move(super.predict());
    }

    @Override
    public CvMat get_statePre() {
        return CvMat.move(super.get_statePre());
    }

    /**
     * Predicted state: (x'(k)): x(k)=A*x(k-1)+B*u(k)
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getPredictionState() {
        return this.get_statePre();
    }

    /**
     * @param matrix a Mat matching the dimensions of the current prediction state.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setPredictionState(final Mat matrix) {
        if(notAllowSet(getPredictionState(), matrix))
            throw new ArithmeticException("Cannot set prediction state: wrong size.");
        super.set_statePre(matrix);
        return this;
    }

    @Override
    public CvMat get_statePost() {
        return CvMat.move(super.get_statePost());
    }

    /**
     * Corrected state: (x(k)): x(k)=x'(k)+K(k)*(z(k)-H*x'(k))
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getCorrectedState() {
        return this.get_statePost();
    }

    /**
     * @param matrix a Mat of size [{@link CvKalmanFilter#dynamicParameters},1].
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setCorrectedState(final Mat matrix) {
        if(notAllowSet(getCorrectedState(), matrix))
            throw new ArithmeticException("Cannot set corrected state: wrong size.");
        super.set_statePost(matrix);
        return this;
    }

    @Override
    public CvMat get_transitionMatrix() {
        return CvMat.move(super.get_transitionMatrix());
    }

    /**
     * State transition matrix: (A)
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getTransitionMatrix() {
        return get_transitionMatrix();
    }

    /**
     * @param transitionMatrix a Mat matching the dimensions of the current transition matrix.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setTransitionMatrix(final Mat transitionMatrix) {
        if(notAllowSet(getTransitionMatrix(), transitionMatrix))
            throw new ArithmeticException("Cannot set transition matrix: wrong size.");
        super.set_transitionMatrix(transitionMatrix);
        return this;
    }

    @Override
    public CvMat get_controlMatrix() {
        return CvMat.move(super.get_controlMatrix());
    }

    /**
     * Control matrix (B)
     * <p>
     * Unused if there is no control.
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getControlMatrix() {
        return this.get_controlMatrix();
    }

    /**
     * @param controlMatrix a Mat matching the dimensions of the current control matrix.
     * @throws ArithmeticException if there are no control parameters or the dimensions don't match.
     */
    public CvKalmanFilter setControlMatrix(final Mat controlMatrix) {
        if(controlParameters <= 0)
            throw new ArithmeticException("Cannot set control: no control parameters.");
        if(notAllowSet(getControlMatrix(), controlMatrix))
            throw new ArithmeticException("Cannot set control: wrong size.");
        super.set_controlMatrix(controlMatrix);
        return this;
    }

    @Override
    public CvMat get_measurementMatrix() {
        return CvMat.move(super.get_measurementMatrix());
    }

    /**
     * Measurement matrix (H)
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getMeasurementMatrix() {
        return get_measurementMatrix();
    }

    /**
     * @param measurementMatrix a Mat matching the dimensions of the current measurement matrix.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setMeasurementMatrix(final Mat measurementMatrix) {
        if(notAllowSet(getMeasurementMatrix(), measurementMatrix))
            throw new ArithmeticException("Cannot set measurement: wrong size.");
        super.set_measurementMatrix(measurementMatrix);
        return this;
    }

    @Override
    public CvMat get_processNoiseCov() {
        return CvMat.move(super.get_processNoiseCov());
    }

    /**
     * Process noise covariance matrix (Q).
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getProcessNoiseCovariance() {
        return this.get_processNoiseCov();
    }

    /**
     * @param processNoiseCov a Mat matching the dimensions of the current process noise covariance.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setProcessNoiseCovariance(final Mat processNoiseCov) {
        if(notAllowSet(getProcessNoiseCovariance(), processNoiseCov))
            throw new ArithmeticException("Cannot set process noise covariance: wrong size.");
        super.set_processNoiseCov(processNoiseCov);
        return this;
    }

    @Override
    public CvMat get_measurementNoiseCov() {
        return CvMat.move(super.get_measurementNoiseCov());
    }

    /**
     * Measurement noise covariance matrix (R)
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getMeasurementNoiseCovariance() {
        return this.get_measurementNoiseCov();
    }

    /**
     * @param measurementNoiseCovariance a Mat matching the dimensions of the current measurement noise covariance.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setMeasurementNoiseCovariance(final Mat measurementNoiseCovariance) {
        if(notAllowSet(getMeasurementNoiseCovariance(), measurementNoiseCovariance))
            throw new ArithmeticException("Cannot set measurement error covariance: wrong size.");
        super.set_measurementNoiseCov(measurementNoiseCovariance);
        return this;
    }

    @Override
    public CvMat get_errorCovPre() {
        return CvMat.move(super.get_errorCovPre());
    }

    /**
     * Priori error estimate covariance matrix (P'(k)): P'(k)=A*P(k-1)*At + Q)
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getPrioriErrorEstimateCovariance() {
        return this.get_errorCovPre();
    }

    /**
     * @param errorCovPre a Mat matching the dimensions of the current priori error covariance.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setPrioriErrorEstimateCovariance(final Mat errorCovPre) {
        if(notAllowSet(getPrioriErrorEstimateCovariance(), errorCovPre))
            throw new ArithmeticException("Cannot set priori error covariance: wrong size.");
        super.set_errorCovPre(errorCovPre);
        return this;
    }

    @Override
    public CvMat get_gain() {
        return CvMat.move(super.get_gain());
    }

    /**
     * Kalman gain matrix (K(k)): K(k)=P'(k)*Ht*inv(H*P'(k)*Ht+R)
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getGain() {
        return this.get_gain();
    }

    /**
     * @param gain a Mat matching the dimensions of the current Kalman gain.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setGain(final Mat gain) {
        if(notAllowSet(getGain(), gain))
            throw new ArithmeticException("Cannot set Kalman gain: wrong size.");
        super.set_gain(gain);
        return this;
    }

    @Override
    public CvMat get_errorCovPost() {
        return CvMat.move(super.get_errorCovPost());
    }

    /**
     * Posteriori error estimate covariance matrix (P(k)): P(k)=(I-K(k)*H)*P'(k)
     *
     * @return a shallow copied Mat to be managed by the caller.
     */
    public CvMat getPosterioriErrorEstimateCovariance() {
        // BUGFIX: this previously returned get_measurementNoiseCov() (R) — an apparent
        // copy-paste error — instead of the posteriori error covariance (P(k)).
        return this.get_errorCovPost();
    }

    /**
     * @param errorCovPost a Mat matching the dimensions of the current posteriori error covariance.
     * @throws ArithmeticException if the supplied Mat's dimensions don't match.
     */
    public CvKalmanFilter setPosterioriErrorEstimateCovariance(final Mat errorCovPost) {
        if(notAllowSet(getPosterioriErrorEstimateCovariance(), errorCovPost))
            throw new ArithmeticException("Cannot set posteriori error covariance: wrong size.");
        super.set_errorCovPost(errorCovPost);
        return this;
    }

    /** Frees the underlying native KalmanFilter object via the reflected private delete(long). */
    protected void doNativeDelete() {
        try {
            nativeDelete.invoke(this, super.nativeObj);
        } catch(final IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
            throw new RuntimeException("Got an exception trying to call " + KalmanFilter.class.getSimpleName() + ".delete. Either the security model is too " +
                "restrictive or the version of OpenCV can't be supported.", e);
        }
    }

    /**
     * Arms this filter to survive exactly one {@link #close()} call, allowing it to be returned
     * from inside a <em>try-with-resource</em> without being freed.
     *
     * @return this
     */
    public CvKalmanFilter skipOnceForReturn() {
        skipOnceForDelete = true;
        return this;
    }

    @Override
    public void close() {
        if(!skipOnceForDelete) {
            if(!deletedAlready) {
                doNativeDelete();
                deletedAlready = true;
                if(TRACK_MEMORY_LEAKS)
                    deleteTrace = new RuntimeException("Here's where I was closed.");
            } else if(TRACK_MEMORY_LEAKS) {
                // A second close() is a caller bug; when tracking, report where it happened,
                // where the first close happened, and where the filter was created.
                LOGGER.warn("TRACKING: Deleting {} again at:", this.getClass()
                    .getSimpleName(), new RuntimeException());
                LOGGER.warn("TRACKING: Originally closed at:", deleteTrace);
                LOGGER.warn("TRACKING: Created at:", initTrace);
            }
        } else {
            // skipOnceForReturn() was called; absorb this one close() and re-arm normal behavior.
            skipOnceForDelete = false;
        }
    }

    // Last-resort safety net: free the native memory if the caller leaked this filter.
    @Override
    public void finalize() {
        if(!deletedAlready) {
            LOGGER.debug("Finalizing a {} that hasn't been closed.", this.getClass()
                .getSimpleName());
            if(TRACK_MEMORY_LEAKS)
                LOGGER.debug("TRACKING: Here's where I was instantiated: ", initTrace);
            close();
        }
    }

    @Override
    public String toString() {
        return new StringJoiner(", ", CvKalmanFilter.class.getSimpleName() + "[", "]").add("dataType=" + dataType)
            .add("dynamicParameters=" + dynamicParameters)
            .add("measureParameters=" + measureParameters)
            .add("controlParameters=" + controlParameters)
            .add("deletedAlready=" + deletedAlready)
            .add("nativeObj=" + nativeObj)
            .toString();
    }

    /**
     * OpenCV's JNI wrappers do no error checking since the set methods directly assign to the underlying kalman.cpp's state. Rather than getting an illegal
     * argument exception, a runtime error is potentially, eventually returned instead. This preempts that problem by doing error checking at set time.
     */
    private static boolean notAllowSet(final CvMat original, final Mat newMat) {
        try(original) {
            return original.rows() != newMat.rows() || original.cols() != newMat.cols() || original.channels() != newMat.channels();
        }
    }

    /**
     * This is a convenience method for {@link CvKalmanFilter#dump(CvKalmanFilter, PrintStream)} that uses {@link System#out} as the {@link PrintStream} and
     * dumps all elements of every mat in the supplied filter.
     * <p>
     * This is an expensive operation.
     */
    public static void dump(final CvKalmanFilter filter) {
        dump(filter, System.out);
    }

    /**
     * Dumps elements of every mat in the supplied filter to the supplied print stream.
     * <p>
     * This is an expensive operation.
     */
    public static void dump(final CvKalmanFilter filter, final PrintStream out) {
        out.println(filter.toString());
        if(filter.deletedAlready)
            return;
        // BUGFIX: each mat was previously registered with a Closer AND closed inside
        // dumpMat's try-with-resources, closing every mat twice. dumpMat is now the
        // sole owner of each mat it is handed.
        dumpMat("Predicted state (x'(k))", filter.getPredictionState(), out);
        dumpMat("Corrected state (x(k))", filter.getCorrectedState(), out);
        dumpMat("Transition matrix (A)", filter.getTransitionMatrix(), out);
        dumpMat("Control matrix (B)", filter.getControlMatrix(), out);
        dumpMat("Measurement matrix (H)", filter.getMeasurementMatrix(), out);
        dumpMat("Kalman gain (K)", filter.getGain(), out);
        dumpMat("Process noise uncertainty/cov (Q)", filter.getProcessNoiseCovariance(), out);
        dumpMat("Measurement noise uncertainty/cov (R)", filter.getMeasurementNoiseCovariance(), out);
        dumpMat("Priori error estimate uncertainty/cov (P'(k))", filter.getPrioriErrorEstimateCovariance(), out);
        dumpMat("Posteriori error estimate uncertainty/cov (P(k))", filter.getPosterioriErrorEstimateCovariance(), out);
    }

    // Prints a single named mat; takes ownership of (and closes) the mat.
    private static void dumpMat(final String name, final CvMat mat, final PrintStream out) {
        try(mat) {
            out.print("- " + name + ": ");
            if(mat == null || mat.cols() == 0) {
                out.println("[]");
            } else {
                Utils.dump(mat, out);
            }
        }
    }
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking/NaiveMultiTracker.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.tracking;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.Rect2d;
/**
* This is a reimplementation of {@link org.opencv.tracking.MultiTracker} but written in Java for Java.
* <p>
* This works in exactly the same way that the C++ MultiTrackerAlt operates — naively — but allows the developer to have easy, direct access to the internal
* trackers, and to be able to remove them.
*/
public class NaiveMultiTracker {

    /** The trackers being run in lock-step; callers may inspect or remove entries directly. */
    public final List<Tracker> trackers = new ArrayList<>();

    /**
     * Registers a tracker that has already been initialized against a target.
     *
     * @param tracker any previously initialized tracker.
     * @return this, for chaining.
     * @throws IllegalStateException if the tracker was never initialized.
     *
     * @see Tracker#initialize(Mat, Rect2d)
     */
    public NaiveMultiTracker addInitializedTracker(final Tracker tracker) {
        if(!tracker.isInitialized()) {
            final String message = "Attempted to pass in a " + tracker.getClass()
                .getSimpleName() + " that was not initialized.";
            throw new IllegalStateException(message);
        }
        trackers.add(tracker);
        return this;
    }

    /**
     * Runs every registered tracker against the given frame.
     *
     * @return one entry per tracker, in registration order; {@code null} where a tracker
     *         failed to locate its target in this frame.
     */
    public Rect2d[] update(final Mat newImage) {
        final Rect2d[] located = new Rect2d[trackers.size()];
        int slot = 0;
        for(final Tracker tracker: trackers) {
            located[slot++] = tracker.update(newImage)
                .orElse(null);
        }
        return located;
    }
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking/Tracker.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.tracking;
import java.util.Optional;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.core.Rect2d;
/**
* Much like {@link ai.kognition.pilecv4j.image.CvMat}, this is an easier interface to OpenCV's tracking ({@link org.opencv.tracking.Tracker}) class. It has
* simple memory management via the {@link AutoCloseable} interface and builder patterns.
*/
public interface Tracker extends AutoCloseable {

    /**
     * @return whether this implementation can restrict tracking via {@link #setMask(Mat)}.
     */
    boolean supportsMasking();

    /**
     * Restricts the tracker using the supplied mask. The default implementation rejects the
     * call; implementations that support masking override it.
     *
     * @throws UnsupportedOperationException when this implementation does not support masking.
     */
    default Tracker setMask(final Mat mask) {
        final String name = this.getClass()
            .getSimpleName();
        throw new UnsupportedOperationException(name + " does not support masking.");
    }

    /**
     * @return whether {@link #initialize(Mat, Rect2d)} has completed successfully on this tracker.
     */
    boolean isInitialized();

    /**
     * All trackers in OpenCV must be initialized by first stating "this is the object I wish to track."
     * <p>
     * Not every tracker can extract enough underlying features to discriminate the object from the
     * background, and there are many ways initialization can fail — all with the same end result: a
     * tracker that doesn't track. For all trackers, <em>in the event that the tracker fails to
     * initialize, the underlying memory will be automatically released via the {@link this#close()}
     * method.</em>
     *
     * @param image image from which the bounding box was pulled from
     * @param initialBoundingBox The object you wish to track
     *
     * @return {@link Optional#empty()} if the tracker fails to initialize. Otherwise, return this.
     */
    Optional<Tracker> initialize(Mat image, Rect2d initialBoundingBox);

    /**
     * Convenience overload that widens an integer {@link Rect} to a {@link Rect2d} before delegating
     * to {@link #initialize(Mat, Rect2d)}.
     */
    default Optional<Tracker> initialize(final Mat image, final Rect initialBoundingBox) {
        final Rect2d widened = new Rect2d(initialBoundingBox.x, initialBoundingBox.y,
            initialBoundingBox.width, initialBoundingBox.height);
        return initialize(image, widened);
    }

    /**
     * Update the tracker and find the new, most-likely bounding box for the target.
     *
     * @param image Current frame.
     *
     * @return {@link Optional#empty()} if the target was not located. Otherwise, return the bounding
     *         box. Note: an empty optional does not mean the tracker has failed — merely that the
     *         target was not found in this frame (e.g. it is out of sight).
     */
    Optional<Rect2d> update(Mat image);

    /**
     * Allows a {@link Tracker} managed by a <em>"try-with-resource"</em> to be returned without its
     * resources being freed on the way out. For example:
     *
     * <pre>
     * <code>
     * try(final Tracker trackerToReturn = new Tracker()) {
     *     // attempt initialization
     *     return trackerToReturn.skipOnceForReturn();
     * }
     * </code>
     * </pre>
     *
     * <p>
     * While it's possible to simply skip the try-with-resource and leave the {@link Tracker}
     * unmanaged, that risks leaking it if an exception is thrown before it is returned.
     *
     * @return the tracker
     */
    Tracker skipOnceForReturn();
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking/TrackerImpl.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.tracking;
import java.util.function.Supplier;
import ai.kognition.pilecv4j.tracking.tracker.TrackerCSRT;
import ai.kognition.pilecv4j.tracking.tracker.TrackerKCF;
import ai.kognition.pilecv4j.tracking.tracker.TrackerMOSSE;
/**
 * Enumeration of the available {@link Tracker} implementations. Each constant acts as a
 * factory ({@link Supplier#get()}) for a fresh tracker instance and reports whether that
 * implementation supports masking.
 */
public enum TrackerImpl implements Supplier<Tracker> {
/** Discriminative Correlation Filter with Channel and Spatial Reliability. */
CSRT {
@Override
public TrackerCSRT get() {
return new TrackerCSRT();
}
@Override
public boolean supportsMasking() {
return TrackerCSRT.SUPPORTS_MASKING;
}
},
/** Kernelized Correlation Filter. */
KCF {
@Override
public TrackerKCF get() {
return new TrackerKCF();
}
@Override
public boolean supportsMasking() {
return TrackerKCF.SUPPORTS_MASKING;
}
},
/** Minimum Output Sum of Squared Error filter. */
MOSSE {
@Override
public TrackerMOSSE get() {
return new TrackerMOSSE();
}
@Override
public boolean supportsMasking() {
return TrackerMOSSE.SUPPORTS_MASKING;
}
};
/**
 * Constructs a new instance of this tracker implementation. NOTE(review): instantiating a
 * tracker touches native OpenCV state (each implementation's static initializer calls
 * CvMat.initOpenCv()), which is deferred until get() is first invoked.
 */
@Override
public abstract Tracker get();
/** @return whether this implementation supports {@link Tracker#setMask}. */
public abstract boolean supportsMasking();
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking/tracker/TrackerCSRT.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.tracking.tracker;
import static ai.kognition.pilecv4j.image.CvMat.TRACK_MEMORY_LEAKS;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Optional;
import org.opencv.core.Mat;
import org.opencv.core.Rect2d;
import org.opencv.tracking.legacy_TrackerCSRT;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.kognition.pilecv4j.image.CvMat;
import ai.kognition.pilecv4j.tracking.Tracker;
/**
* Extension of OpenCV's implementation of the Discriminative Correlation Filter with Channel and Spatial Reliability to fit {@link Tracker}.
*
* @see <a href="https://arxiv.org/abs/1611.08461">arxiv: Discriminative Correlation Filter with Channel and Spatial Reliability</a>
*/
public class TrackerCSRT extends legacy_TrackerCSRT implements Tracker {
private static final Logger LOGGER = LoggerFactory.getLogger(TrackerCSRT.class);
/** CSRT supports masking (see {@link #setMask(Mat)}). */
public static final boolean SUPPORTS_MASKING = true;
// Reflective handles to OpenCV's private JNI methods create_0() and delete(long);
// they are the only way to construct/free the native tracker deterministically.
private static final Method nativeCreate;
private static final Method nativeDelete;
// Set once initialize() succeeds.
protected boolean isInitialized = false;
// Guards against freeing the native object twice.
protected boolean deletedAlready = false;
// When true, the next close() is a no-op (see skipOnceForReturn()).
protected boolean skipOnceForDelete = false;
// Instantiation stack trace, captured only when TRACK_MEMORY_LEAKS is enabled.
protected final RuntimeException stackTrace;
// Stack trace of the first close(); used to diagnose a double-close.
protected RuntimeException delStackTrace = null;
static {
// Make sure the native OpenCV library is loaded before any reflection on JNI classes.
CvMat.initOpenCv();
}
static {
try {
nativeCreate = legacy_TrackerCSRT.class.getDeclaredMethod("create_0");
nativeCreate.setAccessible(true);
nativeDelete = legacy_TrackerCSRT.class.getDeclaredMethod("delete", long.class);
nativeDelete.setAccessible(true);
} catch(final NoSuchMethodException | SecurityException e) {
throw new RuntimeException("Got an exception trying to access " + TrackerCSRT.class.getSimpleName() +
".delete or .create_0. Either the security model is too restrictive or the version of OpenCv can't be supported.", e);
}
}
/** Creates a new native CSRT tracker. */
public TrackerCSRT() {
this(doNativeCreate());
}
/** Wraps an existing native tracker address. */
protected TrackerCSRT(final long nativeAddr) {
super(nativeAddr);
stackTrace = TRACK_MEMORY_LEAKS ? new RuntimeException("Here's where I was instantiated: ") : null;
}
@Override
public boolean supportsMasking() {
return SUPPORTS_MASKING;
}
/** Restricts tracking to the region given by the mask. */
@Override
public Tracker setMask(final Mat mask) {
super.setInitialMask(mask);
return this;
}
/**
 * Initializes the tracker on the target. On failure the native memory is released via
 * close() and an empty Optional is returned, per the {@link Tracker} contract.
 */
@Override
public Optional<Tracker> initialize(final Mat image, final Rect2d initialBoundingBox) {
if(!super.init(image, initialBoundingBox)) {
this.close();
return Optional.empty();
}
isInitialized = true;
return Optional.of(this);
}
@Override
public boolean isInitialized() {
return isInitialized;
}
/**
 * Locates the target in the given frame.
 *
 * @return the bounding box, or empty when the target was not found in this frame.
 */
@Override
public Optional<Rect2d> update(final Mat image) {
final Rect2d retval = new Rect2d();
if(!super.update(image, retval))
return Optional.empty();
return Optional.of(retval);
}
// Calls the private static JNI factory create_0() to allocate the native tracker.
protected static long doNativeCreate() {
try {
return (Long)nativeCreate.invoke(null);
} catch(final IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException("Got an exception trying to call Tracker.create_0. Either the security model is too restrictive or the version of " +
"OpenCV can't be supported.", e);
}
}
// Frees the native tracker via the private JNI method delete(long).
protected void doNativeDelete() {
try {
nativeDelete.invoke(this, super.nativeObj);
} catch(final IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
throw new RuntimeException("Got an exception trying to call Tracker.delete. Either the security model is too restrictive or the version of " +
"OpenCV can't be supported.", e);
}
}
/** Arms this tracker to survive exactly one close(), so it can be returned from a try-with-resource. */
@Override
public TrackerCSRT skipOnceForReturn() {
skipOnceForDelete = true;
return this;
}
@Override
public void close() {
if(!skipOnceForDelete) {
if(!deletedAlready) {
doNativeDelete();
deletedAlready = true;
if(TRACK_MEMORY_LEAKS)
delStackTrace = new RuntimeException("Here's where I was closed");
} else if(TRACK_MEMORY_LEAKS) {
// A second close() is a caller bug; report where it happened, where the first
// close happened, and where the tracker was created.
LOGGER.warn("TRACKING: deleting {} again at:", this.getClass()
.getSimpleName(), new RuntimeException());
LOGGER.warn("TRACKING: originally closed at:", delStackTrace);
LOGGER.warn("TRACKING: create at: ", stackTrace);
}
} else {
// Absorb this one close() and re-arm normal behavior.
skipOnceForDelete = false;
}
}
// Last-resort safety net: free the native memory if the caller leaked this tracker.
@Override
public void finalize() {
if(!deletedAlready) {
LOGGER.debug("Finalizing a {} that hasn't been closed.", this.getClass()
.getSimpleName());
if(TRACK_MEMORY_LEAKS)
LOGGER.debug("TRACKING: here's where I was instantiated: ", stackTrace);
close();
}
}
@Override
public String toString() {
return "TrackerCSRT{" + "isInitialized=" + isInitialized + ", deletedAlready=" + deletedAlready + '}';
}
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking/tracker/TrackerKCF.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.tracking.tracker;
import static ai.kognition.pilecv4j.image.CvMat.TRACK_MEMORY_LEAKS;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Optional;
import org.opencv.core.Mat;
import org.opencv.core.Rect2d;
import org.opencv.tracking.legacy_TrackerKCF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.kognition.pilecv4j.image.CvMat;
import ai.kognition.pilecv4j.tracking.Tracker;
/**
* Extension of OpenCV's implementation of the Kernelized Correlation Filter to fit {@link Tracker}.
*
* @see <a href="https://arxiv.org/abs/1404.7584">arxiv: High-Speed Tracking with Kernelized Correlation Filters</a>
*/
public class TrackerKCF extends legacy_TrackerKCF implements Tracker {
private static final Logger LOGGER = LoggerFactory.getLogger(TrackerKCF.class);
/** KCF does not support masking. */
public static final boolean SUPPORTS_MASKING = false;
// Reflective handles to OpenCV's private JNI methods create_0() and delete(long);
// they are the only way to construct/free the native tracker deterministically.
private static final Method nativeCreate;
private static final Method nativeDelete;
// Set once initialize() succeeds.
protected boolean isInitialized = false;
// Guards against freeing the native object twice.
protected boolean deletedAlready = false;
// When true, the next close() is a no-op (see skipOnceForReturn()).
protected boolean skipOnceForDelete = false;
// Instantiation stack trace, captured only when TRACK_MEMORY_LEAKS is enabled.
protected final RuntimeException stackTrace;
// Stack trace of the first close(); used to diagnose a double-close.
protected RuntimeException delStackTrace;
static {
// Make sure the native OpenCV library is loaded before any reflection on JNI classes.
CvMat.initOpenCv();
}
static {
try {
nativeCreate = legacy_TrackerKCF.class.getDeclaredMethod("create_0");
nativeCreate.setAccessible(true);
nativeDelete = legacy_TrackerKCF.class.getDeclaredMethod("delete", long.class);
nativeDelete.setAccessible(true);
} catch(final NoSuchMethodException | SecurityException e) {
throw new RuntimeException("Got an exception trying to access " + TrackerKCF.class.getSimpleName() +
".delete or .create_0. Either the security model is too restrictive or the version of OpenCv can't be supported.", e);
}
}
/** Creates a new native KCF tracker. */
public TrackerKCF() {
this(doNativeCreate());
}
/** Wraps an existing native tracker address. */
protected TrackerKCF(final long nativeAdr) {
super(nativeAdr);
stackTrace = TRACK_MEMORY_LEAKS ? new RuntimeException("Here's where I was instantiated: ") : null;
}
@Override
public boolean supportsMasking() {
return SUPPORTS_MASKING;
}
@Override
public boolean isInitialized() {
return isInitialized;
}
/**
 * Initializes the tracker on the target. On failure the native memory is released via
 * close() and an empty Optional is returned, per the {@link Tracker} contract.
 */
@Override
public Optional<Tracker> initialize(final Mat image, final Rect2d initialBoundingBox) {
if(!super.init(image, initialBoundingBox)) {
this.close();
return Optional.empty();
}
isInitialized = true;
return Optional.of(this);
}
/**
 * Locates the target in the given frame.
 *
 * @return the bounding box, or empty when the target was not found in this frame.
 */
@Override
public Optional<Rect2d> update(final Mat image) {
final Rect2d predicted = new Rect2d();
if(!super.update(image, predicted))
return Optional.empty();
return Optional.of(predicted);
}
// Calls the private static JNI factory create_0() to allocate the native tracker.
protected static long doNativeCreate() {
try {
return (Long)nativeCreate.invoke(null);
} catch(final IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException("Got an exception trying to call Tracker.create_0. Either the security model is too restrictive or the version of " +
"OpenCV can't be supported.", e);
}
}
// Frees the native tracker via the private JNI method delete(long).
protected void doNativeDelete() {
try {
nativeDelete.invoke(this, super.nativeObj);
} catch(final IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
throw new RuntimeException("Got an exception trying to call Tracker.delete. Either the security model is too restrictive or the version of " +
"OpenCV can't be supported.", e);
}
}
/** Arms this tracker to survive exactly one close(), so it can be returned from a try-with-resource. */
@Override
public TrackerKCF skipOnceForReturn() {
skipOnceForDelete = true;
return this;
}
@Override
public void close() {
if(!skipOnceForDelete) {
if(!deletedAlready) {
doNativeDelete();
deletedAlready = true;
if(TRACK_MEMORY_LEAKS)
delStackTrace = new RuntimeException("Here's where I was closed");
} else if(TRACK_MEMORY_LEAKS) {
// A second close() is a caller bug; report where it happened, where the first
// close happened, and where the tracker was created.
LOGGER.warn("TRACKING: deleting {} again at:", this.getClass()
.getSimpleName(), new RuntimeException());
LOGGER.warn("TRACKING: originally closed at:", delStackTrace);
LOGGER.warn("TRACKING: create at: ", stackTrace);
}
} else {
// Absorb this one close() and re-arm normal behavior.
skipOnceForDelete = false;
}
}
// Last-resort safety net: free the native memory if the caller leaked this tracker.
@Override
public void finalize() {
if(!deletedAlready) {
LOGGER.debug("Finalizing a {} that hasn't been closed.", this.getClass()
.getSimpleName());
if(TRACK_MEMORY_LEAKS)
LOGGER.debug("TRACKING: here's where I was instantiated: ", stackTrace);
close();
}
}
@Override
public String toString() {
return "TrackerKCF{" + "isInitialized=" + isInitialized + ", deletedAlready=" + deletedAlready + '}';
}
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking
|
java-sources/ai/kognition/pilecv4j/lib-tracking/1.0/ai/kognition/pilecv4j/tracking/tracker/TrackerMOSSE.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.tracking.tracker;
import static ai.kognition.pilecv4j.image.CvMat.TRACK_MEMORY_LEAKS;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Optional;
import org.opencv.core.Mat;
import org.opencv.core.Rect2d;
import org.opencv.tracking.legacy_TrackerMOSSE;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.kognition.pilecv4j.image.CvMat;
import ai.kognition.pilecv4j.tracking.Tracker;
/**
 * Extension of OpenCV's implementation of the Minimum Output Sum of Squared Error (MOSSE) filter to fit {@link Tracker}.
*
* @see <a href="https://www.cs.colostate.edu/~vision/publications/bolme_cvpr10.pdf">cs.colorado-state: Visual Object Tracking using Adaptive Correlation
* Filters</a>
*/
public class TrackerMOSSE extends legacy_TrackerMOSSE implements Tracker {
private static final Logger LOGGER = LoggerFactory.getLogger(TrackerMOSSE.class);
// This tracker reports that it does not support masking (see supportsMasking()).
public static final boolean SUPPORTS_MASKING = false;
// Reflected handles to legacy_TrackerMOSSE's non-public native lifecycle
// methods ("create_0" and "delete(long)"), resolved once at class load.
private static final Method nativeCreate;
private static final Method nativeDelete;
// Set to true after a successful initialize(); reported via isInitialized().
protected boolean isInitialized = false;
// True once the underlying native object has been deleted; guards double-free.
protected boolean deletedAlready = false;
// One-shot flag set by skipOnceForReturn(); makes the next close() a no-op.
protected boolean skipOnceForDelete = false;
// Creation-site stack trace, captured only when TRACK_MEMORY_LEAKS is on.
protected final RuntimeException stackTrace;
// Close-site stack trace, captured on first close() when TRACK_MEMORY_LEAKS is on.
protected RuntimeException delStackTrace = null;
// Ensure the OpenCV native library is loaded before the reflection below runs.
static {
CvMat.initOpenCv();
}
// Resolve the native create/delete methods up front so an incompatible or
// locked-down OpenCV build fails fast at class-initialization time.
static {
try {
nativeCreate = legacy_TrackerMOSSE.class.getDeclaredMethod("create_0");
nativeCreate.setAccessible(true);
nativeDelete = legacy_TrackerMOSSE.class.getDeclaredMethod("delete", long.class);
nativeDelete.setAccessible(true);
} catch(final NoSuchMethodException | SecurityException e) {
throw new RuntimeException("Got an exception trying to access " + TrackerMOSSE.class.getSimpleName() +
".delete or .create_0. Either the security model is too restrictive or the version of OpenCv can't be supported.", e);
}
}
// Creates a brand new native MOSSE tracker.
public TrackerMOSSE() {
this(doNativeCreate());
}
// Wraps an already-created native tracker at the given address.
protected TrackerMOSSE(final long nativeAddr) {
super(nativeAddr);
stackTrace = TRACK_MEMORY_LEAKS ? new RuntimeException("Here's where I was instantiated") : null;
}
@Override
public boolean supportsMasking() {
return SUPPORTS_MASKING;
}
@Override
public boolean isInitialized() {
return isInitialized;
}
// Initializes the tracker on the given frame and initial bounding box.
// On failure the native tracker is closed and Optional.empty() is returned;
// on success this (now initialized) tracker is returned.
@Override
public Optional<Tracker> initialize(final Mat image, final Rect2d initialBoundingBox) {
if(!super.init(image, initialBoundingBox)) {
this.close();
return Optional.empty();
}
isInitialized = true;
return Optional.of(this);
}
// Advances the tracker by one frame. Returns the updated bounding box, or
// Optional.empty() when the underlying tracker reports the update failed.
@Override
public Optional<Rect2d> update(final Mat image) {
final Rect2d newBoundingBox = new Rect2d();
if(!super.update(image, newBoundingBox))
return Optional.empty();
return Optional.of(newBoundingBox);
}
// Invokes the reflected native factory; returns the new native address.
protected static long doNativeCreate() {
try {
return (Long)nativeCreate.invoke(null);
} catch(final IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException("Got an exception trying to call Tracker.create_0. Either the security model is too restrictive or the version of " +
"OpenCV can't be supported.", e);
}
}
// Invokes the reflected native delete on this instance's native address.
protected void doNativeDelete() {
try {
nativeDelete.invoke(this, super.nativeObj);
} catch(final IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
throw new RuntimeException("Got an exception trying to call Tracker.delete. Either the security model is too restrictive or the version of " +
"OpenCV can't be supported.", e);
}
}
// Arms a one-shot guard so the next close() is a no-op; used when handing
// this tracker out of a try-with-resources scope.
@Override
public TrackerMOSSE skipOnceForReturn() {
skipOnceForDelete = true;
return this;
}
// Releases the native tracker exactly once (unless armed via
// skipOnceForReturn()); double-closes are logged when leak tracking is on.
@Override
public void close() {
if(!skipOnceForDelete) {
if(!deletedAlready) {
doNativeDelete();
deletedAlready = true;
if(TRACK_MEMORY_LEAKS)
delStackTrace = new RuntimeException("Here's where I was closed");
} else if(TRACK_MEMORY_LEAKS) {
LOGGER.warn("TRACKING: deleting {} again at:", this.getClass()
.getSimpleName(), new RuntimeException());
LOGGER.warn("TRACKING: originally closed at:", delStackTrace);
LOGGER.warn("TRACKING: create at: ", stackTrace);
}
} else {
skipOnceForDelete = false;
}
}
// Safety net: free the native object at GC time if close() was never called.
// NOTE(review): finalize() is deprecated since Java 9; kept for parity with
// the other trackers in this codebase.
@Override
public void finalize() {
if(!deletedAlready) {
LOGGER.debug("Finalizing a {} that hasn't been closed.", this.getClass()
.getSimpleName());
if(TRACK_MEMORY_LEAKS)
LOGGER.debug("TRACKING: here's where I was instantiated: ", stackTrace);
close();
}
}
@Override
public String toString() {
return "TrackerMOSSE{" + "isInitialized=" + isInitialized + ", deletedAlready=" + deletedAlready + '}';
}
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j/util/NativeLibraryLoader.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* This class will load native libraries from a jar file as long as they've been packaged appropriately.
* </p>
*
* <p>
* If you want to explicitly set a directory to load libraries from for debugging purposes you can
 * set the system property DEBUG_LIB_DIR (e.g. -DDEBUG_LIB_DIR=/path/to/libs) and the loader will first try to load any library from
* that directory. You can verify this was picked up because a WARN log message will be printed out
* identifying the fact that a library is "Loading ... from the debug library directory."
* </p>
*/
public class NativeLibraryLoader {
    private static final Logger LOGGER = LoggerFactory.getLogger(NativeLibraryLoader.class);

    // Names of libraries already handed to System.load in this JVM; each library
    // is loaded at most once no matter how many Loaders request it.
    private static Set<String> loaded = new HashSet<>();
    private static PlatformDetection platform = new PlatformDetection();

    // NOTE: this is read as a Java *system property* (-DDEBUG_LIB_DIR=/path),
    // not an environment variable. It is null when unset.
    private static final String DEBUG_LIB_DIR = System.getProperty("DEBUG_LIB_DIR");

    public static class Loader {
        private final List<LibraryDefinition> libs = new ArrayList<>();
        private final List<LibraryLoadCallback> preLoadCallbacks = new ArrayList<>();
        private final List<LibraryLoadCallback> postLoadCallbacks = new ArrayList<>();
        // Where libraries are extracted to; defaults to the system temp directory.
        private File destinationDir = new File(System.getProperty("java.io.tmpdir"));

        // One requested library plus whether its absence is fatal.
        private static class LibraryDefinition {
            final private boolean required;
            final private String libName;

            private LibraryDefinition(final boolean required, final String libName) {
                this.required = required;
                this.libName = libName;
            }
        }

        /**
         * Requests one or more libraries that MUST be loadable; {@link #load()}
         * throws an {@link UnsatisfiedLinkError} if any of them cannot be found.
         */
        public Loader library(final String... libNames) {
            Arrays.stream(libNames)
                .map(ln -> new LibraryDefinition(true, ln))
                .forEach(libs::add);
            return this;
        }

        /**
         * Requests one or more libraries that are loaded when available and
         * silently skipped when not packaged on the classpath.
         */
        public Loader optional(final String... libNames) {
            Arrays.stream(libNames)
                .map(ln -> new LibraryDefinition(false, ln))
                .forEach(libs::add);
            return this;
        }

        /** Callback fired around each {@code System.load} call. */
        @FunctionalInterface
        public interface LibraryLoadCallback {
            public void loading(File directory, String libName, String fullLibName);
        }

        /** Registers a callback invoked just before each library is loaded. */
        public Loader addPreLoadCallback(final LibraryLoadCallback ll) {
            preLoadCallbacks.add(ll);
            return this;
        }

        /** Registers a callback invoked just after each library is loaded. */
        public Loader addPostLoadCallback(final LibraryLoadCallback ll) {
            postLoadCallbacks.add(ll);
            return this;
        }

        /** Overrides the directory libraries are extracted into. */
        public Loader destinationDir(final String destinationDir) {
            this.destinationDir = new File(destinationDir);
            return this;
        }

        /**
         * Extracts (when needed) and loads every requested library, in order. A
         * library already on disk is only re-extracted when its MD5 file differs
         * from the copy packaged in the jar.
         *
         * @throws UnsatisfiedLinkError if a required library is missing or unreadable
         */
        public void load() {
            if(!this.destinationDir.exists()) {
                if(!this.destinationDir.mkdirs())
                    LOGGER.warn("FAILED to create the destination directory \"{}\" for the libraries {}",
                        this.destinationDir, libs.stream().map(l -> l.libName).collect(Collectors.toList()));
            }
            final File tmpDir = this.destinationDir;
            libs.stream()
                .filter(ld -> ld != null)
                .filter(ld -> ld.libName != null)
                .filter(ld -> {
                    final boolean needsLoading = !loaded.contains(ld.libName);
                    if(!needsLoading)
                        LOGGER.debug("Native library \"{}\" is already loaded.", ld.libName);
                    return needsLoading;
                })
                .forEach(ld -> {
                    final String libFileName = System.mapLibraryName(ld.libName);
                    final String libMD5FileName = libFileName + ".MD5";
                    LOGGER.trace("Native library \"{}\" platform specific file name is \"{}\"", ld.libName, libFileName);
                    boolean loadMe = true;
                    final File libFile;
                    // BUGFIX: only probe the debug directory when the property is set.
                    // new File(null, name) silently resolves against the working
                    // directory, so a same-named file in CWD used to hijack the load.
                    if(DEBUG_LIB_DIR != null && new File(DEBUG_LIB_DIR, libFileName).exists()) {
                        LOGGER.warn("Loading \"{}\" from the debug library directory \"{}\"", libFileName, DEBUG_LIB_DIR);
                        libFile = new File(new File(DEBUG_LIB_DIR), libFileName);
                    } else {
                        libFile = new File(tmpDir, libFileName);
                        final File libMD5File = new File(tmpDir, libMD5FileName);
                        if(!libFile.exists())
                            loadMe = copyFromJar(ld, libFileName, libFile, libMD5FileName, libMD5File);
                        else {
                            final boolean copyMeFromJar;
                            final String fileMD5 = rethrowIOException(
                                () -> (libMD5File.exists()) ? FileUtils.readFileToString(libMD5File, StandardCharsets.UTF_8.name()) : (String)null,
                                libMD5FileName);
                            // if the file exists then fileMD5 is set. Otherwise it's null.
                            if(fileMD5 != null) {
                                // read the MD5 from the jar.
                                final String jarMD5 = rethrowIOException(() -> {
                                    try(InputStream is = getInputStream(platform + "/" + libMD5FileName)) {
                                        if(is == null) {
                                            LOGGER.info("The library \"{}\" doesn't appear to have a coresponding MD5. Reloading from jar file.", libFileName);
                                            return null;
                                        } else
                                            return IOUtils.toString(is, StandardCharsets.UTF_8.name());
                                    }
                                }, platform + "/" + libMD5FileName);
                                // re-copy the library when the on-disk MD5 doesn't match the jar's.
                                copyMeFromJar = (!fileMD5.equals(jarMD5));
                            } else {
                                // no on-disk MD5: always re-copy from the jar.
                                // BUGFIX: the {} placeholder previously had no argument.
                                LOGGER.warn("Missing MD5 file for \"{}.\" This will result in recopying of the library file every startup." +
                                    " Consider generating an MD5 file for the library", libFileName);
                                copyMeFromJar = true;
                            }
                            if(copyMeFromJar)
                                loadMe = copyFromJar(ld, libFileName, libFile, libMD5FileName, libMD5File);
                            else
                                LOGGER.debug("Native library \"{}\" is already on the filesystem. Not overwriting.", ld.libName);
                        }
                    }
                    if(loadMe) {
                        preLoadCallbacks.stream()
                            .forEach(ll -> ll.loading(tmpDir, ld.libName, libFileName));
                        // BUGFIX: was System.out.println; log through SLF4J like everything else.
                        LOGGER.info("Loading: {}", libFile.getAbsolutePath());
                        System.load(libFile.getAbsolutePath());
                        postLoadCallbacks.stream()
                            .forEach(ll -> ll.loading(tmpDir, ld.libName, libFileName));
                    }
                    // Recorded even when an optional library was absent so we don't retry.
                    loaded.add(ld.libName);
                });
        }
    }

    // Copies the library (and, when present, its MD5 file) out of the jar onto
    // disk. Returns true when the library was actually copied and should be loaded.
    private static boolean copyFromJar(final Loader.LibraryDefinition ld, final String libFileName, final File libFile, final String libMD5FileName,
        final File libMD5File) throws UnsatisfiedLinkError {
        final String libFilePath = platform + "/" + libFileName;
        final String libMD5FilePath = platform + "/" + libMD5FileName;
        LOGGER.debug("Copying native library \"{}\" from the jar file.", libFilePath);
        final boolean loadMe = rethrowIOException(() -> {
            try(InputStream is = getInputStream(libFilePath)) {
                if(is == null) {
                    if(ld.required)
                        throw new UnsatisfiedLinkError(
                            "Required native library \"" + ld.libName + "\" with platform representation \"" + libFilePath
                                + "\" doesn't appear to exist in any jar file on the classpath");
                    else {
                        // if we're not required and it's missing, we're fine
                        LOGGER.debug("Requested but optional library \"{}\" is not on the classpath.", ld.libName);
                        return false;
                    }
                }
                FileUtils.copyInputStreamToFile(is, libFile);
                return true;
            }
        }, libFilePath);
        if(loadMe) // loadMe is only set if the library was in the jar (and copied onto the disk).
            // otherwise we can just skip trying to load the MD5
            rethrowIOException(() -> {
                try(InputStream is = getInputStream(libMD5FilePath)) {
                    if(is == null) {
                        LOGGER.info("The library \"{}\" doesn't appear to have a coresponding MD5. Reloading from jar file.", libFilePath);
                    } else {
                        FileUtils.copyInputStreamToFile(is, libMD5File);
                    }
                }
            }, libMD5FilePath);
        return loadMe;
    }

    /** Entry point: start building a load request. */
    public static Loader loader() {
        return new Loader();
    }

    private NativeLibraryLoader() {}

    // Supplier that may throw; lets lambdas funnel IOExceptions to one handler.
    @FunctionalInterface
    private static interface SupplierThrows<R, E extends Throwable> {
        public R get() throws E;
    }

    // Void counterpart of SupplierThrows.
    @FunctionalInterface
    private static interface Nothing<E extends Throwable> {
        public void doIt() throws E;
    }

    // Wraps an IOException in an UnsatisfiedLinkError so callers of load() see
    // one consistent failure type.
    private static <R> R rethrowIOException(final SupplierThrows<R, IOException> suppl, final String libName) {
        try {
            return suppl.get();
        } catch(final IOException ioe) {
            final String message = "Couldn't load the file from the jar (" + libName + "):";
            LOGGER.error(message, ioe);
            throw new UnsatisfiedLinkError(message + ioe.getLocalizedMessage());
        }
    }

    private static void rethrowIOException(final Nothing<IOException> suppl, final String libName) {
        try {
            suppl.doIt();
        } catch(final IOException ioe) {
            final String message = "Couldn't load the file from the jar (" + libName + "):";
            LOGGER.error(message, ioe);
            throw new UnsatisfiedLinkError(message + ioe.getLocalizedMessage());
        }
    }

    // Resolves a resource against this class's, then the context, then the
    // system classloader.
    // see http://www.javaworld.com/javaworld/javaqa/2003-06/01-qa-0606-load.html
    // also see: http://www.javaworld.com/javaworld/javaqa/2003-03/01-qa-0314-forname.html
    private static InputStream getInputStream(final String resource) {
        InputStream is = getInputStreamFromClassLoader(NativeLibraryLoader.class.getClassLoader(), resource);
        if(is == null) // ok, now try the context classloader
            is = getInputStreamFromClassLoader(Thread.currentThread().getContextClassLoader(), resource);
        if(is == null) // finally try the system classloader though if we're here we're probably screwed
            is = getInputStreamFromClassLoader(ClassLoader.getSystemClassLoader(), resource);
        return is;
    }

    private static InputStream getInputStreamFromClassLoader(final ClassLoader loader, final String resource) {
        if(loader == null)
            return null;
        InputStream is = loader.getResourceAsStream(resource);
        if(is == null)
            is = loader.getResourceAsStream("/" + resource);
        return is;
    }
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j/util/NativePointerWrap.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.util;
import com.sun.jna.Native;
import com.sun.jna.Pointer;
/**
* This class is a simple helper class that will automatically free native memory
* pointed to by a JNA Pointer on close. It's basically a JNA Pointer guard class.
*/
public class NativePointerWrap implements AutoCloseable {
    /** The wrapped JNA pointer. May be null, in which case close() is a no-op. */
    public final Pointer ptr;

    public NativePointerWrap(final Pointer ptr) {
        this.ptr = ptr;
    }

    /**
     * Frees the native memory behind {@link #ptr} when it is non-null.
     * NOTE(review): frees unconditionally on every call — presumably callers
     * close exactly once (e.g. try-with-resources); confirm before reusing.
     */
    @Override
    public void close() {
        if(ptr == null)
            return;
        Native.free(Pointer.nativeValue(ptr));
    }
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j/util/PlatformDetection.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.util;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.SystemUtils;
public class PlatformDetection {
    /** Normalized operating-system token; one of the OS_* constants. */
    public final String os;
    /** Normalized CPU-architecture token; one of the ARCH_* constants. */
    public final String arch;

    // FIX: these are constants used to build resource paths (see toString())
    // and were declared mutable (public static String); made final. Reads
    // remain source- and behavior-compatible.
    public static final String OS_WINDOWS = "windows";
    public static final String OS_OSX = "osx";
    public static final String OS_SOLARIS = "solaris";
    public static final String OS_LINUX = "linux";
    public static final String ARCH_PPC = "ppc";
    public static final String ARCH_X86_32 = "x86_32";
    public static final String ARCH_X86_64 = "x86_64";

    /**
     * Detects the current OS and architecture from commons-lang3's SystemUtils.
     *
     * @throws IllegalArgumentException if the OS or architecture is not recognized
     */
    public PlatformDetection() {
        // resolve OS
        if(SystemUtils.IS_OS_WINDOWS) {
            this.os = OS_WINDOWS;
        } else if(SystemUtils.IS_OS_MAC_OSX) {
            this.os = OS_OSX;
        } else if(SystemUtils.IS_OS_SOLARIS) {
            this.os = OS_SOLARIS;
        } else if(SystemUtils.IS_OS_LINUX) {
            this.os = OS_LINUX;
        } else {
            throw new IllegalArgumentException("Unknown operating system " + SystemUtils.OS_NAME);
        }

        // resolve architecture: fold the many aliases of os.arch onto our tokens
        final Map<String, String> archMap = new HashMap<>();
        archMap.put("x86", ARCH_X86_32);
        archMap.put("i386", ARCH_X86_32);
        archMap.put("i486", ARCH_X86_32);
        archMap.put("i586", ARCH_X86_32);
        archMap.put("i686", ARCH_X86_32);
        archMap.put("x86_64", ARCH_X86_64);
        archMap.put("amd64", ARCH_X86_64);
        archMap.put("powerpc", ARCH_PPC);
        this.arch = archMap.get(SystemUtils.OS_ARCH);
        if(this.arch == null) {
            throw new IllegalArgumentException("Unknown architecture " + SystemUtils.OS_ARCH);
        }
    }

    /** e.g. "linux-x86_64"; used by NativeLibraryLoader as the jar resource-path prefix. */
    @Override
    public String toString() {
        return os + "-" + arch;
    }
}
|
0
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j
|
java-sources/ai/kognition/pilecv4j/lib-util/1.0/ai/kognition/pilecv4j/util/Timer.java
|
/*
* Copyright 2022 Jim Carroll
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.kognition.pilecv4j.util;
public final class Timer {
    // Timestamps captured by start()/stop(), from System.nanoTime().
    private long startTime;
    private long endTime;

    /** Number of nanoseconds in one second. */
    public static final long nanoSecondsPerSecond = 1000000000L;
    /** Seconds per nanosecond (reciprocal of the above). */
    public static final double secondsPerNanosecond = 1.0D / nanoSecondsPerSecond;

    /** Marks the beginning of the measured interval. */
    public final void start() {
        startTime = System.nanoTime();
    }

    /** Marks the end of the measured interval and returns it formatted in seconds. */
    public final String stop() {
        endTime = System.nanoTime();
        return toString();
    }

    /** Length of the last measured interval, in seconds. */
    public final float getSeconds() {
        final long elapsedNanos = endTime - startTime;
        return (float)(elapsedNanos * secondsPerNanosecond);
    }

    /** The last measured interval in seconds with three decimal places, e.g. "1.234". */
    @Override
    public final String toString() {
        return String.format("%.3f", getSeconds());
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/AnnotationUtils.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation;
import javax.annotation.processing.Filer;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
import java.io.*;
import java.util.List;
public class AnnotationUtils {
    // Root, inside CLASS_OUTPUT, under which all konduit-serving metadata files
    // live. Hoisted from the four copies of the same concatenation.
    private static final String OUTPUT_ROOT = "META-INF/konduit-serving/";

    private AnnotationUtils() {}

    /** Writes {@code lines} to the metadata file named after {@code c}'s FQCN. */
    public static void writeFile(Filer filer, Class<?> c, List<String> lines) {
        writeFile(filer, c.getName(), lines);
    }

    /**
     * Writes {@code lines} (newline-joined) to META-INF/konduit-serving/{c}
     * in the compiler's CLASS_OUTPUT location. A no-op when lines is empty.
     *
     * @throws RuntimeException wrapping any failure, with the cause preserved
     */
    public static void writeFile(Filer filer, String c, List<String> lines) {
        if(lines.isEmpty())
            return;
        try {
            FileObject file = filer.createResource(StandardLocation.CLASS_OUTPUT, "", OUTPUT_ROOT + c);
            try (Writer w = file.openWriter()) {
                w.write(String.join("\n", lines));
            }
        } catch (Throwable t) {
            throw new RuntimeException("Error in annotation processing", t);
        }
    }

    /**
     * Returns true when the metadata file for {@code c} exists and its content
     * contains every string in {@code lines} (substring match).
     */
    public static boolean existsAndContains(Filer filer, String c, List<String> lines) {
        // FIX: removed an unused local that rebuilt the output path for nothing.
        if(!fileExists(filer, c))
            return false;
        String content = getContent(filer, c);
        for(String s : lines) {
            if(!content.contains(s)) {
                return false;
            }
        }
        return true;
    }

    /** Returns true when the metadata file for {@code c} can be resolved by the Filer. */
    public static boolean fileExists(Filer filer, String c) {
        try {
            FileObject file = filer.getResource(StandardLocation.CLASS_OUTPUT, "", OUTPUT_ROOT + c);
            return file != null;
        } catch (IOException e) {
            // Filer implementations signal "not found" with an IOException.
            return false;
        }
    }

    /**
     * Reads the whole metadata file for {@code c} into a String.
     * NOTE(review): decodes with the platform default charset, matching the
     * default-charset writer used in writeFile(); consider UTF-8 on both sides.
     *
     * @throws RuntimeException if the file cannot be read
     */
    public static String getContent(Filer filer, String c) {
        try {
            FileObject file = filer.getResource(StandardLocation.CLASS_OUTPUT, "", OUTPUT_ROOT + c);
            StringBuilder sb = new StringBuilder();
            try (Reader r = new BufferedReader(new InputStreamReader(file.openInputStream()))) {
                int ch;
                while ((ch = r.read()) != -1) {
                    sb.append((char) ch);
                }
            }
            return sb.toString();
        } catch (IOException e) {
            throw new RuntimeException("ERROR READING FILE", e);
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/json/JsonName.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.json;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Declares the JSON type name under which the annotated type participates in
 * polymorphic JSON/YAML (de)serialization. Collected at compile time by the
 * annotation processor in this package and retained at runtime; inherited by
 * subclasses of the annotated type.
 */
@Retention(RetentionPolicy.RUNTIME)
@Inherited
public @interface JsonName {
// The JSON subtype name this class is registered under.
String value();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/json/JsonNameProcessor.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.json;
import ai.konduit.serving.annotation.AnnotationUtils;
import ai.konduit.serving.annotation.module.ModuleInfo;
import com.google.auto.service.AutoService;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementFilter;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
@SupportedAnnotationTypes({"ai.konduit.serving.annotation.json.JsonName",
        "ai.konduit.serving.annotation.module.ModuleInfo"})
@SupportedSourceVersion(SourceVersion.RELEASE_8)
@AutoService(Processor.class)
public class JsonNameProcessor extends AbstractProcessor {
    // Fully-qualified names of the types a @JsonName-annotated class may implement.
    private static final String PIPELINE_STEP = "ai.konduit.serving.pipeline.api.step.PipelineStep";
    private static final String SWITCH_FN = "ai.konduit.serving.pipeline.impl.pipeline.graph.SwitchFn";
    private static final String GRAPH_STEP = "ai.konduit.serving.pipeline.impl.pipeline.graph.GraphStep";
    private static final String TRIGGER = "ai.konduit.serving.pipeline.api.pipeline.Trigger";

    // Lines for the aggregated txt file: json_name,class_name,interface_name
    private List<String> toWrite = new ArrayList<>();
    // Same info kept structured, for generating the JsonSubTypesMapping source.
    private List<JsonSubType> subTypes = new ArrayList<>();
    // Module name/class taken from the first @ModuleInfo-annotated type seen.
    private String moduleName;
    private String moduleClass;

    /**
     * Collects @ModuleInfo and @JsonName metadata each round; on the final round
     * (processingOver) emits the metadata file, the generated mapping class and
     * its service-loader registration.
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment env) {
        if (env.processingOver()) {
            writeFile();
        } else {
            //Get module name (first @ModuleInfo found wins)
            if(moduleName == null) {
                Collection<? extends Element> c = env.getElementsAnnotatedWith(ModuleInfo.class);
                List<TypeElement> types = ElementFilter.typesIn(c);
                for(TypeElement te : types){
                    moduleName = te.getAnnotation(ModuleInfo.class).value();
                    moduleClass = te.toString();
                    break;
                }
            }

            //Collect JSON subtype info for writing at end
            Collection<? extends Element> c = env.getElementsAnnotatedWith(JsonName.class);
            List<TypeElement> types = ElementFilter.typesIn(c);
            if(!types.isEmpty()) {
                // PERF: these lookups are round-invariant; hoisted out of the
                // per-annotated-type loop. Guarded by !types.isEmpty() so the
                // IllegalStateExceptions still only fire when they used to.
                if(processingEnv.getElementUtils().getTypeElement(PIPELINE_STEP) == null) {
                    throw new IllegalStateException("Processing environment did not find element " + PIPELINE_STEP + " with environment " + processingEnv.getElementUtils());
                }
                TypeMirror pipelineStepTypeMirror = processingEnv.getElementUtils().getTypeElement(PIPELINE_STEP).asType();
                if(processingEnv.getElementUtils().getTypeElement(SWITCH_FN) == null) {
                    throw new IllegalStateException("Processing environment did not find element " + SWITCH_FN);
                }
                TypeMirror switchFnTypeMirror = processingEnv.getElementUtils().getTypeElement(SWITCH_FN).asType();
                if(processingEnv.getElementUtils().getTypeElement(GRAPH_STEP) == null) {
                    throw new IllegalStateException("Processing environment did not find element " + GRAPH_STEP);
                }
                TypeMirror graphStepTypeMirror = processingEnv.getElementUtils().getTypeElement(GRAPH_STEP).asType();
                if(processingEnv.getElementUtils().getTypeElement(TRIGGER) == null) {
                    throw new IllegalStateException("Processing environment did not find element " + TRIGGER);
                }
                TypeMirror triggerMirror = processingEnv.getElementUtils().getTypeElement(TRIGGER).asType();

                for (TypeElement annotation : types) {
                    TypeMirror t = annotation.asType();
                    boolean isPS = processingEnv.getTypeUtils().isAssignable(t, pipelineStepTypeMirror);
                    boolean isSF = processingEnv.getTypeUtils().isAssignable(t, switchFnTypeMirror);
                    boolean isGS = processingEnv.getTypeUtils().isAssignable(t, graphStepTypeMirror);
                    boolean isT = processingEnv.getTypeUtils().isAssignable(t, triggerMirror);
                    if(isPS || isSF || isGS || isT) {
                        String str;
                        if(isPS) {
                            str = PIPELINE_STEP;
                        } else if(isSF){
                            str = SWITCH_FN;
                        } else if(isGS) {
                            str = GRAPH_STEP;
                        } else {
                            str = TRIGGER;
                        }
                        String jn = annotation.getAnnotation(JsonName.class).value();
                        toWrite.add(jn + "," + annotation + "," + str); //Format: json_name,class_name,interface_name
                        subTypes.add(new JsonSubType(jn, annotation.toString(), str));
                    }
                }
            }
        }
        return true;
    }

    /**
     * Emits: (1) the aggregated metadata txt file, (2) a generated
     * {ModuleName}JsonMapping source implementing JsonSubTypesMapping,
     * (3) the service-loader registration pointing at that class.
     */
    protected void writeFile() {
        Filer filer = processingEnv.getFiler();
        if(filer == null) {
            System.err.println("No filer found. Returning.");
            return;
        }
        AnnotationUtils.writeFile(filer, JsonName.class, toWrite);

        //Also write the SubTypesMapping class (to get info via service loader)
        //TODO we have 2 redundant sources of the same info here. the AnnotationUtils txt file is good for collecting info
        // for project-wide aggregation, but is bad for use in service loader etc (non-unique names)
        //This is better than manual JSON subtype mapping, but still isn't ideal
        String name = className();
        if(moduleClass == null) {
            return;
        }
        int idx = moduleClass.lastIndexOf(".");
        String fullName;
        String pkg = null;
        if (idx > 0) {
            pkg = moduleClass.substring(0, idx);
            fullName = pkg + "." + name;
        } else {
            fullName = name;
        }

        StringBuilder sb = new StringBuilder();
        if(pkg != null){
            // FIX: terminate the package statement with a newline so the
            // generated source doesn't run "package x;import ..." together.
            sb.append("package ").append(pkg).append(";\n");
        }
        // FIX: the JsonSubType import was emitted twice; deduplicated.
        sb.append("import ai.konduit.serving.pipeline.api.serde.JsonSubType;\n")
                .append("import ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping;\n")
                .append("\n")
                .append("import java.util.ArrayList;\n")
                .append("import java.util.List;\n");
        sb.append("//GENERATED CLASS DO NOT EDIT\n");
        // FIX: newline after the class-opening brace (was fused with @Override).
        sb.append("public class ").append(name).append(" implements JsonSubTypesMapping {\n")
                .append("    @Override\n")
                .append("    public List<JsonSubType> getSubTypesMapping() {\n")
                .append("        List<JsonSubType> l = new ArrayList<>();\n");
        for(JsonSubType j : subTypes){
            sb.append("        l.add(new JsonSubType(\"")
                    .append(j.name).append("\", ")
                    .append(j.className).append(".class, ")
                    .append(j.subtypeOf).append(".class")
                    .append("));\n");
        }
        sb.append("        \n")
                .append("        return l;\n")
                .append("    }\n")
                .append("}");

        String s = sb.toString();
        try {
            FileObject fo = filer.createSourceFile(fullName);
            try (Writer w = fo.openWriter()) {
                w.write(s);
            }
        } catch (Throwable t){
            t.printStackTrace();
        }

        //Finally, also create the service loader file
        try {
            try {
                //Delete if it already exists; failure just means it wasn't there.
                FileObject file = filer.getResource(StandardLocation.CLASS_OUTPUT, "", "META-INF/services/ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping");
                file.delete();
            } catch (IOException ignored){ }
            FileObject file = filer.createResource(StandardLocation.CLASS_OUTPUT, "", "META-INF/services/ai.konduit.serving.pipeline.api.serde.JsonSubTypesMapping");
            try (Writer w = file.openWriter()) {
                w.write(fullName);
            }
        } catch (IOException e){
            // BUGFIX: the original dropped the cause; preserve it.
            throw new RuntimeException("Error writing ", e);
        }
    }

    // Immutable triple: json name, implementing class, interface it maps under.
    private static class JsonSubType {
        private final String name;
        private final String className;
        private final String subtypeOf;

        public JsonSubType(String name, String className, String subtypeOf){
            this.name = name;
            this.className = className;
            this.subtypeOf = subtypeOf;
        }
    }

    // Derives the generated class name from the module name, e.g.
    // "konduit-serving-nd4j" -> "KonduitServingNd4jJsonMapping".
    private String className(){
        if(moduleName == null) {
            return "";
        }
        String[] split = moduleName.split("-");
        StringBuilder sb = new StringBuilder();
        for(String s : split){
            sb.append(Character.toUpperCase(s.charAt(0))).append(s.substring(1));
        }
        sb.append("JsonMapping");
        return sb.toString();
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/Dependency.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Identifies a single Maven dependency (GAV coordinates plus optional classifiers)
 * that a Konduit Serving module requires.<br>
 * gId = groupId - org.apache.commons, org.deeplearning4j, etc<br>
 * aId = artifactId - commons-lang3, deeplearning4j-core, etc<br>
 * ver = version - 3.6, 1.0.0-SNAPSHOT, etc<br>
 * classifier - may be empty. Maven classifier, sometimes used for different hardware devices (linux-x86_64, etc)<br>
 * cReq - Only applies when multiple classifiers exist, at which point it specifies how those classifier dependencies
 * should be combined - i.e., do we need just ONE of them (i.e., ANY) or ALL of them?
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface Dependency {
    String gId();
    String aId();
    String ver();
    String[] classifier() default {}; //Empty array means 'no classifier'
    Req cReq() default Req.ANY;
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/InheritRequiredDependencies.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Inherit the required dependencies from the specified module (by name, for example: konduit-serving-nd4j), instead of
 * defining the {@link RequiresDependenciesAll} section with the same content.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface InheritRequiredDependencies {
    String value(); //Name of the module whose required dependencies should be inherited
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/ModuleInfo.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
/**
 * Declares the Konduit Serving module name (for example "konduit-serving-nd4j") for the
 * module containing the annotated class. The annotation processors in this package read this
 * name when writing the generated module metadata files.
 *
 * Runtime retention is declared explicitly for consistency with the other annotations in this
 * package (Dependency, Requires, etc.), which all use RUNTIME retention; without it the
 * annotation would silently default to CLASS retention and be invisible to reflection.
 */
@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
public @interface ModuleInfo {
    String value(); //The module name, e.g. "konduit-serving-nd4j"
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/Req.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
/**
 * Specifies how a set of items (dependencies or classifiers) must be satisfied:
 * all of them, or any one of them.
 */
public enum Req {
    ALL,    // Every item in the set is required
    ANY     // At least one item in the set is required
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/Requires.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * A set of {@link Dependency} values together with a {@link Req} that specifies whether
 * all of them or any one of them must be present.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface Requires {
    Dependency[] value();           //The dependencies in this requirement set
    Req requires() default Req.ANY; //How the dependencies combine: ALL or ANY
    @interface List {
        Requires[] value();
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/RequiresDependenciesAll.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Dependencies that are required by this module in order to execute - every listed
 * {@link Requires} entry must be satisfied.
 * Note these are dependencies other than the ones already included in the module's Maven dependencies.
 * For example, backends (CPU or GPU) for ND4J, CPU or GPU native dependencies for Tensorflow, etc.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface RequiresDependenciesAll {
    Requires[] value();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/RequiresDependenciesAny.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Dependencies that are required by this module in order to execute - at least one of the
 * listed {@link Requires} entries must be satisfied.
 * Note these are dependencies other than the ones already included in the module's Maven dependencies.
 * For example, backends (CPU or GPU) for ND4J, CPU or GPU native dependencies for Tensorflow, etc.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface RequiresDependenciesAny {
    Requires[] value();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/module/RequiresDependenciesProcessor.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.module;
import ai.konduit.serving.annotation.AnnotationUtils;
import ai.konduit.serving.annotation.runner.CanRun;
import com.google.auto.service.AutoService;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.ElementFilter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
@SupportedAnnotationTypes({"ai.konduit.serving.annotation.module.ModuleInfo",
        "ai.konduit.serving.annotation.module.RequiresDependenciesAny",
        "ai.konduit.serving.annotation.module.RequiresDependenciesAll",
        "ai.konduit.serving.annotation.module.InheritRequiredDependencies"})
@SupportedSourceVersion(SourceVersion.RELEASE_8)
@AutoService(Processor.class)
public class RequiresDependenciesProcessor extends AbstractProcessor {
    public static final String INHERIT_MODULE_PREFIX = "inherit:";

    private String moduleName;                              //Set from @ModuleInfo; must be known by the final round
    private final List<String> toWrite = new ArrayList<>(); //Encoded requirement strings, written in the final round

    /**
     * Collects dependency requirements from {@link RequiresDependenciesAll}, {@link RequiresDependenciesAny}
     * and {@link InheritRequiredDependencies} and, in the final round, writes them to the generated
     * "ai.konduit.serving.annotation.module.RequiresDependencies" metadata file.
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment env) {
        if(env.processingOver()){
            if(moduleName == null && !toWrite.isEmpty()){
                //Handle incremental build situation: usually occurs in IDEs, where the class with the annotation
                //has been modified and gets recompiled in isolation (without any of the other classes)
                //In this case, the generated file probably already exists, and we don't need to do anything
                if(AnnotationUtils.existsAndContains(processingEnv.getFiler(), "ai.konduit.serving.annotation.module.RequiresDependencies", toWrite))
                    return false;
                List<TypeElement> types1 = ElementFilter.typesIn(env.getElementsAnnotatedWith(RequiresDependenciesAll.class));
                List<TypeElement> types2 = ElementFilter.typesIn(env.getElementsAnnotatedWith(RequiresDependenciesAny.class));
                List<TypeElement> types3 = ElementFilter.typesIn(env.getElementsAnnotatedWith(InheritRequiredDependencies.class));
                throw new IllegalStateException("No class in this module is annotated with @ModuleInfo - a class with " +
                        "@ModuleInfo(\"your-module-name\") should be added to the module that has the @RequiresDependenciesAll or " +
                        "@RequiresDependenciesAny or @InheritRequiredDependencies annotation: " + types1 + ", " + types2 + ", " + types3);
            }
            writeFile();
        } else {
            //Get module name from the (single) @ModuleInfo-annotated class
            if(moduleName == null){
                for(TypeElement te : ElementFilter.typesIn(env.getElementsAnnotatedWith(ModuleInfo.class))){
                    moduleName = te.getAnnotation(ModuleInfo.class).value();
                    break;
                }
            }
            //Get the dependency requirements for the module from @RequiresDependenciesAll
            //Each Requires is encoded as its own entry - every entry must be satisfied
            for(TypeElement te : ElementFilter.typesIn(env.getElementsAnnotatedWith(RequiresDependenciesAll.class))){
                for(Requires require : te.getAnnotation(RequiresDependenciesAll.class).value()){
                    toWrite.add(encodeRequires(require));
                }
            }
            //Get the dependency requirements for the module from @RequiresDependenciesAny
            //Encoded as {{Requires},{Requires},...} - any one of the entries must be satisfied
            for(TypeElement te : ElementFilter.typesIn(env.getElementsAnnotatedWith(RequiresDependenciesAny.class))){
                Requires[] requires = te.getAnnotation(RequiresDependenciesAny.class).value();
                List<String> encoded = new ArrayList<>(requires.length);
                for(Requires require : requires){
                    encoded.add(encodeRequires(require));
                }
                toWrite.add("{" + String.join(",", encoded) + "}");
            }
            //Get the inherited dependency requirements for the module from @InheritRequiredDependencies
            for(TypeElement te : ElementFilter.typesIn(env.getElementsAnnotatedWith(InheritRequiredDependencies.class))){
                toWrite.add(INHERIT_MODULE_PREFIX + te.getAnnotation(InheritRequiredDependencies.class).value());
            }
        }
        return false; //Allow other processors to process ModuleInfo
    }

    /**
     * Encodes a single {@link Requires} as "[dep,dep,...]" (ALL of them required) or
     * "{dep,dep,...}" (ANY one of them required), with each dependency encoded by
     * {@link #process(String, String, String, String[], Req)}.
     */
    private static String encodeRequires(Requires require){
        Dependency[] deps = require.value();
        List<String> depsStrList = new ArrayList<>(deps.length);
        for(Dependency d : deps){
            //g:a:v:(any or all of classifiers)
            depsStrList.add(process(d.gId(), d.aId(), d.ver(), d.classifier(), d.cReq()));
        }
        String joined = String.join(",", depsStrList);
        return require.requires() == Req.ALL ? "[" + joined + "]" : "{" + joined + "}";
    }

    /**
     * Encodes a single dependency as "g:a:v", optionally followed by a classifier suffix:
     * ":classifier" for exactly one classifier, ":[c1,c2]" (all required) or ":{c1,c2}"
     * (any one required) for multiple. The result is double-quoted.
     */
    private static String process(String g, String a, String v, String[] cl, Req r){
        StringBuilder sb = new StringBuilder();
        sb.append("\"");
        sb.append(g).append(":").append(a).append(":").append(v);
        if(cl != null && cl.length == 1){
            sb.append(":").append(cl[0]);
        } else if(cl != null && cl.length > 1){
            sb.append(":");
            if(r == Req.ALL){
                sb.append("[").append(String.join(",", cl)).append("]");
            } else {
                //Any of
                sb.append("{").append(String.join(",", cl)).append("}");
            }
        }
        sb.append("\"");
        return sb.toString();
    }

    /**
     * Writes the collected requirement strings (each prefixed with the module name) to the
     * generated metadata file. Called once, in the final processing round.
     */
    protected void writeFile(){
        if(toWrite.isEmpty()) //Can be empty if @ModuleInfo exists but no required dependencies
            toWrite.add("{}"); //Means "no requirements"
        Filer filer = processingEnv.getFiler();
        List<String> toWrite2 = new ArrayList<>();
        for(String s : toWrite){
            toWrite2.add(moduleName + "," + s);
        }
        AnnotationUtils.writeFile(filer, "ai.konduit.serving.annotation.module.RequiresDependencies", toWrite2);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/runner/CanRun.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.runner;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Defines the PipelineStep instance(s) that this PipelineStepRunner can execute.<br>
 * The name of the module containing the annotated runner is recorded alongside these
 * entries (taken from {@code @ModuleInfo}) when the annotation is processed.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface CanRun {
    Class<?>[] value(); //The PipelineStep class(es) this runner can execute
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation
|
java-sources/ai/konduit/serving/konduit-serving-annotation/0.3.0/ai/konduit/serving/annotation/runner/CanRunProcessor.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.annotation.runner;
import ai.konduit.serving.annotation.AnnotationUtils;
import ai.konduit.serving.annotation.module.ModuleInfo;
import com.google.auto.service.AutoService;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.*;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementFilter;
import java.util.*;
/**
* Collect runner metadata:
* {@code @CanRun(SomePipeline.class)} annotation on a PipelineStepRunner means that the specified PipelineStep can
* be run by the annotated PipelineStepRunner class. Note that in some cases, a given PipelineStepRunner may not be
* able to run a particular instance of this type of PipelineStep due to some configuration or versioning issue (but it
* must be able to run _some_ of these types of PipelineStep instances)
* <br>
* During processing, this processor writes a "META-INF/konduit-serving/ai.konduit.serving.annotation.runner.CanRun" file
* with content like: ai.konduit.serving.pipeline.impl.step.logging.LoggingPipelineStep,ai.konduit.serving.pipeline.impl.step.logging.LoggingPipelineStepRunner<br>
* which should be interpreted as "LoggingPipelineStep can be run by LoggingPipelineStepRunner"
*
* @author Alex Black
*/
@SupportedAnnotationTypes({"ai.konduit.serving.annotation.runner.CanRun", "ai.konduit.serving.annotation.module.ModuleInfo"})
@SupportedSourceVersion(SourceVersion.RELEASE_8)
@AutoService(Processor.class)
public class CanRunProcessor extends AbstractProcessor {
    private final List<String> toWrite = new ArrayList<>(); //"pipelineClass,runnerClass" entries; module name appended on write
    private String moduleName;                              //Set from @ModuleInfo; must be known by the final round

    /**
     * Collects (pipeline step, runner) pairs from {@code @CanRun} annotations and, in the final
     * round, writes them to the generated CanRun metadata file together with the module name.
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment env) {
        if(env.processingOver()){
            if(moduleName == null && !toWrite.isEmpty()){
                //Handle incremental build situation: usually occurs in IDEs, where the class with the @CanRun annotation
                //has been modified and gets recompiled in isolation (without any of the other classes)
                //In this case, the generated file probably already exists, and we don't need to do anything
                if(AnnotationUtils.existsAndContains(processingEnv.getFiler(), CanRun.class.getName(), toWrite))
                    return false;
                List<TypeElement> types = ElementFilter.typesIn(env.getElementsAnnotatedWith(CanRun.class));
                throw new IllegalStateException("No class in this module is annotated with @ModuleInfo - a class with " +
                        "@ModuleInfo(\"your-module-name\") should be added to the module that has the @CanRun(...) annotation: " + types + " - " + toWrite);
            }
            writeFile();
        } else {
            //Get module name from the (single) @ModuleInfo-annotated class
            if(moduleName == null){
                for(TypeElement te : ElementFilter.typesIn(env.getElementsAnnotatedWith(ModuleInfo.class))){
                    moduleName = te.getAnnotation(ModuleInfo.class).value();
                    break;
                }
            }
            //Collect info for writing at end
            Element canRunElement = processingEnv.getElementUtils().getTypeElement(CanRun.class.getName());
            TypeMirror canRunType = canRunElement.asType();
            for (TypeElement annotated : ElementFilter.typesIn(env.getElementsAnnotatedWith(CanRun.class))) {
                String[] values = canRunValueClassNames(annotated, canRunType);
                if(values != null) {
                    for (String s : values) {
                        //Format: pipelineClass,runnerClass(,module appended later) - i.e., "this type of pipeline
                        //step (in specified module) can be run by this type of runner"
                        toWrite.add(s + "," + annotated.toString());
                    }
                }
            }
        }
        return false; //Allow other processors to process ModuleInfo
    }

    /**
     * Extracts the fully-qualified class names from the value() of the @CanRun annotation on the
     * given element, or returns null if no matching annotation mirror is found.
     * Class values cannot be read directly via getAnnotation during processing, so they are read
     * from the annotation mirror instead.
     * See https://area-51.blog/2009/02/13/getting-class-values-from-annotations-in-an-annotationprocessor/
     */
    private String[] canRunValueClassNames(TypeElement annotated, TypeMirror canRunType) {
        for (AnnotationMirror am : annotated.getAnnotationMirrors()) {
            //TypeMirror.equals is not specified for semantic type comparison - use Types.isSameType instead
            if (!processingEnv.getTypeUtils().isSameType(am.getAnnotationType(), canRunType))
                continue;
            for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : am.getElementValues().entrySet()) {
                if ("value".equals(entry.getKey().getSimpleName().toString())) {
                    String s = entry.getValue().toString(); //e.g. {ai.konduit.something.SomeClass.class, ...}
                    s = s.replace("{", "").replace("}", "");
                    String[] values = s.split(", ?");
                    for (int i = 0; i < values.length; i++) {
                        if (values[i].endsWith(".class")) {
                            values[i] = values[i].substring(0, values[i].length() - 6);
                        }
                    }
                    return values;
                }
            }
        }
        return null;
    }

    /**
     * Writes the collected "pipelineClass,runnerClass,module" lines to the generated file.
     * Called once, in the final processing round.
     */
    protected void writeFile(){
        if(toWrite.isEmpty()) //Can be empty if @ModuleInfo exists but no runners
            return;
        Filer filer = processingEnv.getFiler();
        List<String> toWrite2 = new ArrayList<>();
        for(String s : toWrite){
            toWrite2.add(s + "," + moduleName);
        }
        AnnotationUtils.writeFile(filer, CanRun.class, toWrite2);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/MemMapConfig.java
|
/*
*
* * ******************************************************************************
* * *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.config;
import ai.konduit.serving.util.ObjectMappers;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
 * Configuration for managing serving of memory-mapped files. The goal is to mem-map
 * and serve a large array stored in "arrayPath" and get slices of this array on demand
 * by index. If an index is specified that does not match an index of the mem-mapped array,
 * a default or "unknown" vector is inserted into the slice instead, which is stored in
 * "unkVectorPath".
 *
 * For instance, let's say we want to mem-map [[1, 2, 3], [4, 5, 6]], a small array with two
 * valid slices. Our unknown vector is simply [0, 0, 0] in this example. Now, if we query for
 * the indices {-2, 1} we'd get [[0, 0, 0], [4, 5, 6]].
 *
 * NOTE: the class javadoc previously appeared between the annotations and the class keyword;
 * it has been moved above the annotations so documentation tooling reliably associates it
 * with the class.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class MemMapConfig implements Serializable, TextConfig {
    public final static String ARRAY_URL = "arrayPath";
    public final static String INITIAL_MEM_MAP_SIZE = "initialMemmapSize";
    public final static long DEFAULT_INITIAL_SIZE = 1000000000;
    public final static String WORKSPACE_NAME = "memMapWorkspace";

    private String arrayPath, unkVectorPath;    //Locations of the mem-mapped array and the "unknown" vector
    @Builder.Default
    private long initialMemmapSize = DEFAULT_INITIAL_SIZE;
    @Builder.Default
    private String workSpaceName = WORKSPACE_NAME;

    /** Deserializes a MemMapConfig from its JSON representation. */
    public static MemMapConfig fromJson(String json){
        return ObjectMappers.fromJson(json, MemMapConfig.class);
    }

    /** Deserializes a MemMapConfig from its YAML representation. */
    public static MemMapConfig fromYaml(String yaml){
        return ObjectMappers.fromYaml(yaml, MemMapConfig.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/TextConfig.java
|
/* ******************************************************************************
* Copyright (c) 2022 Konduit K.K.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package ai.konduit.serving.config;
import ai.konduit.serving.util.ObjectMappers;
import io.vertx.core.json.JsonObject;
/**
 * TextConfig is an interface for any configuration in Konduit Serving that should be convertible to/from JSON and YAML.
 * This interface does two things:
 * (a) Adds default toJson(), toYaml() and toJsonObject() methods to the class, using the shared Jackson mappers
 * (b) Is used in testing to provide coverage tracking for to/from JSON/YAML testing
 *
 * @author Alex Black
 */
public interface TextConfig {
    /**
     * Convert this configuration to a JSON string using the shared Jackson mappers.
     *
     * @return the JSON representation of this configuration
     */
    default String toJson() {
        return ObjectMappers.toJson(this);
    }
    /**
     * Convert this configuration to a YAML string using the shared Jackson mappers.
     *
     * @return the YAML representation of this configuration
     */
    default String toYaml() {
        return ObjectMappers.toYaml(this);
    }
    /**
     * Convert this configuration to a Vert.x {@link JsonObject} (serializes to JSON, then parses).
     *
     * @return the {@link JsonObject} representation of this configuration
     */
    default JsonObject toJsonObject() {
        return new JsonObject(ObjectMappers.toJson(this));
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/ColumnDistribution.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.config.metrics;
import ai.konduit.serving.config.TextConfig;
import ai.konduit.serving.util.ObjectMappers;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerType;
import org.nd4j.shade.jackson.annotation.JsonTypeInfo;
import static org.nd4j.shade.jackson.annotation.JsonTypeInfo.As.PROPERTY;
import static org.nd4j.shade.jackson.annotation.JsonTypeInfo.Id.NAME;
/**
 * Column distribution represents statistics and normalizer
 * information for how to transform or denormalize values
 * based on distribution information.
 *
 * @author Adam Gibson
 */
@Data
@Builder
@JsonTypeInfo(use = NAME, include = PROPERTY)
@AllArgsConstructor
@NoArgsConstructor
public class ColumnDistribution implements TextConfig {
    private double mean,min,max,standardDeviation;  //Summary statistics of the column's distribution
    private NormalizerType normalizerType;          //ND4J normalizer type used with these statistics
    /** Deserializes a ColumnDistribution from its JSON representation. */
    public static ColumnDistribution fromJson(String json) {
        return ObjectMappers.fromJson(json, ColumnDistribution.class);
    }
    /** Deserializes a ColumnDistribution from its YAML representation. */
    public static ColumnDistribution fromYaml(String yaml) {
        return ObjectMappers.fromYaml(yaml, ColumnDistribution.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/MetricsConfig.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.config.metrics;
import ai.konduit.serving.config.TextConfig;
import ai.konduit.serving.config.metrics.impl.ClassificationMetricsConfig;
import ai.konduit.serving.config.metrics.impl.RegressionMetricsConfig;
import io.micrometer.core.instrument.binder.MeterBinder;
import org.nd4j.shade.jackson.annotation.JsonSubTypes;
import org.nd4j.shade.jackson.annotation.JsonTypeInfo;
import java.util.Map;
import static org.nd4j.shade.jackson.annotation.JsonTypeInfo.As.PROPERTY;
import static org.nd4j.shade.jackson.annotation.JsonTypeInfo.Id.NAME;
/**
 * A {@link TextConfig} associated with
 * {@link MeterBinder} implementations provided as part of konduit-serving
 *
 * @author Adam Gibson
 */
@JsonSubTypes({
        @JsonSubTypes.Type(value = RegressionMetricsConfig.class, name = "RegressionMetricsConfig"),
        @JsonSubTypes.Type(value = NoOpMetricsConfig.class, name = "NoOpMetricsConfig"),
        @JsonSubTypes.Type(value = ClassificationMetricsConfig.class, name = "ClassificationMetricsConfig"),
})
@JsonTypeInfo(use = NAME, include = PROPERTY)
public interface MetricsConfig extends TextConfig {
    /**
     * {@link MeterBinder} implementation associated with this configuration
     *
     * @return the meter binder class associated with this configuration
     */
    Class<? extends MeterBinder> metricsBinderImplementation();
    /**
     * The configuration values, keyed by name.
     *
     * @return map of configuration name to value
     */
    Map<String,Object> configValues();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/MetricsRenderer.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.config.metrics;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.binder.MeterBinder;
/**
 * An updatable {@link MeterBinder} that allows
 * updates of metrics beyond {@link MeterBinder#bindTo(MeterRegistry)}.
 *
 * This allows encapsulation of logic for doing things like
 * calling {@link Counter#increment()}.
 *
 * @author Adam Gibson
 */
public interface MetricsRenderer extends MeterBinder {
    /**
     * The configuration for the metrics.
     * @return the {@link MetricsConfig} backing this renderer
     */
    MetricsConfig config();
    /**
     * Updates the metrics based on given arguments.
     * The expected argument types depend on the implementation.
     * @param args implementation-specific update values
     */
    void updateMetrics(Object...args);
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/NoOpMetricsConfig.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.config.metrics;
import ai.konduit.serving.config.metrics.impl.MetricsBinderRendererAdapter;
import ai.konduit.serving.util.ObjectMappers;
import io.micrometer.core.instrument.binder.MeterBinder;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Collections;
import java.util.Map;
/**
 * A no op {@link MetricsConfig}
 * for use with the {@link MetricsBinderRendererAdapter}.
 * Carries no configuration values of its own.
 *
 * @author Adam Gibson
 */
@Builder
@NoArgsConstructor
@Data
public class NoOpMetricsConfig implements MetricsConfig {
    // Always binds via the pass-through adapter; no metrics-specific implementation.
    @Override
    public Class<? extends MeterBinder> metricsBinderImplementation() {
        return MetricsBinderRendererAdapter.class;
    }
    // No configuration values — this config is intentionally empty.
    @Override
    public Map<String, Object> configValues() {
        return Collections.emptyMap();
    }
    /**
     * Deserialize a {@link NoOpMetricsConfig} from JSON.
     * @param json the JSON string
     * @return the parsed configuration
     */
    public static NoOpMetricsConfig fromJson(String json){
        return ObjectMappers.fromJson(json, NoOpMetricsConfig.class);
    }
    /**
     * Deserialize a {@link NoOpMetricsConfig} from YAML.
     * @param yaml the YAML string
     * @return the parsed configuration
     */
    public static NoOpMetricsConfig fromYaml(String yaml){
        return ObjectMappers.fromYaml(yaml, NoOpMetricsConfig.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/impl/ClassificationMetricsConfig.java
|
package ai.konduit.serving.config.metrics.impl;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.util.ObjectMappers;
import io.micrometer.core.instrument.binder.MeterBinder;
import lombok.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * The configuration associated with {@link ClassificationMetrics} -
 * this class contains metadata needed for exposing metrics correctly for
 * {@link ClassificationMetrics}.
 *
 * @author Adam Gibson
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class ClassificationMetricsConfig implements MetricsConfig {

    // One label per output class; order should match the model's output columns.
    @Builder.Default
    private List<String> classificationLabels = new ArrayList<>(0);

    /**
     * {@inheritDoc}
     * <p>
     * The implementation class is resolved reflectively so this configuration
     * module does not need a compile-time dependency on the metrics module.
     */
    @Override
    @SneakyThrows
    @SuppressWarnings("unchecked") // Class.forName cannot express the MeterBinder bound; the named class implements it
    public Class<? extends MeterBinder> metricsBinderImplementation() {
        return (Class<? extends MeterBinder>) Class.forName("ai.konduit.serving.metrics.ClassificationMetrics");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Map<String, Object> configValues() {
        return Collections.singletonMap("classificationLabels",classificationLabels);
    }

    /**
     * Deserialize a {@link ClassificationMetricsConfig} from JSON.
     * @param json the JSON string
     * @return the parsed configuration
     */
    public static ClassificationMetricsConfig fromJson(String json) {
        return ObjectMappers.fromJson(json, ClassificationMetricsConfig.class);
    }

    /**
     * Deserialize a {@link ClassificationMetricsConfig} from YAML.
     * @param yaml the YAML string
     * @return the parsed configuration
     */
    public static ClassificationMetricsConfig fromYaml(String yaml) {
        return ObjectMappers.fromYaml(yaml, ClassificationMetricsConfig.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/impl/MetricsBinderRendererAdapter.java
|
package ai.konduit.serving.config.metrics.impl;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.config.metrics.MetricsRenderer;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.binder.MeterBinder;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
/**
 * An {@link MeterBinder} wrapper that provides default no op
 * {@link MetricsRenderer} implementations.
 *
 * @author Adam Gibson
 */
@AllArgsConstructor
@NoArgsConstructor
public class MetricsBinderRendererAdapter implements MetricsRenderer {

    // The wrapped binder; may be null when the no-args constructor is used
    // (e.g. by serialization frameworks).
    private MeterBinder meterBinder;

    /**
     * No configuration is associated with this adapter.
     * @return always null
     */
    @Override
    public MetricsConfig config() {
        return null;
    }

    /**
     * No-op: this adapter does not track updatable metrics.
     * @param args ignored
     */
    @Override
    public void updateMetrics(Object... args) {
    }

    /**
     * Delegates binding to the wrapped {@link MeterBinder}, if one was supplied.
     * @param registry the registry to bind to
     */
    @Override
    public void bindTo(MeterRegistry registry) {
        // Guard against NPE: the no-args constructor leaves meterBinder null,
        // in which case binding degrades to the adapter's no-op contract.
        if (meterBinder != null) {
            meterBinder.bindTo(registry);
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/impl/MultiLabelMetricsConfig.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.config.metrics.impl;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.util.ObjectMappers;
import io.micrometer.core.instrument.binder.MeterBinder;
import lombok.*;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * A metrics configuration for a metrics render
 * that, given a set of specified labels
 * takes in counts of columns to increment.
 * The input is either a matrix or a vector representing the columns
 * to increment the count by. The column counts should be the same order
 * as the specified labels for this configuration.
 *
 * @author Adam Gibson
 */
@Builder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode
public class MultiLabelMetricsConfig implements MetricsConfig {

    // One label per tracked column, in the same order as the incoming counts.
    @Getter
    private List<String> labels;

    /**
     * {@inheritDoc}
     * <p>
     * Resolved reflectively to avoid a compile-time dependency on the
     * metrics module, consistent with the other metrics configs.
     */
    @SneakyThrows
    @Override
    @SuppressWarnings("unchecked") // Class.forName cannot express the MeterBinder bound; the named class implements it
    public Class<? extends MeterBinder> metricsBinderImplementation() {
        return (Class<? extends MeterBinder>) Class.forName("ai.konduit.serving.metrics.MultiLabelMetrics");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Map<String, Object> configValues() {
        return Collections.singletonMap("labels",labels);
    }

    /**
     * Deserialize a {@link MultiLabelMetricsConfig} from JSON.
     * @param json the JSON string
     * @return the parsed configuration
     */
    public static MultiLabelMetricsConfig fromJson(String json) {
        return ObjectMappers.fromJson(json, MultiLabelMetricsConfig.class);
    }

    /**
     * Deserialize a {@link MultiLabelMetricsConfig} from YAML.
     * @param yaml the YAML string
     * @return the parsed configuration
     */
    public static MultiLabelMetricsConfig fromYaml(String yaml) {
        return ObjectMappers.fromYaml(yaml, MultiLabelMetricsConfig.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/config/metrics/impl/RegressionMetricsConfig.java
|
package ai.konduit.serving.config.metrics.impl;
import ai.konduit.serving.config.metrics.ColumnDistribution;
import ai.konduit.serving.config.metrics.MetricsConfig;
import ai.konduit.serving.util.ObjectMappers;
import io.micrometer.core.instrument.binder.MeterBinder;
import lombok.*;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * The configuration associated with {@code RegressionMetrics} -
 * this class contains metadata needed for exposing metrics correctly for
 * {@code RegressionMetrics} (which is resolved reflectively below).
 *
 * @author Adam Gibson
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class RegressionMetricsConfig implements MetricsConfig {
    // One label per regression output column, in output order.
    @Builder.Default
    private List<String> regressionColumnLabels = new ArrayList<>(0);
    // Aggregation applied per column (sum, mean, variance, ...), see SampleType.
    @Builder.Default
    private List<SampleType> sampleTypes = new ArrayList<>(0);
    // Optional per-column distribution metadata used when rendering metrics.
    @Builder.Default
    private List<ColumnDistribution> columnDistributions = new ArrayList<>(0);
    // Resolved reflectively to avoid a compile-time dependency on the metrics module.
    @Override
    @SneakyThrows
    public Class<? extends MeterBinder> metricsBinderImplementation() {
        return (Class<? extends MeterBinder>) Class.forName("ai.konduit.serving.metrics.RegressionMetrics");
    }
    // NOTE(review): only regressionColumnLabels is exposed here; sampleTypes and
    // columnDistributions are not included — confirm whether that is intentional.
    @Override
    public Map<String, Object> configValues() {
        return Collections.singletonMap("regressionColumnLabels", regressionColumnLabels);
    }
    /** Aggregation strategies for sampling a regression output column. */
    public enum SampleType {
        SUM,
        MEAN,
        VARIANCE_POP,
        VARIANCE_NOPOP,
        MAX,
        MIN,
        STDDEV_POP,
        STDDEV_NOPOP,
    }
    /**
     * Deserialize a {@link RegressionMetricsConfig} from JSON.
     * @param json the JSON string
     * @return the parsed configuration
     */
    public static RegressionMetricsConfig fromJson(String json) {
        return ObjectMappers.fromJson(json, RegressionMetricsConfig.class);
    }
    /**
     * Deserialize a {@link RegressionMetricsConfig} from YAML.
     * @param yaml the YAML string
     * @return the parsed configuration
     */
    public static RegressionMetricsConfig fromYaml(String yaml) {
        return ObjectMappers.fromYaml(yaml, RegressionMetricsConfig.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/input
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/input/adapter/InputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.input.adapter;
import ai.konduit.serving.input.conversion.ConverterArgs;
import java.io.IOException;
import java.util.Map;
/**
 * An input adapter covers converting input data of one
 * type to a suitable output type for use with
 * ETL pipeline components such as {@link org.datavec.api.transform.TransformProcess}.
 * Usually the input type is a raw {@link io.vertx.core.json.JsonArray}
 * or {@link io.vertx.core.buffer.Buffer} that is then mapped to some input
 * such as an ndarray or ArrowWritableRecordBatch.
 *
 * @param <INPUT_TYPE> the input type (usually json objects or buffers coming in off the wire)
 * @param <OUTPUT_TYPE> the output type for use with internal ETL tooling and inference
 *                      by a verticle
 * @author Adam Gibson
 */
public interface InputAdapter<INPUT_TYPE, OUTPUT_TYPE> {
    /**
     * Convert the input type
     * to the desired output type
     * given the {@link ConverterArgs}.
     *
     * @param input the input to convert
     * @param parameters the parameters relevant
     *                   for conversion of the output
     * @param contextData the routing-context data map available during conversion
     * @return the desired output
     * @throws IOException I/O exception
     */
    OUTPUT_TYPE convert(INPUT_TYPE input, ConverterArgs parameters, Map<String, Object> contextData) throws IOException;
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/input
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/input/conversion/BatchInputParser.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.input.conversion;
import ai.konduit.serving.input.adapter.InputAdapter;
import io.vertx.core.buffer.Buffer;
import io.vertx.ext.web.FileUpload;
import io.vertx.ext.web.RoutingContext;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.datavec.api.records.Record;
import org.datavec.api.writable.Writable;
import org.datavec.arrow.recordreader.ArrowWritableRecordBatch;
import org.nd4j.common.base.Preconditions;
import org.nd4j.common.primitives.Pair;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.io.IOException;
import java.util.*;
/**
 * Parses a whole multi part upload buffer
 * and converts it to an {@link INDArray}
 * minibatch.
 * <p>
 * Uses {@link InputAdapter} specified by name
 * allowing conversion of each type of input file's
 * raw content to an {@link INDArray}
 *
 * @author Adam Gibson
 */
@Slf4j
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class BatchInputParser {
    // Converter per input name; each turns a raw upload Buffer into a Writable or record batch.
    private Map<String, InputAdapter<Buffer, ?>> converters;
    // Optional per-input converter arguments (currently unused by createBatch, which passes null).
    private Map<String, ConverterArgs> converterArgs;
    // The expected input names, one per model input, in order.
    private List<String> inputParts;
    /**
     * Create a batch from the {@link RoutingContext}
     *
     * @param routingContext the routing context to create the batch from
     * @return the proper ndarray batch with the ndarrays merged
     * with a batch per input
     * @throws IOException I/O Exception
     */
    public Record[] createBatch(RoutingContext routingContext) throws IOException {
        //partition the input content by name
        Map<String, List<BatchPartInfo>> partInfo = partInfoForUploads(routingContext);
        if (partInfo.isEmpty()) {
            throw new IllegalArgumentException("No parts resolved for file uploads!");
        } else if (!inputParts.containsAll(partInfo.keySet())) {
            throw new IllegalArgumentException("Illegal part info resolved. Part info keys were " + partInfo.keySet() + " while input parts were " + inputParts);
        }
        //batch size
        // One Record per configured input; each is seeded with a single null slot.
        Record[] inputBatches = new Record[inputParts.size()];
        for (int j = 0; j < inputBatches.length; j++) {
            inputBatches[j] =
                    new org.datavec.api.records.impl.Record(
                            new ArrayList<>(inputParts.size()),
                            null);
            inputBatches[j].getRecord().add(null);
        }
        Map<Integer, List<List<Writable>>> missingIndices = new LinkedHashMap<>();
        for (int i = 0; i < inputParts.size(); i++) {
            if (inputParts.get(i) == null || !partInfo.containsKey(inputParts.get(i))) {
                throw new IllegalStateException("No part found for part " + inputParts.get(i)
                        + " available parts " + partInfo.keySet());
            }
            List<BatchPartInfo> batch = partInfo.get(inputParts.get(i));
            for (int j = 0; j < batch.size(); j++) {
                Pair<String, Integer> partNameAndIndex = partNameAndIndex(batch.get(j).getPartName());
                Buffer buffer = loadBuffer(routingContext,
                        batch.get(j).getFileUploadPath());
                Object convert = convert(buffer, partNameAndIndex.getFirst(), null, routingContext);
                Preconditions.checkNotNull(convert, "Converted writable was null!");
                //set the name
                if (convert instanceof Writable) {
                    Writable writable = (Writable) convert;
                    // NOTE(review): the record was seeded with exactly one element above,
                    // yet set(j, ...) is called for any j — looks like it could throw
                    // IndexOutOfBoundsException when a part has more than one upload. Confirm.
                    inputBatches[i].getRecord().set(j, writable);
                } else {
                    // ArrowWritableRecordBatch is used directly as a List<List<Writable>>;
                    // its rows are flattened into individual Records below.
                    ArrowWritableRecordBatch arrow = (ArrowWritableRecordBatch) convert;
                    missingIndices.put(j, arrow);
                }
            }
        }
        // If any input produced an Arrow record batch, the batch rows replace the
        // per-input records entirely (NOTE(review): confirm this is intended when
        // both Writable and Arrow inputs are mixed — the Writable results are dropped).
        if (!missingIndices.isEmpty()) {
            List<Record> newRetRecords = new ArrayList<>();
            for (Map.Entry<Integer, List<List<Writable>>> entry : missingIndices.entrySet()) {
                for (List<Writable> record : entry.getValue()) {
                    newRetRecords.add(new org.datavec.api.records.impl.Record(record, null));
                }
            }
            return newRetRecords.toArray(new Record[newRetRecords.size()]);
        }
        return inputBatches;
    }
    /**
     * Returns a list of {@link BatchPartInfo}
     * for each part by name.
     * The "name" is meant to match 1
     * name per input in to a computation graph
     * such that each part name is:
     * inputName[index]
     *
     * @param ctx the context to get the part info
     *            from
     * @return a map indexing part name to a list of parts
     * for each input
     */
    private Map<String, List<BatchPartInfo>> partInfoForUploads(RoutingContext ctx) {
        if (ctx.fileUploads().isEmpty()) {
            throw new IllegalStateException("No files found for part info!");
        } else {
            log.debug("Found " + ctx.fileUploads().size() + " file uploads");
        }
        Map<String, List<BatchPartInfo>> ret = new LinkedHashMap<>();
        //parse each file upload all at once
        for (FileUpload upload : ctx.fileUploads()) {
            //the part name: inputName[index]
            String name = upload.name();
            //likely a colon for a tensorflow name got passed in
            //verify against the name in the configuration and set it to that
            // (spaces are assumed to be mangled ':' characters from TF-style names)
            if (name.contains(" ")) {
                name = name.replace(" ", ":");
                String inputName = name;
                if(inputName.contains("[")) {
                    inputName = inputName.substring(0, name.lastIndexOf("["));
                }
                if (!inputParts.contains(inputName)) {
                    throw new IllegalStateException("Illegal name for multi part passed in " + upload.name());
                } else {
                    log.warn("Corrected input name " + upload.name() + " to " + name);
                }
            }
            //split the input name and the index
            Pair<String, Integer> partNameAndIndex = partNameAndIndex(name);
            //the part info for this particular file
            BatchPartInfo batchPartInfo = new BatchPartInfo(
                    partNameAndIndex.getRight(), upload.uploadedFileName(), name);
            //add the input name and accumulate the part info for each input
            if (!ret.containsKey(partNameAndIndex.getFirst())) {
                ret.put(partNameAndIndex.getFirst(), new ArrayList<>());
            }
            List<BatchPartInfo> batchPartInfos = ret.get(partNameAndIndex.getFirst());
            batchPartInfos.add(batchPartInfo);
        }
        //sort based on index
        for (List<BatchPartInfo> info : ret.values()) {
            Collections.sort(info);
        }
        return ret;
    }
    /**
     * Use the converter specified
     * by name to convert a
     * raw {@link Buffer}
     * to a proper input for inference
     *
     * @param input the raw content
     * @param name the name of the input
     *             converter to use
     * @param params the params to use where needed
     * @param routingContext RoutingContext
     * @return converted INDArray
     * @throws IOException I/O Exception
     */
    public Object convert(Buffer input, String name, ConverterArgs params, RoutingContext routingContext)
            throws IOException {
        if (!converters.containsKey(name)) {
            throw new IllegalArgumentException("Illegal name for converter " + name + " not found!");
        }
        return converters.get(name).convert(input, params, routingContext.data());
    }
    /**
     * Load the buffer from each file
     *
     * @param ctx the context to load from
     * @param uploadedFileName the uploaded file path
     * @return the file contents for the file part
     */
    private Buffer loadBuffer(RoutingContext ctx, String uploadedFileName) {
        // Blocking read is used; callers are expected to tolerate this on upload handling.
        return ctx.vertx().fileSystem().readFileBlocking(uploadedFileName);
    }
    // Splits "inputName[partIndex]" into (inputName, partIndex); a bare name maps to index 0.
    private Pair<String, Integer> partNameAndIndex(String name) {
        //inputName[partIndex]
        //1 part only
        if (name.indexOf('[') < 0) {
            return Pair.of(name, 0);
        }
        String outputName = name.substring(0, name.indexOf('['));
        int partIndex = Integer.parseInt(name.substring(name.indexOf('[') + 1, name.lastIndexOf(']')));
        return Pair.of(outputName, partIndex);
    }
    /** Metadata for a single uploaded part: its index, temp-file path, and raw part name. */
    @Data
    @AllArgsConstructor
    public static class BatchPartInfo implements Comparable<BatchPartInfo> {
        private int index;
        private String fileUploadPath;
        private String partName;
        // Order parts by their declared index so batches assemble in upload order.
        @Override
        public int compareTo(BatchPartInfo batchPartInfo) {
            return Integer.compare(index, batchPartInfo.index);
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/input
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/input/conversion/ConverterArgs.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.input.conversion;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.datavec.api.transform.TransformProcess;
import org.datavec.api.transform.schema.Schema;
import org.datavec.image.transform.ImageTransformProcess;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * The converter arguments
 * needed for input binary data.
 * Carries optional schemas, transform processes, and scalar lists
 * that individual {@link ai.konduit.serving.input.adapter.InputAdapter}
 * implementations may consume.
 *
 * @author Adam Gibson
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class ConverterArgs implements Serializable {
    // Optional record schema describing the expected input columns.
    private Schema schema;
    // Optional DataVec transform process applied to tabular input.
    private TransformProcess transformProcess;
    // Optional image transform process applied to image input.
    private ImageTransformProcess imageTransformProcess;
    // Generic scalar parameter lists; meaning is adapter-specific.
    @Builder.Default
    private List<Integer> integers = new ArrayList<>();
    @Builder.Default
    private List<Long> longs = new ArrayList<>();
    @Builder.Default
    private List<Float> floats = new ArrayList<>();
    @Builder.Default
    private List<Double> doubles = new ArrayList<>();
    @Builder.Default
    private List<String> strings = new ArrayList<>();
    // Image layout the pipeline requires after processing (default NCHW).
    @Builder.Default
    private String imageProcessingRequiredLayout = "NCHW";
    // Image layout of the incoming data before processing (default NCHW).
    @Builder.Default
    private String imageProcessingInitialLayout = "NCHW";
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/metrics/MetricType.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.metrics;
/**
 * Metric types exposed for prometheus scraping.
 * The first group are system/JVM metrics; the final group are
 * machine-learning output metrics.
 *
 * @author Adam Gibson
 */
public enum MetricType {
    CLASS_LOADER,
    JVM_MEMORY,
    JVM_GC,
    PROCESSOR,
    JVM_THREAD,
    LOGGING_METRICS,
    NATIVE,
    GPU,
    //note these are machine learning metrics, not system metrics
    //these are meant to analyze the output coming form the neural network when running
    //in production
    CLASSIFICATION,
    REGRESSION,
    CUSTOM_MULTI_LABEL
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/model/SavedModelConfig.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.model;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
 * Configuration for loading a TensorFlow SavedModel:
 * the model path, tag, signature key, and the input/output tensor ordering.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SavedModelConfig {
    private String savedModelPath, modelTag, signatureKey;
    // NOTE(review): "saveModelOutputOrder" is inconsistently named vs
    // "savedModelInputOrder" (missing 'd'); renaming would break the
    // lombok-generated getter/setter API, so it is only flagged here.
    private List<String> savedModelInputOrder, saveModelOutputOrder;
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/model/TensorDataType.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.model;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.compression.CompressedDataBuffer;
import org.nd4j.linalg.compression.CompressionDescriptor;
/**
 * Possible data types for tensors. Comes with conversions from TensorFlow
 * and Python and between ND4J types.
 * @deprecated To be removed - see https://github.com/KonduitAI/konduit-serving/issues/298
 */
@Deprecated
public enum TensorDataType {
    INVALID,
    FLOAT,
    DOUBLE,
    INT32,
    UINT8,
    INT16,
    INT8,
    STRING,
    COMPLEX64,
    INT64,
    BOOL,
    QINT8,
    QUINT8,
    QINT32,
    BFLOAT16,
    QINT16,
    QUINT16,
    UINT16,
    COMPLEX128,
    HALF,
    RESOURCE,
    VARIANT,
    UINT32,
    UINT64;
    /**
     * Map a tensor data type to a proto value found in tensorflow.
     * Generally, this is just replacing DT_ with empty
     * and returning enum.valueOf(string)
     *
     * @param value the input string
     * @return the associated {@link TensorDataType}
     */
    public static TensorDataType fromProtoValue(String value) {
        String valueReplace = value.replace("DT_", "");
        return TensorDataType.valueOf(valueReplace);
    }
    /**
     * Get the python name for the given data type
     *
     * @param tensorDataType the data type to get the python name for
     * @return float64 for double, float32 for float, float16 for half, otherwise
     * the type's name converted to lower case
     */
    public static String toPythonName(TensorDataType tensorDataType) {
        switch (tensorDataType) {
            case DOUBLE:
                return "float64";
            case FLOAT:
                return "float32";
            case HALF:
                return "float16";
            default:
                return tensorDataType.name().toLowerCase();
        }
    }
    /**
     * Convert this tensor type to the equivalent ND4J {@link DataType}.
     * Only a subset of types is supported.
     *
     * @param tensorDataType the tensor type to convert
     * @return the matching ND4J data type
     * @throws IllegalArgumentException for types with no ND4J equivalent
     */
    public static DataType toNd4jType(TensorDataType tensorDataType) {
        switch (tensorDataType) {
            case FLOAT:
                return DataType.FLOAT;
            case DOUBLE:
                return DataType.DOUBLE;
            case BOOL:
                return DataType.BOOL;
            case INT32:
                return DataType.INT32;
            case INT64:
                return DataType.INT64;
            case STRING:
                return DataType.UTF8;
            case HALF:
                return DataType.FLOAT16;
            default:
                throw new IllegalArgumentException("Unsupported type " + tensorDataType.name());
        }
    }
    /**
     * Convert an ND4J {@link DataType} to the equivalent tensor type.
     *
     * @param dataType the ND4J data type
     * @return the matching {@link TensorDataType}
     * @throws IllegalStateException for COMPRESSED (ambiguous; use the INDArray overload)
     * @throws IllegalArgumentException for unknown data types
     */
    public static TensorDataType fromNd4jType(DataType dataType) {
        switch (dataType) {
            case FLOAT:
                return TensorDataType.FLOAT;
            case LONG:
                return TensorDataType.INT64;
            case INT:
                return TensorDataType.INT32;
            case BOOL:
                return TensorDataType.BOOL;
            case DOUBLE:
                return TensorDataType.DOUBLE;
            case HALF:
                return TensorDataType.HALF;
            case UTF8:
                return TensorDataType.STRING;
            case COMPRESSED:
                throw new IllegalStateException("Unable to work with compressed data type. Could be 1 or more types.");
            case SHORT:
                return TensorDataType.INT16;
            default:
                throw new IllegalArgumentException("Unknown data type " + dataType);
        }
    }
    /**
     * Convert an {@link INDArray}'s data type to the equivalent tensor type.
     * For compressed arrays, the underlying compression algorithm name is
     * inspected to recover the original element type.
     *
     * @param array the array whose type to convert
     * @return the matching {@link TensorDataType}
     */
    public static TensorDataType fromNd4jType(INDArray array) {
        DataType dataType = array.dataType();
        switch (dataType) {
            case COMPRESSED:
                CompressedDataBuffer compressedData = (CompressedDataBuffer) array.data();
                CompressionDescriptor desc = compressedData.getCompressionDescriptor();
                String algo = desc.getCompressionAlgorithm();
                switch (algo) {
                    case "FLOAT16":
                        return HALF;
                    case "INT8":
                        return INT8;
                    case "UINT8":
                        return UINT8;
                    case "INT16":
                        return INT16;
                    case "UINT16":
                        return UINT16;
                    default:
                        throw new IllegalArgumentException("Unsupported compression algorithm: " + algo);
                }
            default:
                return fromNd4jType(dataType);
        }
    }
    /**
     * Convert this tensor type to the equivalent ND4J TensorFlow-conversion type.
     * This is a 1:1 name mapping; every constant has a counterpart.
     *
     * @return the matching {@link org.nd4j.tensorflow.conversion.TensorDataType}
     */
    public org.nd4j.tensorflow.conversion.TensorDataType toTFType(){
        switch (this){
            case INVALID:
                return org.nd4j.tensorflow.conversion.TensorDataType.INVALID;
            case FLOAT:
                return org.nd4j.tensorflow.conversion.TensorDataType.FLOAT;
            case DOUBLE:
                return org.nd4j.tensorflow.conversion.TensorDataType.DOUBLE;
            case INT32:
                return org.nd4j.tensorflow.conversion.TensorDataType.INT32;
            case UINT8:
                return org.nd4j.tensorflow.conversion.TensorDataType.UINT8;
            case INT16:
                return org.nd4j.tensorflow.conversion.TensorDataType.INT16;
            case INT8:
                return org.nd4j.tensorflow.conversion.TensorDataType.INT8;
            case STRING:
                return org.nd4j.tensorflow.conversion.TensorDataType.STRING;
            case COMPLEX64:
                return org.nd4j.tensorflow.conversion.TensorDataType.COMPLEX64;
            case INT64:
                return org.nd4j.tensorflow.conversion.TensorDataType.INT64;
            case BOOL:
                return org.nd4j.tensorflow.conversion.TensorDataType.BOOL;
            case QINT8:
                return org.nd4j.tensorflow.conversion.TensorDataType.QINT8;
            case QUINT8:
                return org.nd4j.tensorflow.conversion.TensorDataType.QUINT8;
            case QINT32:
                return org.nd4j.tensorflow.conversion.TensorDataType.QINT32;
            case BFLOAT16:
                return org.nd4j.tensorflow.conversion.TensorDataType.BFLOAT16;
            case QINT16:
                return org.nd4j.tensorflow.conversion.TensorDataType.QINT16;
            case QUINT16:
                return org.nd4j.tensorflow.conversion.TensorDataType.QUINT16;
            case UINT16:
                return org.nd4j.tensorflow.conversion.TensorDataType.UINT16;
            case COMPLEX128:
                return org.nd4j.tensorflow.conversion.TensorDataType.COMPLEX128;
            case HALF:
                return org.nd4j.tensorflow.conversion.TensorDataType.HALF;
            case RESOURCE:
                return org.nd4j.tensorflow.conversion.TensorDataType.RESOURCE;
            case VARIANT:
                return org.nd4j.tensorflow.conversion.TensorDataType.VARIANT;
            case UINT32:
                return org.nd4j.tensorflow.conversion.TensorDataType.UINT32;
            case UINT64:
                return org.nd4j.tensorflow.conversion.TensorDataType.UINT64;
            default:
                throw new IllegalStateException("Unknown tensor data type: " + this);
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/model
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/model/loader/ModelLoader.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.model.loader;
import io.vertx.core.buffer.Buffer;
/**
* Model loader. Given a path
* knows how to load a model of a specified type from disk.
*
* @param <T> the type of the model
* @author Adam Gibson
*/
public interface ModelLoader<T> {
    /**
     * Serialize the given model into an in-memory buffer.
     *
     * @param model the model to save
     * @return a buffer containing the binary representation of the model
     */
    Buffer saveModel(T model);
    /**
     * Load the model from wherever this loader is configured to read it.
     *
     * @return the loaded model
     * @throws Exception if an error occurs loading the model
     */
    T loadModel() throws Exception;
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/ClassificationMultiOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.BatchOutput;
import io.vertx.ext.web.RoutingContext;
import org.datavec.api.transform.schema.Schema;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A {@link MultiOutputAdapter} for classification.
* Internally it uses a cached {@link ClassifierOutputAdapter}
* to generate the json needed for obtaining interpretable
* results for classification.
*
* @author Adam Gibson
*/
public class ClassificationMultiOutputAdapter implements MultiOutputAdapter<INDArray[]> {
    // Cached adapter; rebuilt whenever the number of output names changes.
    private ClassifierOutputAdapter classifierOutputAdapter;

    /**
     * Adapt the first output array into an interpretable classification result,
     * keyed by the first output name. A {@link ClassifierOutputAdapter} is built
     * lazily from a schema with one double column per output name.
     *
     * @param array          model outputs; only {@code array[0]} is adapted
     * @param outputNames    output names, used as class labels; must not be null
     * @param routingContext Vert.x routing context, forwarded to the inner adapter
     * @return a map from the first output name to the classification batch output
     */
    @Override
    public Map<String, BatchOutput> adapt(INDArray[] array, List<String> outputNames, RoutingContext routingContext) {
        // BUG FIX: validate before use. The original called outputNames.size()
        // in the condition below BEFORE this null check, so a null argument
        // produced a bare NPE and the check never fired.
        Preconditions.checkNotNull(outputNames, "Output names not defined!");
        Map<String, BatchOutput> ret = new LinkedHashMap<>();
        if (classifierOutputAdapter == null || classifierOutputAdapter.getLabels().length != outputNames.size()) {
            Schema.Builder schemaBuilder = new Schema.Builder();
            for (int i = 0; i < outputNames.size(); i++) {
                schemaBuilder.addColumnDouble(outputNames.get(i));
            }
            classifierOutputAdapter = new ClassifierOutputAdapter(schemaBuilder.build());
        }
        ret.put(outputNames.get(0), classifierOutputAdapter.adapt(array[0], routingContext));
        return ret;
    }

    /** @return the single inner adapter type used by this multi-output adapter */
    @Override
    public List<Class<? extends OutputAdapter<?>>> outputAdapterTypes() {
        return Arrays.asList(ClassifierOutputAdapter.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/ClassifierOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.ClassifierOutput;
import io.vertx.ext.web.RoutingContext;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.datavec.api.transform.schema.Schema;
import org.dmg.pmml.FieldName;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* An output adapter for displaying
* {@link ClassifierOutput}
*
* @author Adam Gibson
*/
@NoArgsConstructor
@Slf4j
public class ClassifierOutputAdapter implements OutputAdapter<ClassifierOutput> {
    // Output schema: one column per class label.
    private Schema schema;
    // Cached PMML field names, one per schema column.
    private List<FieldName> fieldNames;

    /**
     * Create the adapter from the output schema; the schema's column names
     * are used as the class labels.
     *
     * @param schema the schema of the output
     */
    public ClassifierOutputAdapter(Schema schema) {
        this.schema = schema;
        fieldNames = new ArrayList<>(schema.numColumns());
        for (int i = 0; i < schema.numColumns(); i++) {
            fieldNames.add(FieldName.create(schema.getName(i)));
        }
    }

    /**
     * Adapt a probability array: argmax over the last dimension picks the
     * predicted class per row; vectors are promoted to a single-row matrix.
     *
     * @param array          probabilities, one row per example
     * @param routingContext Vert.x routing context (unused here)
     * @return the classification result with labels, decisions, and probabilities
     */
    @Override
    public ClassifierOutput adapt(INDArray array, RoutingContext routingContext) {
        INDArray argMax = Nd4j.argMax(array, -1);
        return ClassifierOutput.builder()
                .labels(getLabels())
                .decisions(argMax.data().asInt())
                .probabilities(array.isVector() ? new double[][]{array.toDoubleVector()} : array.toDoubleMatrix())
                .build();
    }

    /**
     * Adapt PMML evaluation results: for each example, read the
     * {@code probability(<label>)} field per schema column and pick the
     * highest-probability column as the decision.
     *
     * @param pmmlExamples   PMML result maps, one per example
     * @param routingContext Vert.x routing context (unused here)
     * @return the classification result
     * @throws IllegalStateException    if no schema was configured
     * @throws IllegalArgumentException if a probability field is missing
     */
    @Override
    public ClassifierOutput adapt(List<? extends Map<FieldName, ?>> pmmlExamples, RoutingContext routingContext) {
        if (schema == null) {
            throw new IllegalStateException("No inputSchema found. A inputSchema is required in order to create results.");
        }
        int[] labelIndices = new int[pmmlExamples.size()];
        double[][] values = new double[pmmlExamples.size()][schema.numColumns()];
        for (int i = 0; i < pmmlExamples.size(); i++) {
            Map<FieldName, ?> example = pmmlExamples.get(i);
            int maxIdx = -1;
            double compare = Double.NEGATIVE_INFINITY;
            for (int j = 0; j < schema.numColumns(); j++) {
                Double result = (Double) example.get(FieldName.create("probability(" + schema.getName(j) + ")"));
                if (result == null) {
                    throw new IllegalArgumentException("No label found for " + schema.getName(j));
                }
                if (result > compare) {
                    maxIdx = j;
                    // BUG FIX: track the best probability seen so far. The
                    // original assigned `compare = maxIdx` (the column index),
                    // corrupting the running maximum and selecting the wrong
                    // class whenever a later column's probability was below
                    // the current index value.
                    compare = result;
                }
                values[i][j] = result;
            }
            labelIndices[i] = maxIdx;
        }
        return ClassifierOutput.builder()
                .probabilities(values)
                .labels(getLabels())
                .decisions(labelIndices)
                .build();
    }

    /**
     * Dispatch on the runtime type of {@code input}: {@link INDArray} or a
     * PMML example list.
     *
     * @throws UnsupportedOperationException for any other input type
     */
    @Override
    public ClassifierOutput adapt(Object input, RoutingContext routingContext) {
        if (input instanceof INDArray) {
            INDArray arr = (INDArray) input;
            return adapt(arr, routingContext);
        } else if (input instanceof List) {
            List<? extends Map<FieldName, ?>> pmmlExamples = (List<? extends Map<FieldName, ?>>) input;
            return adapt(pmmlExamples, routingContext);
        }
        throw new UnsupportedOperationException("Unable to convert input of type " + input);
    }

    @Override
    public Class<ClassifierOutput> outputAdapterType() {
        return ClassifierOutput.class;
    }

    /** @return the class labels, i.e. the schema's column names in order */
    public String[] getLabels() {
        String[] labels = new String[schema.numColumns()];
        for (int i = 0; i < schema.numColumns(); i++) {
            labels[i] = schema.getName(i);
        }
        return labels;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/MultiOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.BatchOutput;
import io.vertx.ext.web.RoutingContext;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.List;
import java.util.Map;
/**
* Convert one or more input {@link INDArray}
* (one per output name) in to an appropriate
* json object representing the domain to be interpreted.
*
* @author Adam Gibson
* @deprecated To be removed - see https://github.com/KonduitAI/konduit-serving/issues/298
*/
@Deprecated
public interface MultiOutputAdapter<T> {
    /**
     * Adapt the raw model output into named {@link BatchOutput}s,
     * with the input ordered to match {@code outputNames}.
     *
     * @param input          the raw output to adapt (e.g. an array of {@link INDArray})
     * @param outputNames    the output names, in the same order as the input
     * @param routingContext Vert.x routing context for the current request
     * @return a map from output name to its adapted batch output
     */
    Map<String, BatchOutput> adapt(T input, List<String> outputNames, RoutingContext routingContext);
    /**
     * Lists the {@link OutputAdapter} implementations this adapter delegates to,
     * one per output. Used for documentation/metadata purposes.
     *
     * @return the output adapter types for this multi output adapter
     */
    List<Class<? extends OutputAdapter<?>>> outputAdapterTypes();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/OutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.BatchOutput;
import io.vertx.ext.web.RoutingContext;
import org.dmg.pmml.FieldName;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.List;
import java.util.Map;
/**
* Convert an input {@link INDArray}
* or {@link FieldName} list map
* from PMML to human readable json.
*
* @param <T> the type to convert to
* @author Adam Gibson
* @deprecated To be removed - see https://github.com/KonduitAI/konduit-serving/issues/298
*/
@Deprecated
public interface OutputAdapter<T extends BatchOutput> {
    /**
     * Adapt a raw {@link INDArray} model output into the target batch output type.
     *
     * @param array          the input array
     * @param routingContext Vert.x routing context for the current request
     * @return the adapted output
     */
    T adapt(INDArray array, RoutingContext routingContext);
    /**
     * Adapt a list of PMML evaluation results (one {@link FieldName}-keyed map
     * per example) into the target batch output type.
     *
     * @param pmmlExamples   the list of examples to convert
     * @param routingContext Vert.x routing context for the current request
     * @return the adapted output
     */
    T adapt(List<? extends Map<FieldName, ?>> pmmlExamples, RoutingContext routingContext);
    /**
     * Adapt an arbitrary object — a fallback for inputs that are neither
     * PMML results nor {@link INDArray}s.
     *
     * @param input          the input to convert
     * @param routingContext Vert.x routing context for the current request
     * @return the adapted output
     */
    T adapt(Object input, RoutingContext routingContext);
    /**
     * The concrete output type produced by this adapter. This metadata is
     * used for documentation generation.
     *
     * @return adapter output type
     */
    Class<T> outputAdapterType();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/RawMultiOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.BatchOutput;
import io.vertx.ext.web.RoutingContext;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * A {@link MultiOutputAdapter} that passes model output through unchanged.
 * Internally it uses a cached {@link RawOutputAdapter}
 * to wrap each output array without further interpretation.
 *
 * @author Adam Gibson
 */
public class RawMultiOutputAdapter implements MultiOutputAdapter<INDArray[]> {
    // Lazily created, stateless pass-through adapter reused for every output.
    private RawOutputAdapter rawOutputAdapter;

    /**
     * Wrap every output array, keyed by its output name, without any
     * transformation.
     *
     * @param array          the model outputs, one per output name
     * @param outputNames    the output names, in the same order as {@code array}
     * @param routingContext Vert.x routing context, forwarded to the inner adapter
     * @return a map from each output name to its raw wrapped array
     */
    @Override
    public Map<String, BatchOutput> adapt(INDArray[] array, List<String> outputNames, RoutingContext routingContext) {
        Map<String, BatchOutput> ret = new LinkedHashMap<>();
        if (rawOutputAdapter == null) {
            rawOutputAdapter = new RawOutputAdapter();
        }
        for (int i = 0; i < array.length; i++) {
            ret.put(outputNames.get(i), rawOutputAdapter.adapt(array[i], routingContext));
        }
        return ret;
    }

    /** @return the inner adapter type actually used by this class */
    @Override
    public List<Class<? extends OutputAdapter<?>>> outputAdapterTypes() {
        // BUG FIX: the original returned ClassifierOutputAdapter.class (a
        // copy-paste from ClassificationMultiOutputAdapter), but this class
        // delegates exclusively to RawOutputAdapter.
        return Arrays.asList(RawOutputAdapter.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/RawOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.NDArrayOutput;
import io.vertx.ext.web.RoutingContext;
import org.dmg.pmml.FieldName;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.List;
import java.util.Map;
/**
 * An {@link OutputAdapter} that passes model output through unchanged,
 * wrapping each {@link INDArray} in an {@link NDArrayOutput}.
 */
public class RawOutputAdapter implements OutputAdapter<NDArrayOutput> {
    /**
     * Wrap the raw array without any transformation.
     *
     * @param array          the model output
     * @param routingContext Vert.x routing context (unused)
     * @return the array wrapped in an {@link NDArrayOutput}
     */
    @Override
    public NDArrayOutput adapt(INDArray array, RoutingContext routingContext) {
        return NDArrayOutput.builder().ndArray(array).build();
    }

    /** PMML results cannot be represented as a raw ndarray. */
    @Override
    public NDArrayOutput adapt(List<? extends Map<FieldName, ?>> pmmlExamples, RoutingContext routingContext) {
        throw new UnsupportedOperationException("Unable to convert pmml to ndarrays.");
    }

    /**
     * Adapt an arbitrary object; only {@link INDArray} inputs are supported.
     *
     * @throws UnsupportedOperationException for any other input type.
     *         FIX: the original silently returned {@code null} here, hiding
     *         configuration errors from callers; the sibling adapters in this
     *         package all throw for unsupported inputs.
     */
    @Override
    public NDArrayOutput adapt(Object input, RoutingContext routingContext) {
        if (input instanceof INDArray) {
            NDArray input2 = null; // placeholder removed; see cast below
            return adapt((INDArray) input, routingContext);
        }
        throw new UnsupportedOperationException("Unable to convert input of type " + input);
    }

    @Override
    public Class<NDArrayOutput> outputAdapterType() {
        return NDArrayOutput.class;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/RegressionMultiOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.BatchOutput;
import io.vertx.ext.web.RoutingContext;
import org.datavec.api.transform.schema.Schema;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * A {@link MultiOutputAdapter} for regression.
 * Internally it uses a cached {@link RegressionOutputAdapter}
 * to generate the json needed for obtaining interpretable
 * results for regression.
 *
 * @author Adam Gibson
 */
public class RegressionMultiOutputAdapter implements MultiOutputAdapter<INDArray[]> {
    // Lazily-built adapter, reused across calls once the schema is known.
    private RegressionOutputAdapter regressionOutputAdapter;

    /**
     * Adapt the first output array into a regression result, keyed by the
     * first output name. The inner adapter is created on first use from a
     * schema with one double column per output name.
     *
     * @param array          model outputs; only {@code array[0]} is adapted
     * @param outputNames    the output names, used as schema column names
     * @param routingContext Vert.x routing context, forwarded to the inner adapter
     * @return a map from the first output name to the regression batch output
     */
    @Override
    public Map<String, BatchOutput> adapt(INDArray[] array, List<String> outputNames, RoutingContext routingContext) {
        if (regressionOutputAdapter == null) {
            Schema.Builder builder = new Schema.Builder();
            for (String outputName : outputNames) {
                builder.addColumnDouble(outputName);
            }
            regressionOutputAdapter = new RegressionOutputAdapter(builder.build());
        }
        Map<String, BatchOutput> adapted = new LinkedHashMap<>();
        adapted.put(outputNames.get(0), regressionOutputAdapter.adapt(array[0], routingContext));
        return adapted;
    }

    /** @return the single inner adapter type used by this multi-output adapter */
    @Override
    public List<Class<? extends OutputAdapter<?>>> outputAdapterTypes() {
        return Arrays.asList(RegressionOutputAdapter.class);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/RegressionOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.RegressionOutput;
import io.vertx.ext.web.RoutingContext;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.datavec.api.transform.schema.Schema;
import org.dmg.pmml.FieldName;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Convert the input based on the input
* {@link Schema} to {@link RegressionOutput}
* representing real valued output.
*
* @author Adam Gibson
*/
@AllArgsConstructor
@NoArgsConstructor
public class RegressionOutputAdapter implements OutputAdapter<RegressionOutput> {
    // Output schema: one column per regression target.
    private Schema schema;
    // Cached PMML field names, one per schema column.
    private List<FieldName> fieldNames;

    /**
     * Create the output adapter with the output inputSchema.
     *
     * @param schema the inputSchema of the output
     */
    public RegressionOutputAdapter(Schema schema) {
        this.schema = schema;
        fieldNames = new ArrayList<>(schema.numColumns());
        for (int i = 0; i < schema.numColumns(); i++) {
            fieldNames.add(FieldName.create(schema.getName(i)));
        }
    }

    /**
     * Adapt a raw array of real-valued predictions.
     *
     * @param array          predictions, one row per example
     * @param routingContext Vert.x routing context (unused here)
     * @return the regression output wrapping the values
     */
    @Override
    public RegressionOutput adapt(INDArray array, RoutingContext routingContext) {
        return RegressionOutput
                .builder()
                .values(array.toDoubleMatrix())
                .build();
    }

    /**
     * Adapt PMML evaluation results: read each schema field's value per example.
     *
     * @param pmmlExamples   PMML result maps, one per example
     * @param routingContext Vert.x routing context (unused here)
     * @return the regression output with one row per example
     * @throws IllegalStateException    if no schema was configured
     * @throws IllegalArgumentException if an expected field value is missing
     */
    @Override
    public RegressionOutput adapt(List<? extends Map<FieldName, ?>> pmmlExamples, RoutingContext routingContext) {
        if (schema == null) {
            throw new IllegalStateException("No inputSchema found. A inputSchema is required in order to create results.");
        }
        // BUG FIX: size the second dimension from the schema, not from
        // pmmlExamples.get(0).size(). The original threw on an empty list and
        // went out of bounds whenever an example map held fewer entries than
        // the schema has columns (the loop below runs to schema.numColumns()).
        double[][] values = new double[pmmlExamples.size()][schema.numColumns()];
        for (int i = 0; i < pmmlExamples.size(); i++) {
            Map<FieldName, ?> example = pmmlExamples.get(i);
            for (int j = 0; j < schema.numColumns(); j++) {
                Double result = (Double) example.get(fieldNames.get(j));
                if (result == null) {
                    // Diagnostic error instead of an opaque unboxing NPE,
                    // matching ClassifierOutputAdapter's behavior.
                    throw new IllegalArgumentException("No value found for " + schema.getName(j));
                }
                values[i][j] = result;
            }
        }
        return RegressionOutput.builder().values(values).build();
    }

    /**
     * Dispatch on the runtime type of {@code input}: {@link INDArray} or a
     * PMML example list.
     *
     * @throws UnsupportedOperationException for any other input type
     */
    @Override
    public RegressionOutput adapt(Object input, RoutingContext routingContext) {
        if (input instanceof INDArray) {
            INDArray arr = (INDArray) input;
            return adapt(arr, routingContext);
        } else if (input instanceof List) {
            List<? extends Map<FieldName, ?>> pmmlExamples = (List<? extends Map<FieldName, ?>>) input;
            return adapt(pmmlExamples, routingContext);
        }
        throw new UnsupportedOperationException("Unable to convert input of type " + input);
    }

    @Override
    public Class<RegressionOutput> outputAdapterType() {
        return RegressionOutput.class;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/SSDOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.BatchOutput;
import ai.konduit.serving.output.types.DetectedObjectsBatch;
import ai.konduit.serving.verticles.VerticleConstants;
import io.vertx.ext.web.RoutingContext;
import lombok.Getter;
import org.deeplearning4j.zoo.util.BaseLabels;
import org.deeplearning4j.zoo.util.Labels;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.*;
/**
* An input adapter for ssd in tensorflow.
*
* @author Adam Gibson
*/
public class SSDOutputAdapter implements MultiOutputAdapter<INDArray[]> {
    // Classpath resource containing the MSCOCO label map in pbtxt format.
    public final static String DEFAULT_LABELS_RESOURCE_NAME = "/mscoco_label_map.pbtxt";
    // Minimum confidence score; detections below this are dropped.
    private double threshold;
    // Set to {3, 0, 0} in the main constructor; not read elsewhere in this class.
    private int[] inputShape;
    // Class-index -> label lookup used when building detection results.
    private Labels labels;
    // Number of class labels; stored but not read elsewhere in this class.
    private int numLabels;
    // Standard TF SSD graph input tensor name.
    @Getter
    private String[] inputs = new String[]{"image_tensor"};
    // Standard TF SSD graph output tensor names.
    @Getter
    private String[] outputs = new String[]{"detection_boxes", "detection_scores", "detection_classes", "num_detections"};
    /**
     * Create the adapter with an explicit label lookup.
     *
     * @param threshold minimum detection confidence
     * @param labels    class-index to label lookup
     * @param numLabels number of class labels
     */
    public SSDOutputAdapter(double threshold, Labels labels, int numLabels) {
        this.threshold = threshold;
        inputShape = new int[]{3, 0, 0};
        this.labels = labels;
        this.numLabels = numLabels;
    }
    /** Create the adapter using the bundled MSCOCO label map. */
    public SSDOutputAdapter(double threshold, int numLabels) {
        this(threshold, getLabels(), numLabels);
    }
    /** Create the adapter, parsing labels from the given pbtxt stream. */
    public SSDOutputAdapter(double threshold, InputStream labels, int numLabels) {
        this(threshold, getLabels(labels, numLabels), numLabels);
    }
    /**
     * Parse a TF object-detection label map (pbtxt) into a {@link Labels} instance.
     * Scans for "id:" / "display_name:" token pairs and collects each display
     * name once both have been seen.
     *
     * NOTE(review): the numLabels parameter is unused here; returns null if the
     * BaseLabels constructor throws IOException (the exception is only printed).
     *
     * @param is        stream over the pbtxt label map
     * @param numLabels unused
     * @return the parsed labels, or null on I/O error
     */
    public static Labels getLabels(InputStream is, int numLabels) {
        try {
            return new BaseLabels() {
                protected ArrayList<String> getLabels() {
                    Scanner scanner = new Scanner(is);
                    int id1 = -1;
                    int count = 0;
                    List<String> ret = new ArrayList<>();
                    String name = null;
                    while (scanner.hasNext()) {
                        String token = scanner.next();
                        if (token.equals("id:")) {
                            id1 = scanner.nextInt();
                        }
                        if (token.equals("display_name:")) {
                            // Strip the surrounding quote characters from the value.
                            name = scanner.nextLine();
                            name = name.substring(2, name.length() - 1);
                        }
                        // Emit a label once both an id and a name have been read,
                        // then reset for the next entry.
                        if (id1 > 0 && name != null) {
                            ret.add(name);
                            id1 = -1;
                            name = null;
                        }
                    }
                    return (ArrayList<String>) ret;
                }
                @Override
                protected URL getURL() {
                    return null;
                }
                @Override
                protected String resourceName() {
                    return null;
                }
                @Override
                protected String resourceMD5() {
                    return null;
                }
            };
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
    /** @return labels parsed from the bundled MSCOCO label map resource */
    public static Labels getLabels() {
        return getLabels(SSDOutputAdapter.class.getResourceAsStream(DEFAULT_LABELS_RESOURCE_NAME), 100);
    }
    /**
     * Convert the SSD output arrays into detection batches scaled back to the
     * original image size (read from the routing context), keyed by output name.
     *
     * @param input          model outputs (boxes, scores, classes, ...)
     * @param outputNames    output names matched positionally against the detections
     * @param routingContext must carry ORIGINAL_IMAGE_HEIGHT/WIDTH in its data map
     * @return a map from output name to a detection batch
     */
    @Override
    public Map<String, BatchOutput> adapt(INDArray[] input, List<String> outputNames, RoutingContext routingContext) {
        int originalHeight = (int) routingContext.data().get(VerticleConstants.ORIGINAL_IMAGE_HEIGHT);
        int originalWidth = (int) routingContext.data().get(VerticleConstants.ORIGINAL_IMAGE_WIDTH);
        DetectedObjectsBatch[] detectedObjects = getPredictedObjects(input, threshold, outputNames.toArray(new String[outputNames.size()]), originalHeight, originalWidth);
        Map<String, BatchOutput> ret = new HashMap<>();
        for (int i = 0; i < outputNames.size(); i++) {
            ret.put(outputNames.get(i), detectedObjects[i]);
        }
        return ret;
    }
    @Override
    public List<Class<? extends OutputAdapter<?>>> outputAdapterTypes() {
        return null;
    }
    /**
     * Build one {@link DetectedObjectsBatch} per above-threshold detection.
     * Box coordinates arrive normalized as (y1, x1, y2, x2) and are scaled to
     * the original image dimensions; outputs are matched to roles by whether
     * their name contains "box", "class", or "score".
     *
     * NOTE(review): if no output name matches "score", scores stays null and
     * the loop below throws NPE — confirm output names upstream.
     */
    private DetectedObjectsBatch[] getPredictedObjects(INDArray[] outputs, double threshold, String[] outputNames, int originalHeight, int originalWidth) {
        INDArray boxes = null, classes = null, scores = null;
        for (int i = 0; i < outputs.length; i++) {
            if (outputNames[i].contains("box")) {
                boxes = outputs[i];
            } else if (outputNames[i].contains("class")) {
                classes = outputs[i];
            } else if (outputNames[i].contains("score")) {
                scores = outputs[i];
            }
        }
        List<DetectedObjectsBatch> detectedObjects = new ArrayList<>();
        for (int i = 0; i < scores.columns(); i++) {
            double score = scores.getDouble(0, i);
            if (score < threshold) {
                continue;
            }
            // Class index may come back rank-1 or rank-2 depending on the graph.
            int n = classes.rank() >= 2 ? classes.getInt(0, i) : classes.getInt(i);
            String label = labels.getLabel(n);
            // Normalized (y1, x1, y2, x2) scaled to original pixel coordinates.
            double y1 = boxes.getDouble(0, i, 0) * originalHeight;
            double x1 = boxes.getDouble(0, i, 1) * originalWidth;
            double y2 = boxes.getDouble(0, i, 2) * originalHeight;
            double x2 = boxes.getDouble(0, i, 3) * originalWidth;
            DetectedObjectsBatch d = new DetectedObjectsBatch();
            d.setCenterX((float) (x1 + x2) / 2);
            d.setCenterY((float) (y1 + y2) / 2);
            d.setWidth((float) (x2 - x1));
            d.setHeight((float) (y2 - y1));
            d.setPredictedClassNumbers(new int[]{n});
            d.setPredictedClasses(new String[]{label});
            d.setConfidences(new float[]{(float) score});
            detectedObjects.add(d);
        }
        return detectedObjects.toArray(new DetectedObjectsBatch[detectedObjects.size()]);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/adapter/YOLOOutputAdapter.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.adapter;
import ai.konduit.serving.output.types.BatchOutput;
import ai.konduit.serving.output.types.DetectedObjectsBatch;
import ai.konduit.serving.output.types.ManyDetectedObjects;
import ai.konduit.serving.verticles.VerticleConstants;
import io.vertx.ext.web.RoutingContext;
import lombok.Builder;
import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.nn.layers.objdetect.DetectedObject;
import org.deeplearning4j.nn.layers.objdetect.YoloUtils;
import org.deeplearning4j.zoo.model.YOLO2;
import org.deeplearning4j.zoo.model.helper.DarknetHelper;
import org.deeplearning4j.zoo.util.BaseLabels;
import org.deeplearning4j.zoo.util.ClassPrediction;
import org.deeplearning4j.zoo.util.Labels;
import org.deeplearning4j.zoo.util.darknet.COCOLabels;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.*;
@Slf4j
public class YOLOOutputAdapter implements MultiOutputAdapter<INDArray[]> {
private double threshold;
private int[] inputShape;
private Labels labels;
private INDArray boundingBoxPriors;
private int gridWidth;
private int gridHeight;
private int numLabels;
@Builder
public YOLOOutputAdapter(double threshold, int[] inputShape, Labels labels, int numLabels, double[][] boundingBoxPriors) {
this.labels = labels == null ? getLabels() : labels;
if (threshold == 0.0)
this.threshold = 0.5;
else
this.threshold = threshold;
if (inputShape != null)
this.inputShape = inputShape;
else
this.inputShape = new int[]{3, 608, 608};
this.labels = labels;
this.numLabels = numLabels;
if (boundingBoxPriors == null)
this.boundingBoxPriors = Nd4j.create(YOLO2.DEFAULT_PRIOR_BOXES).castTo(DataType.FLOAT);
else {
this.boundingBoxPriors = Nd4j.create(boundingBoxPriors).castTo(DataType.FLOAT);
}
gridWidth = DarknetHelper.getGridWidth(inputShape);
gridHeight = DarknetHelper.getGridHeight(inputShape);
}
public YOLOOutputAdapter(double threshold, Labels labels, int numLabels) {
this.threshold = threshold;
inputShape = new int[]{3, 608, 608};
this.labels = labels;
this.numLabels = numLabels;
boundingBoxPriors = Nd4j.create(YOLO2.DEFAULT_PRIOR_BOXES).castTo(DataType.FLOAT);
gridWidth = DarknetHelper.getGridWidth(inputShape);
gridHeight = DarknetHelper.getGridHeight(inputShape);
}
public YOLOOutputAdapter(double threshold, int numLabels) {
this(threshold, getLabels(), numLabels);
}
public YOLOOutputAdapter(double threshold, InputStream labels, int numLabels) {
this(threshold, labels == null ? getLabels() : getLabels(labels, numLabels), numLabels);
}
private static Labels getLabels(InputStream is, int numLabels) {
try {
return new BaseLabels() {
protected ArrayList<String> getLabels() {
Scanner scanner = new Scanner(is);
int id1 = -1;
int count = 0;
List<String> ret = new ArrayList<>();
String name = null;
while (scanner.hasNext()) {
String token = scanner.next();
if (token.equals("id:")) {
id1 = scanner.nextInt();
}
if (token.equals("display_name:")) {
name = scanner.nextLine();
name = name.substring(2, name.length() - 1);
}
if (id1 > 0 && name != null) {
ret.add(name);
id1 = -1;
name = null;
}
}
return (ArrayList<String>) ret;
}
@Override
public List<List<ClassPrediction>> decodePredictions(INDArray predictions, int n) {
return super.decodePredictions(predictions, n);
}
@Override
protected URL getURL() {
return null;
}
@Override
protected String resourceName() {
return null;
}
@Override
protected String resourceMD5() {
return null;
}
};
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
private static Labels getLabels() {
try {
return new COCOLabels();
} catch (IOException e) {
return null;
}
}
@Override
public Map<String, BatchOutput> adapt(INDArray[] input, List<String> outputNames, RoutingContext routingContext) {
int originalHeight = (int) routingContext.data().get(VerticleConstants.ORIGINAL_IMAGE_HEIGHT);
int originalWidth = (int) routingContext.data().get(VerticleConstants.ORIGINAL_IMAGE_WIDTH);
DetectedObjectsBatch[] detectedObjects = getPredictedObjects(input, threshold, outputNames.toArray(new String[outputNames.size()]), originalHeight, originalWidth);
Map<String, BatchOutput> ret = new HashMap<>();
ret.put(outputNames.get(0), ManyDetectedObjects.builder().detectedObjectsBatches(detectedObjects).build());
return ret;
}
@Override
public List<Class<? extends OutputAdapter<?>>> outputAdapterTypes() {
return null;
}
private DetectedObjectsBatch[] getPredictedObjects(INDArray[] outputs, double threshold, String[] outputNames, int originalHeight, int originalWidth) {
// assuming "standard" output from TensorFlow using a "normal" YOLOv2 model
//INDArray permuted = outputs[0].permute(0, 3, 1, 2);
INDArray permuted = outputs[0];
INDArray activated = YoloUtils.activate(boundingBoxPriors, permuted);
List<DetectedObject> predictedObjects1 = YoloUtils.getPredictedObjects(boundingBoxPriors, activated, threshold, 0.4);
DetectedObjectsBatch[] detectedObjects = new DetectedObjectsBatch[predictedObjects1.size()];
int n = numLabels; // an arbitrary number of classes returned per object
for (int i = 0; i < detectedObjects.length; i++) {
DetectedObject detectedObject = predictedObjects1.get(i);
long x = Math.round(originalWidth * predictedObjects1.get(i).getCenterX() / gridWidth);
long y = Math.round(originalHeight * predictedObjects1.get(i).getCenterY() / gridHeight);
long w = Math.round(originalWidth * predictedObjects1.get(i).getWidth() / gridWidth);
long h = Math.round(originalHeight * predictedObjects1.get(i).getHeight() / gridHeight);
detectedObjects[i] = new DetectedObjectsBatch();
detectedObjects[i].setCenterX(x);
detectedObjects[i].setCenterY(y);
detectedObjects[i].setWidth(w);
detectedObjects[i].setHeight(h);
detectedObjects[i].setPredictedClasses(new String[]{labels.getLabel(detectedObject.getPredictedClass())});
detectedObjects[i].setPredictedClassNumbers(new int[]{detectedObject.getPredictedClass()});
detectedObjects[i].setConfidences(new float[]{detectedObject.getClassPredictions().getFloat(detectedObject.getPredictedClass())});
}
return detectedObjects;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/types/BatchOutput.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.types;
import java.io.Serializable;
/**
 * A BatchOutput represents the output produced by an output adapter.
 * It carries the common fields every output needs when returning data
 * to the end user — currently just the batch id.
 *
 * @author Adam Gibson
 */
public interface BatchOutput extends Serializable {
    /**
     * Set the batch id for this batch output.
     * The batch id is used to identify the batch during retraining.
     *
     * @param batchId the batch id for the batch output
     */
    void setBatchId(String batchId);

    /**
     * Return the batch id for this batch output.
     * The batch id is used to identify the batch during retraining.
     *
     * @return the batch id; may be null when none has been assigned
     */
    String batchId();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/types/ClassifierOutput.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.types;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Output of a classification pipeline step.
 * NOTE: field order matters — Lombok's {@code @AllArgsConstructor} and
 * {@code @Builder} derive their parameter order from it.
 * {@code setBatchId(String)} required by {@link BatchOutput} is generated by {@code @Data}.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ClassifierOutput implements BatchOutput {
    /** Winning class index per example — presumably argmax of probabilities; TODO confirm with producer. */
    private int[] decisions;
    /** Per-example class probabilities, indexed as [example][class]. */
    private double[][] probabilities;
    /** Human-readable class labels, indexed by class number. */
    private String[] labels;
    /** Batch id used to identify this output during retraining. */
    private String batchId;

    @Override
    public String batchId() {
        return batchId;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/types/DetectedObjectsBatch.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.types;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * A single detected object from an object-detection model.
 * Coordinates are expressed in original-image pixels by the producing adapters.
 * NOTE: field order matters — Lombok's {@code @AllArgsConstructor} and
 * {@code @Builder} derive their parameter order from it.
 */
@NoArgsConstructor
@Data
@Builder
@AllArgsConstructor
public class DetectedObjectsBatch implements BatchOutput {
    /** X coordinate of the bounding-box center. */
    private float centerX;
    /** Y coordinate of the bounding-box center. */
    private float centerY;
    /** Bounding-box width. */
    private float width;
    /** Bounding-box height. */
    private float height;
    /** Class indices predicted for this object (parallel to predictedClasses). */
    private int[] predictedClassNumbers;
    /** Human-readable labels predicted for this object (parallel to predictedClassNumbers). */
    private String[] predictedClasses;
    /** Confidence score per predicted class. */
    private float[] confidences;
    /** Batch id used to identify this output during retraining. */
    private String batchId;

    @Override
    public void setBatchId(String batchId) {
        this.batchId = batchId;
    }

    @Override
    public String batchId() {
        return batchId;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/types/ManyDetectedObjects.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.types;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * A batch output wrapping multiple {@link DetectedObjectsBatch} results.
 * The batch id is not stored here; it lives on each contained element.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ManyDetectedObjects implements BatchOutput {
    /** The detected objects for this batch; may be null/empty when nothing was detected. */
    private DetectedObjectsBatch[] detectedObjectsBatches;

    /**
     * Propagate the batch id to every contained {@link DetectedObjectsBatch}.
     * A null array is treated as a no-op instead of throwing (the original NPE'd).
     *
     * @param batchId the batch id to set on each element
     */
    @Override
    public void setBatchId(String batchId) {
        if (detectedObjectsBatches == null) {
            return;
        }
        for (DetectedObjectsBatch detectedObjectsBatch : detectedObjectsBatches) {
            detectedObjectsBatch.setBatchId(batchId);
        }
    }

    /**
     * Return the batch id shared by the contained batches, read from the first element.
     *
     * @return the batch id, or null when no batches are present
     *         (the original indexed [0] unconditionally and threw on empty output)
     */
    @Override
    public String batchId() {
        if (detectedObjectsBatches == null || detectedObjectsBatches.length == 0) {
            return null;
        }
        return detectedObjectsBatches[0].getBatchId();
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/types/NDArrayOutput.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.types;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.serde.jackson.shaded.NDArrayTextDeSerializer;
import org.nd4j.serde.jackson.shaded.NDArrayTextSerializer;
import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize;
import org.nd4j.shade.jackson.databind.annotation.JsonSerialize;
/**
 * A batch output carrying a raw ND4J array.
 * The array is (de)serialized via the ND4J text serializers so it survives
 * JSON transport.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class NDArrayOutput implements BatchOutput {
    /** Batch id used to identify this output during retraining. */
    private String batchId;
    /** The array payload; serialized to/from a text representation. */
    @JsonSerialize(using = NDArrayTextSerializer.class)
    @JsonDeserialize(using = NDArrayTextDeSerializer.class)
    private INDArray ndArray;

    @Override
    public void setBatchId(String batchId) {
        this.batchId = batchId;
    }

    @Override
    public String batchId() {
        return batchId;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/output/types/RegressionOutput.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.output.types;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Output of a regression pipeline step.
 * {@code setBatchId(String)} required by {@link BatchOutput} is generated by {@code @Data}.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class RegressionOutput implements BatchOutput {
    /** Regressed values, indexed as [example][output]. */
    private double[][] values;
    /** Batch id used to identify this output during retraining. */
    private String batchId;

    @Override
    public String batchId() {
        return batchId;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/pipeline
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/pipeline/config/NormalizationConfig.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.pipeline.config;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Configuration for data normalization in the ETL part of your pipeline.
 * Backed by a simple string-to-string key/value map.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class NormalizationConfig implements Serializable {
    // Defaults to an empty map; @Builder.Default keeps that default when
    // the builder is used without setting config.
    @Builder.Default
    private Map<String, String> config = new HashMap<>();

    /**
     * Add (or overwrite) a single normalization setting.
     *
     * @param key   setting name
     * @param value setting value
     */
    public void put(String key, String value) {
        config.put(key, value);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/pipeline
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/pipeline/config/ObjectDetectionConfig.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.pipeline.config;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
 * Configuration for object detection output of models.
 * Field order matters for Lombok's {@code @AllArgsConstructor}/{@code @Builder}.
 */
@Data
@AllArgsConstructor
@Builder
@NoArgsConstructor
public class ObjectDetectionConfig implements Serializable {
    /** Confidence threshold for keeping detections; defaults to 0.5. */
    @Builder.Default
    private double threshold = 0.5;
    /** Number of labels the model predicts. */
    private int numLabels;
    /** Path to the labels file, if any. */
    private String labelsPath;
    /** Default YOLO anchor (prior) boxes, as {width, height} pairs in grid units. */
    public static final double[][] DEFAULT_PRIOR_BOXES =
        {{0.57273, 0.677385}, {1.87446, 2.06253}, {3.33843, 5.47434}, {7.88282, 3.52778}, {9.77052, 9.16828}};
    /** Anchor boxes used for decoding detections; defaults to {@link #DEFAULT_PRIOR_BOXES}. */
    @Builder.Default
    private double[][] priors = DEFAULT_PRIOR_BOXES;
    /** Model input shape as {channels, height, width}; defaults to {3, 608, 608}. */
    @Builder.Default
    private int[] inputShape = {3, 608, 608};
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/settings
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/settings/constants/Constants.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.settings.constants;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
 * Important directory and file-name constants used across konduit-serving.
 *
 * @deprecated To be removed - https://github.com/KonduitAI/konduit-serving/issues/298
 */
@Deprecated
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class Constants {
    /**
     * Base name of the default konduit-serving working directory
     * (created under the user's home or configured root).
     */
    public static final String DEFAULT_WORKING_BASE_DIR_NAME = ".konduit-serving";
    /**
     * Default base directory name for the endpoint logs served by {@code /logs}.
     */
    public static final String DEFAULT_ENDPOINT_LOGS_DIR_NAME = "endpoint_logs";
    /**
     * Default directory name containing the command-line log files.
     */
    public static final String DEFAULT_COMMAND_LOGS_DIR_NAME = "command_logs";
    /**
     * Default directory name containing running-server data. Files here
     * typically hold server configurations, named {@code <pid>.data}.
     */
    public static final String SERVERS_DATA_DIR_NAME = "servers";
    /**
     * Name of the log file backing the {@code /logs} endpoint.
     */
    public static final String MAIN_ENDPOINT_LOGS_FILE = "main.log";
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/settings
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/settings/constants/EnvironmentConstants.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.settings.constants;
import io.vertx.ext.web.handler.BodyHandler;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
 * Environment-variable names recognized by konduit-serving.
 *
 * @deprecated To be removed - https://github.com/KonduitAI/konduit-serving/issues/298
 */
@Deprecated
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class EnvironmentConstants {
    /**
     * Environment variable setting the konduit-serving working directory.
     * The working directory holds runtime files generated by Vert.x or
     * konduit-serving itself: logs, running-process details, Vert.x cache files, etc.
     */
    public static final String WORKING_DIR = "KONDUIT_WORKING_DIR";
    /**
     * Environment variable setting the path where a konduit server keeps the
     * log files backing its {@code /logs} endpoint.
     */
    public static final String ENDPOINT_LOGS_DIR = "KONDUIT_ENDPOINT_LOGS_DIR";
    /**
     * Environment variable setting the directory containing konduit-serving's
     * command-line logs.
     */
    public static final String COMMAND_LOGS_DIR = "KONDUIT_COMMAND_LOGS_DIR";
    /**
     * Environment variable setting the directory where file uploads are kept
     * for Vert.x's {@link BodyHandler}.
     */
    public static final String FILE_UPLOADS_DIR = "KONDUIT_FILE_UPLOADS_DIR";
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/settings
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/settings/constants/PropertiesConstants.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.settings.constants;
import io.vertx.ext.web.handler.BodyHandler;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
 * System-property names recognized by konduit-serving
 * (the property-based counterparts of {@code EnvironmentConstants}).
 *
 * @deprecated To be removed - https://github.com/KonduitAI/konduit-serving/issues/298
 */
@Deprecated
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class PropertiesConstants {
    /**
     * System property setting the konduit-serving working directory.
     * The working directory holds runtime files generated by Vert.x or
     * konduit-serving itself: logs, running-process details, Vert.x cache files, etc.
     */
    public static final String WORKING_DIR = "konduit.working.dir";
    /**
     * System property setting the path where a konduit server keeps the
     * log files backing its {@code /logs} endpoint.
     */
    public static final String ENDPOINT_LOGS_DIR = "konduit.endpoint.logs.dir";
    /**
     * System property setting the directory containing konduit-serving's
     * command-line logs.
     */
    public static final String COMMAND_LOGS_DIR = "konduit.command.logs.dir";
    /**
     * System property setting the directory where file uploads are kept
     * for Vert.x's {@link BodyHandler}.
     */
    public static final String FILE_UPLOADS_DIR = "konduit.file.uploads.dir";
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/util/MetricRenderUtils.java
|
/*
*
* * ******************************************************************************
* *
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util;
import ai.konduit.serving.config.metrics.ColumnDistribution;
import org.nd4j.common.base.Preconditions;
/**
 * Util class for anything related to {@link ai.konduit.serving.config.metrics.MetricsRenderer}.
 * Provides normalize / de-normalize helpers driven by a {@link ColumnDistribution}.
 *
 * @author Adam Gibson
 */
public class MetricRenderUtils {
    /**
     * Normalize the given raw value based on the {@link ColumnDistribution}.
     * (The original Javadoc was a copy-paste of the de-normalize doc.)
     *
     * @param input the raw value to normalize
     * @param columnDistribution the column distribution to normalize with
     * @return the normalized value
     * @throws NullPointerException if the distribution or its normalizer type is null
     * @throws IllegalArgumentException for unsupported normalizer types
     */
    public static double normalizeValue(double input, ColumnDistribution columnDistribution) {
        // Consistency: deNormalizeValue already validated its inputs; this did not.
        Preconditions.checkNotNull(columnDistribution, "Column distribution must not be null!");
        Preconditions.checkNotNull(columnDistribution.getNormalizerType(), "Normalizer type is null!");
        switch (columnDistribution.getNormalizerType()) {
            case MIN_MAX:
                // NOTE(review): classic min-max scaling divides by (max - min); this
                // divides by max alone. It is, however, the exact inverse of
                // deNormalizeValue below — if fixed, both must change together.
                return (input - columnDistribution.getMin()) / columnDistribution.getMax();
            case STANDARDIZE:
                return (input - columnDistribution.getMean()) / columnDistribution.getStandardDeviation();
            default:
                throw new IllegalArgumentException("Illegal normalization type for normalizing." + columnDistribution.getNormalizerType());
        }
    }

    /**
     * De-normalize the given input based on the {@link ColumnDistribution};
     * inverse of {@link #normalizeValue(double, ColumnDistribution)}.
     *
     * @param input the normalized value to de-normalize
     * @param columnDistribution the column distribution to de-normalize with
     * @return the de-normalized value
     * @throws NullPointerException if the distribution or its normalizer type is null
     * @throws IllegalArgumentException for unsupported normalizer types
     */
    public static double deNormalizeValue(double input, ColumnDistribution columnDistribution) {
        Preconditions.checkNotNull(columnDistribution, "Column distribution must not be null!");
        Preconditions.checkNotNull(columnDistribution.getNormalizerType(), "Normalizer type is null!");
        switch (columnDistribution.getNormalizerType()) {
            case MIN_MAX:
                return (input * columnDistribution.getMax()) + columnDistribution.getMin();
            case STANDARDIZE:
                return (input * columnDistribution.getStandardDeviation()) + columnDistribution.getMean();
            default:
                throw new IllegalArgumentException("Illegal normalization type for reverting." + columnDistribution.getNormalizerType());
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/util/ObjectMappers.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util;
import lombok.NonNull;
import org.nd4j.shade.jackson.annotation.JsonAutoDetect;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.PropertyAccessor;
import org.nd4j.shade.jackson.core.JsonProcessingException;
import org.nd4j.shade.jackson.databind.*;
import org.nd4j.shade.jackson.dataformat.yaml.YAMLFactory;
import org.nd4j.shade.jackson.dataformat.yaml.YAMLGenerator;
import org.nd4j.shade.jackson.datatype.joda.JodaModule;
import java.io.IOException;
/**
 * A simple object mapper holder for using one single {@link ObjectMapper} across the whole project.
 * Both mappers are configured identically via {@code configureMapper}; the YAML
 * mapper additionally disables native type ids so Jackson's own polymorphic
 * type handling is used instead of YAML {@code !<TYPE>} tags.
 */
public class ObjectMappers {
    // Shared, pre-configured mapper for JSON (de)serialization.
    private static final ObjectMapper jsonMapper = configureMapper(new ObjectMapper());
    // Shared mapper for YAML (de)serialization.
    private static final ObjectMapper yamlMapper = configureMapper(new ObjectMapper(new YAMLFactory()
        .disable(YAMLGenerator.Feature.USE_NATIVE_TYPE_ID) // prevents YAML from adding `!<TYPE>` for polymorphic objects
    ));
    // Utility holder; not meant to be instantiated.
    private ObjectMappers() {
    }
    /**
     * Get the single shared object mapper for reading and writing JSON.
     *
     * @return the JSON object mapper
     */
    public static ObjectMapper json() {
        return jsonMapper;
    }
    /**
     * Get the single shared object mapper for reading and writing YAML.
     *
     * @return the YAML object mapper
     */
    public static ObjectMapper yaml() {
        return yamlMapper;
    }
private static ObjectMapper configureMapper(ObjectMapper ret) {
ret.registerModule(new JodaModule());
ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
ret.enable(SerializationFeature.INDENT_OUTPUT);
ret.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.NONE);
ret.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
ret.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY);
ret.setSerializationInclusion(JsonInclude.Include.NON_NULL);
if(ret.getFactory() instanceof YAMLFactory) {
ret.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
}
return ret;
}
/**
* Convert the specified object to a YAML String, throwing an unchecked exception (RuntimeException) if conversion fails
*
* @param o Object
* @return Object as YAML
*/
public static String toYaml(@NonNull Object o) {
try {
return yaml().writeValueAsString(o);
} catch (JsonProcessingException e) {
throw new RuntimeException("Error converting object of class " + o.getClass().getName() + " to YAML", e);
}
}
/**
* Convert the specified object to a JSON String, throwing an unchecked exception (RuntimeException) if conversion fails
*
* @param o Object
* @return Object as JSON
*/
public static String toJson(@NonNull Object o) {
try {
return json().writeValueAsString(o);
} catch (JsonProcessingException e) {
throw new RuntimeException("Error converting object of class " + o.getClass().getName() + " to JSON", e);
}
}
/**
* Convert the specified YAML String to an object of the specified class, throwing an unchecked exception (RuntimeException) if conversion fails
*
* @param yaml YAML string
* @param c Class for the object
* @return Object from YAML
*/
public static <T> T fromYaml(@NonNull String yaml, @NonNull Class<T> c) {
try {
return yaml().readValue(yaml, c);
} catch (IOException e) {
throw new RuntimeException("Error deserializing YAML string to class " + c.getName(), e);
}
}
/**
* Convert the specified YAML String to an object of the specified class, throwing an unchecked exception (RuntimeException) if conversion fails
*
* @param json JSON string
* @param c Class for the object
* @return Object from JSON
*/
public static <T> T fromJson(@NonNull String json, @NonNull Class<T> c) {
try {
return json().readValue(json, c);
} catch (IOException e) {
throw new RuntimeException("Error deserializing JSON string to class " + c.getName(), e);
}
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/util/PortUtils.java
|
/* ******************************************************************************
* Copyright (c) 2022 Konduit K.K.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package ai.konduit.serving.util;
import lombok.NoArgsConstructor;
import java.io.IOException;
import java.net.ServerSocket;
@NoArgsConstructor
public class PortUtils {

    /**
     * Finds a free TCP port by binding an OS-assigned ephemeral port and immediately
     * releasing it. Note the port is only *likely* still free afterwards — another
     * process may grab it between this call and actual use.
     *
     * @return single available port number
     * @throws IllegalStateException if no port could be allocated
     */
    public static int getAvailablePort() {
        try (ServerSocket probe = new ServerSocket(0)) {
            // Port 0 asks the OS for any free ephemeral port; read it back before closing.
            final int allocated = probe.getLocalPort();
            return allocated;
        } catch (IOException e) {
            throw new IllegalStateException("Cannot find available port: " + e.getMessage(), e);
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/util/WritableValueRetriever.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.util;
import org.datavec.api.writable.*;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
/**
 * Utilities for datavec's {@link Writable}: basic conversion between plain
 * Java objects and {@link Writable} types.
 *
 * @author Adam Gibson
 */
public class WritableValueRetriever {

    /**
     * Create a {@link Writable} wrapping the given value.
     *
     * @param input the input object; must not be null
     * @return the writable wrapping {@code input}
     * @throws IllegalArgumentException if the input type is not supported
     */
    public static Writable writableFromValue(Object input) {
        Preconditions.checkNotNull(input, "Unable to convert null value!");
        // The supported types are mutually exclusive, so check order does not matter.
        if (input instanceof INDArray) {
            return new NDArrayWritable((INDArray) input);
        }
        if (input instanceof String) {
            return new Text(input.toString());
        }
        if (input instanceof Double) {
            return new DoubleWritable((Double) input);
        }
        if (input instanceof Float) {
            return new FloatWritable((Float) input);
        }
        if (input instanceof Long) {
            return new LongWritable((Long) input);
        }
        if (input instanceof Integer) {
            return new IntWritable((Integer) input);
        }
        if (input instanceof Boolean) {
            return new BooleanWritable((Boolean) input);
        }
        if (input instanceof byte[]) {
            return new BytesWritable((byte[]) input);
        }
        throw new IllegalArgumentException("Unsupported type " + input.getClass().getName());
    }

    /**
     * Get the underlying value for the given {@link Writable}.
     *
     * @param writable the writable to get the value for
     * @return the underlying value represented by the {@link Writable}
     * @throws UnsupportedOperationException if the writable's type is not handled
     */
    public static Object getUnderlyingValue(Writable writable) {
        switch (writable.getType()) {
            case Float:
                return writable.toFloat();
            case Double:
                return writable.toDouble();
            case Int:
                return writable.toInt();
            case Long:
                return writable.toLong();
            case NDArray:
                return ((NDArrayWritable) writable).get();
            case Boolean:
                return ((BooleanWritable) writable).get();
            case Byte:
                return ((ByteWritable) writable).get();
            case Bytes:
                return ((BytesWritable) writable).getContent();
            case Text:
                return writable.toString();
            default:
                throw new UnsupportedOperationException();
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/verticles/Routable.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.verticles;
import io.vertx.core.Vertx;
import io.vertx.ext.web.Router;
/**
 * An interface representing an object
 * with a {@link Router}
 * instance and a {@link Vertx}
 * instance
 *
 * @author Adam Gibson
 */
public interface Routable {
    /**
     * Returns the {@link Router}
     * associated with this object
     *
     * @return router
     */
    Router router();
    /**
     * Returns the {@link Vertx}
     * instance associated with this object
     *
     * @return vertx instance
     */
    Vertx vertx();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/verticles/VerticleConstants.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.verticles;
/**
 * Constants for the config.json
 * for initializing verticles.
 *
 * @author Adam Gibson
 */
public class VerticleConstants {

    // --- General configuration keys ---
    public static final String KONDUIT_SERVING_PORT = "KONDUIT_SERVING_PORT";
    public static final String CONVERTED_INFERENCE_DATA = "convertedInferenceData";
    public static final String HTTP_PORT_KEY = "httpPort";
    public static final String TRANSACTION_ID = "transactionId";

    // --- Routing-context keys used when doing object recognition ---
    public static final String ORIGINAL_IMAGE_HEIGHT = "originalImageHeight";
    public static final String ORIGINAL_IMAGE_WIDTH = "originalImageWidth";

    // --- Memory-mapped vector configuration ---
    public static final String MEM_MAP_VECTOR_PATH = "memMapVectorPath";
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/verticles
|
java-sources/ai/konduit/serving/konduit-serving-api/0.3.0/ai/konduit/serving/verticles/base/BaseRoutableVerticle.java
|
/*
*
* * ******************************************************************************
* * * Copyright (c) 2015-2019 Skymind Inc.
* * * Copyright (c) 2022 Konduit K.K.
* * *
* * * This program and the accompanying materials are made available under the
* * * terms of the Apache License, Version 2.0 which is available at
* * * https://www.apache.org/licenses/LICENSE-2.0.
* * *
* * * Unless required by applicable law or agreed to in writing, software
* * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * * License for the specific language governing permissions and limitations
* * * under the License.
* * *
* * * SPDX-License-Identifier: Apache-2.0
* * *****************************************************************************
*
*
*/
package ai.konduit.serving.verticles.base;
import ai.konduit.serving.verticles.Routable;
import ai.konduit.serving.verticles.VerticleConstants;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.impl.RouterImpl;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.extern.slf4j.Slf4j;
/**
* A super class containing a router
* and boiler plate methods for managing
* http interaction.
*
* @author Adam Gibson
*/
@EqualsAndHashCode(callSuper = true)
@Slf4j
@Data
public abstract class BaseRoutableVerticle extends AbstractVerticle implements Routable {
private final static int DEFAULT_HTTP_PORT = 0; // 0 will find an available port when running HttpServer#listen
protected Router router;
protected int port;
public BaseRoutableVerticle() {
super();
}
/**
* Start an http server the port with the value configured
* as the httpPort key found in {@link #config()}
*/
protected void setupWebServer(Promise<Void> startPromise) {
RouterImpl router = (RouterImpl) router();
if (context != null && config().containsKey(VerticleConstants.HTTP_PORT_KEY)) {
String portKey = config().getValue(VerticleConstants.HTTP_PORT_KEY).toString();
port = Integer.parseInt(portKey);
} else {
port = DEFAULT_HTTP_PORT;
log.warn("No port defined in configuration! Using default port = " + port);
}
vertx.createHttpServer()
.requestHandler(router)
.exceptionHandler(Throwable::printStackTrace)
.listen(port, listenResult -> {
if (listenResult.failed()) {
log.error("Could not start HTTP server", listenResult.cause());
startPromise.fail(listenResult.cause());
} else {
log.info("Server started on port {}", port);
startPromise.complete();
}
});
}
@Override
public void start(Promise<Void> startPromise) {
setupWebServer(startPromise);
}
@Override
public void stop(Promise<Void> stopPromise) {
if (vertx != null) {
vertx.close(handler -> {
if(handler.succeeded()) {
log.debug("Shut down server.");
stopPromise.complete();
} else {
stopPromise.fail(handler.cause());
}
});
} else {
stopPromise.complete();
}
}
@Override
public Router router() {
return router;
}
@Override
public Vertx vertx() {
return vertx;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/build/GradleBuild.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.build;
import ai.konduit.serving.build.config.Config;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.Target;
import ai.konduit.serving.build.dependencies.Dependency;
import ai.konduit.serving.build.deployments.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.SystemUtils;
import org.gradle.tooling.GradleConnector;
import org.gradle.tooling.ProjectConnection;
import org.nd4j.common.base.Preconditions;
import java.io.*;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class GradleBuild {

    /**
     * Builds a Kotlin-DSL {@code Copy} task registration string that copies files matching
     * {@code fileMask} from {@code fromDir/build/<pluginOutput>} into {@code toDir}.
     * Backslashes are doubled so Windows paths survive embedding in the script literal.
     */
    private static String createCopyTask(String taskName, String fromDir, String toDir, String fileMask,
                                         String pluginOutput) {
        String built = (fromDir + File.separator + "build" + File.separator + pluginOutput).
                replace("\\","\\\\");; // NOTE(review): stray second semicolon (harmless empty statement)
        String deployed = (toDir.replace("\\","\\\\"));
        String retVal = "tasks.register<Copy>(\"" + taskName + "\") {\n" +
                "\t from(\"" + built + "\")\n" +
                "\t include(\"" + fileMask + "\")\n" +
                "\t into(\"" + deployed + "\")\n}\n";
        return retVal;
    }

    /**
     * Generates the Gradle build files (build.gradle.kts plus the gradlew wrapper scripts
     * and, when available, a Dockerfile) for the given build {@link Config} into
     * {@code outputDir}.
     *
     * @param outputDir directory the generated build files are written to
     * @param config    build configuration: dependencies, target, and deployments
     * @throws IOException if writing the generated files fails
     */
    public static void generateGradleBuildFiles(File outputDir, Config config) throws IOException {
        //TODO We need a proper solution for this!
        //For now - the problem with the creation of a manifest (only) JAR is that the "tasks.withType(Jar::class)" gets
        // put into the uber-jar.
        boolean uberjar = false;
        boolean classpathMF = false;
        for(Deployment d : config.deployments()){
            uberjar |= d instanceof UberJarDeployment;
            // NOTE(review): plain '=' (not '|=') means only the LAST deployment decides classpathMF;
            // a matching ClassPathDeployment earlier in the list is forgotten — confirm '|=' was intended.
            classpathMF = (d instanceof ClassPathDeployment && ((ClassPathDeployment) d).type() == ClassPathDeployment.Type.JAR_MANIFEST);
        }
        // Condition is equivalent to !(uberjar && classpathMF): both at once is unsupported.
        Preconditions.checkState(uberjar != classpathMF || !uberjar, "Unable to create both a classpath manifest (ClassPathDeployment)" +
                " and uber-JAR deployment at once");
        copyResource("/gradle/gradlew", new File(outputDir, "gradlew"));
        copyResource("/gradle/gradlew.bat", new File(outputDir, "gradlew.bat"));
        // NOTE(review): getResource() returns a URL (or null); String.valueOf() yields a
        // "file:/..." (or "null") string which is not a plain filesystem path, so exists()
        // will typically be false and the Dockerfile copy likely never happens — verify intent.
        File dockerResource = new File(String.valueOf(GradleBuild.class.getClassLoader().getResource("Dockerfile")));
        if (dockerResource.exists())
            FileUtils.copyFileToDirectory(dockerResource, outputDir);
        //Generate build.gradle.kts (and gradle.properties if necessary)
        StringBuilder kts = new StringBuilder();
        // Deployment-specific Kotlin imports go first in the generated script.
        for(Deployment d : config.deployments()){
            List<String> imports = d.gradleImports();
            if(imports != null && !imports.isEmpty()){
                for(String s : imports) {
                    kts.append("import ").append(s).append("\n");
                }
            }
        }
        // ----- Repositories Section -----
        // NOTE(review): Gradle Kotlin DSL normally requires the plugins {} block to precede
        // other script blocks such as repositories {} — confirm the generated script parses.
        kts.append("repositories {\n\tmavenCentral()\n\tmavenLocal()\n\tjcenter()\n\tmaven(\"https://oss.sonatype.org/content/repositories/snapshots\")\n}\n");
        // ----- Plugins Section -----
        kts.append("plugins { java \n");
        /*
        //Not yet released - uncomment this once gradle-javacpp-platform plugin is available
        //Set JavaCPP platforms - https://github.com/bytedeco/gradle-javacpp#the-platform-plugin
        kts.append("id(\"org.bytedeco.gradle-javacpp-platform\") version \"1.5.3\"\n"); //TODO THIS VERSION SHOULDN'T BE HARDCODED
        */
        for(Deployment d : config.deployments()){
            List<GradlePlugin> gi = d.gradlePlugins();
            if(gi != null && !gi.isEmpty()){
                for(GradlePlugin g : gi) {
                    // A version clause is only emitted when the plugin declares a version.
                    if (StringUtils.isNotEmpty(g.version()))
                        kts.append("\t").append("id(\"").append(g.id()).append("\"").append(") version \"").append(g.version()).append("\"\n");
                    else
                        kts.append("\t").append("id(\"").append(g.id()).append("\")\n");
                }
            }
        }
        kts.append("\n}")
                .append("\n");
        /*
        //Uncomment once gradle-javacpp-platform plugin available
        kts.append("ext {\n")
                .append("\tjavacppPlatorm = \"").append(config.target().toJavacppPlatform() + "\"\n")
                .append("}\n\n");
        */
        kts.append("group = \"ai.konduit\"\n");
        //kts.append("version = \"1.0-SNAPSHOT\"\n");
        // ----- Dependencies Section -----
        List<Dependency> dependencies = config.resolveDependencies();
        if (!dependencies.isEmpty()) {
            kts.append("dependencies {\n");
        }
        for (Dependency dep : dependencies) {
            if (dep.classifier() == null)
                kts.append("\timplementation(\"" + dep.groupId() + ":" + dep.artifactId() + ":" + dep.version() + "\")").
                        append("\n");
            else
                kts.append("\timplementation(\"" + dep.groupId() + ":" + dep.artifactId() + ":" + dep.version() + ":" + dep.classifier() + "\")").
                        append("\n");
        }
        if (!dependencies.isEmpty()) {
            kts.append("}").append("\n");
        }
        // ----- Per-deployment task generation -----
        List<Deployment> deployments = config.deployments();
        Preconditions.checkState(deployments != null, "No deployments (uberjar, docker, etc) were specified for the build");
        for (Deployment deployment : deployments) {
            if (deployment instanceof UberJarDeployment) {
                String escaped = ((UberJarDeployment)deployment).outputDir().replace("\\","\\\\");
                String jarName = ((UberJarDeployment)deployment).jarName();
                if(jarName.endsWith(".jar")){
                    jarName = jarName.substring(0, jarName.length()-4); // strip extension; the build adds it back
                }
                addUberJarTask(kts, jarName, escaped);
            }
            else if (deployment instanceof RpmDeployment) {
                RpmDeployment r = (RpmDeployment)deployment;
                String escaped = r.outputDir().replace("\\","\\\\");
                addUberJarTask(kts, "ks", escaped);
                String rpmName = r.rpmName();
                kts.append("ospackage { \n");
                if(rpmName.endsWith(".rpm")){
                    rpmName = rpmName.substring(0, rpmName.length()-4);
                }
                kts.append("\tfrom(\"" + escaped + "\")\n");
                kts.append("\tpackageName = \"" + rpmName + "\"\n");
                kts.append("\tsetArch( " + getRpmDebArch(config.target()) + ")\n");
                kts.append("\tos = " + getRpmDebOs(config.target()) + "\n");
                kts.append("}\n");
                kts.append(createCopyTask("copyRpm", outputDir.getAbsolutePath(),
                        r.outputDir(), "*.rpm", "distributions"));
            }
            else if (deployment instanceof DebDeployment) {
                String escaped = ((DebDeployment)deployment).outputDir().replace("\\","\\\\");
                addUberJarTask(kts, "ks", escaped);
                String rpmName = ((DebDeployment)deployment).rpmName();
                kts.append("ospackage {\n");
                if(rpmName.endsWith(".deb")){
                    rpmName = rpmName.substring(0, rpmName.length()-4);
                }
                kts.append("\tfrom(\"" + escaped + "\")\n");
                kts.append("\tpackageName = \"" + rpmName + "\"\n");
                //kts.append("\tsetArch(" + ((DebDeployment)deployment).archName() + ")\n");
                kts.append("\tos = " + getRpmDebOs(config.target()) + "\n");
                kts.append("}").append("\n\n");
                kts.append(createCopyTask("copyDeb", outputDir.getAbsolutePath(), ((DebDeployment)deployment).outputDir(),
                        "*.deb", "distributions"));
            } else if(deployment instanceof ClassPathDeployment){
                addClassPathTask(kts, (ClassPathDeployment) deployment);
            }
            else if (deployment instanceof ExeDeployment) {
                String exeName = ((ExeDeployment)deployment).exeName();
                kts.append("tasks.withType<DefaultLaunch4jTask> {\n");
                if(exeName.endsWith(".exe")){
                    exeName = exeName.substring(0, exeName.length()-4);
                }
                kts.append("\toutfile = \"" + exeName + ".exe\"\n");
                //kts.append("destinationDirectory.set(file(\"" + escaped + "\"))\n");
                kts.append("\tmainClassName = \"ai.konduit.serving.cli.launcher.KonduitServingLauncher\"\n");
                kts.append("}\n");
                kts.append(createCopyTask("copyExe", outputDir.getAbsolutePath(), ((ExeDeployment)deployment).outputDir(),
                        "*.exe", "launch4j"));
            }
            else if (deployment instanceof DockerDeployment) {
                String escapedInputDir = StringUtils.EMPTY;
                DockerDeployment dd = (DockerDeployment)deployment;
                if (StringUtils.isEmpty(dd.inputDir())) {
                    // NOTE(review): dockerResource is constructed above and is never null — this
                    // check probably intended dockerResource.exists() instead.
                    if (dockerResource != null)
                        escapedInputDir = dockerResource.getParent().replace("\\","\\\\");
                }
                else {
                    escapedInputDir = dd.inputDir().replace("\\", "\\\\");
                }
                kts.append("tasks.create(\"buildImage\", DockerBuildImage::class) {\n");
                if (StringUtils.isNotEmpty(escapedInputDir))
                    kts.append("\tinputDir.set(file(\"" + escapedInputDir + "\"))\n");
                else
                    // NOTE(review): this emitted line has unbalanced quotes/parentheses
                    // ("FromInstruction(From(\"...") — verify the generated Kotlin compiles.
                    kts.append("\tval baseImage = FromInstruction(From(\"").append(dd.baseImage()).append("\n");
                if(dd.imageName() != null){
                    //Note image names must be lower case
                    kts.append("\timages.add(\"").append(dd.imageName().toLowerCase()).append("\")");
                }
                kts.append("}\n");
            }
            else if (deployment instanceof TarDeployment) {
                String escaped = ((TarDeployment)deployment).outputDir().replace("\\","\\\\");
                addUberJarTask(kts, "ks", escaped);
                List<String> fromFiles = ((TarDeployment)deployment).files();
                if (fromFiles.size() > 0) {
                    String rpmName = ((TarDeployment) deployment).archiveName();
                    kts.append("distributions {\n");
                    kts.append("\tmain {\n");
                    kts.append("\t\tdistributionBaseName.set( \"" + rpmName + "\")\n");
                    kts.append("\t\t contents {\n");
                    for (String file : fromFiles) {
                        String escapedFile = file.replace("\\","\\\\");
                        kts.append("\t\t\tfrom(\"" + escapedFile + "\")\n");
                    }
                    kts.append("\t\t\tfrom(\"" + escaped + "\")\n");
                    kts.append("\t\t }\n");
                    kts.append("\t}\n");
                    kts.append("}").append("\n\n");
                    kts.append(createCopyTask("copyTar", outputDir.getAbsolutePath(), ((TarDeployment)deployment).outputDir(),
                            "*.tar", "distributions"));
                }
            }
        }
        System.out.println(kts.toString());
        Preconditions.checkState(!deployments.isEmpty(), "No deployments were specified");
        System.out.println("Dependencies: " + dependencies);
        System.out.println("Deployments: " + deployments);
        File ktsFile = new File(outputDir, "build.gradle.kts");
        FileUtils.writeStringToFile(ktsFile, kts.toString(), Charset.defaultCharset());
    }

    /**
     * Runs the previously generated Gradle build in {@code directory} via the Gradle
     * Tooling API, executing the "wrapper" task plus every deployment's declared task
     * names. If the build output contains "Successfully built &lt;id&gt;" the captured
     * docker image id is written back into all DockerDeployment entries of {@code config}.
     */
    public static void runGradleBuild(File directory, Config config) throws IOException {
        //Check for build.gradle.kts, properties
        //Check for gradlew/gradlew.bat
        File kts = new File(directory, "build.gradle.kts");
        if (!kts.exists()) {
            throw new IllegalStateException("build.gradle.kts doesn't exist");
        }
        // NOTE(review): only gradlew.bat is checked, even on Linux/macOS — presumably
        // the POSIX "gradlew" script should be checked on non-Windows hosts.
        File gradlew = new File(directory, "gradlew.bat");
        if (!gradlew.exists()) {
            throw new IllegalStateException("gradlew.bat doesn't exist");
        }
        //Execute gradlew
        ProjectConnection connection = GradleConnector.newConnector()
                .forProjectDirectory(directory)
                //.useGradleVersion("6.1")
                .connect();
        List<String> tasks = new ArrayList<>();
        tasks.add("wrapper");
        for(Deployment d : config.deployments()){
            for (String s : d.gradleTaskNames()) {
                if (!tasks.contains(s)) {
                    tasks.add(s);
                }
            }
        }
        try(ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
            connection.newBuild().setStandardOutput(baos).setStandardError(System.err).forTasks(tasks.toArray(new String[0])).run();
            String output = baos.toString();
            // Scan the captured build log for the docker "Successfully built <id>" line.
            Pattern pattern = Pattern.compile("(Successfully built )(\\w)+");
            Matcher matcher = pattern.matcher(output);
            String dockerId = StringUtils.EMPTY;
            while (matcher.find()){
                String[] words = matcher.group(0).split(" ");
                if (words.length >= 3) {
                    dockerId = words[2]; // last match wins if multiple images were built
                }
            }
            final String effDockerId = dockerId;
            System.out.println(output);
            if (StringUtils.isNotEmpty(dockerId)) {
                config.deployments().stream().forEach(
                        d -> {
                            if (d instanceof DockerDeployment)
                                ((DockerDeployment) d).imageId(effDockerId);
                        });
            }
        } finally {
            connection.close();
        }
    }

    /**
     * Maps the build target's CPU architecture to the redline RPM/DEB Architecture
     * constant name used in the generated ospackage block.
     */
    public static String getRpmDebArch(Target t){
        //https://github.com/craigwblake/redline/blob/master/src/main/java/org/redline_rpm/header/Architecture.java
        switch (t.arch()){
            case x86:
            case x86_avx2:
            case x86_avx512:
                return "Architecture.X86_64";
            case armhf:
                return "Architecture.ARM";
            case arm64:
                return "Architecture.AARCH64";
            case ppc64le:
                return "Architecture.PPC64";
            default:
                throw new RuntimeException("Unknown arch for target: " + t);
        }
    }

    /**
     * Maps the build target's operating system to the redline RPM/DEB Os constant
     * name used in the generated ospackage block.
     */
    public static String getRpmDebOs(Target t){
        //https://github.com/craigwblake/redline/blob/master/src/main/java/org/redline_rpm/header/Os.java
        switch (t.os()){
            case LINUX:
                return "Os.LINUX";
            case WINDOWS:
                return "Os.CYGWINNT";
            case MACOSX:
                return "Os.MACOSX";
            //case ANDROID:
            default:
                throw new RuntimeException("Unknown os for target: " + t);
        }
    }

    /**
     * Appends a ShadowJar (uber-jar) task configuration plus a manifest with the
     * launcher Main-Class entry to the generated build script.
     */
    private static void addUberJarTask(StringBuilder kts, String fileName, String directoryName) {
        kts.append("tasks.withType<ShadowJar> {\n");
        String jarName = fileName;
        kts.append("\tbaseName = \"" + jarName + "\"\n");
        //needed for larger build files, shadowJar
        //extends Jar which extends Zip
        //a lot of documentation on the internet points to zip64 : true
        //as the way to set this, the only way I found to do it in the
        //kotlin dsl was to invoke the setter directly after a bit of reverse engineering
        kts.append("\tsetZip64(true)\n");
        kts.append("\tdestinationDirectory.set(file(\"" + directoryName + "\"))\n");
        kts.append("\tmergeServiceFiles()"); //For service loader files
        kts.append("}\n");
        kts.append("//Add manifest - entry point\n")
                .append("tasks.withType(Jar::class) {\n")
                .append("    manifest {\n")
                .append("        attributes[\"Manifest-Version\"] = \"1.0\"\n")
                .append("        attributes[\"Main-Class\"] = \"ai.konduit.serving.cli.launcher.KonduitServingLauncher\"\n")
                .append("    }\n")
                .append("}\n\n");
    }

    /**
     * Appends either a classpath-text-file task or a manifest-only "Class-Path" JAR
     * task, depending on the ClassPathDeployment type.
     */
    private static void addClassPathTask(StringBuilder kts, ClassPathDeployment cpd){
        String filePrefix = "file:/" + (SystemUtils.IS_OS_WINDOWS ? "" : "/");
        //Adapted from: https://stackoverflow.com/a/54159784
        if(cpd.type() == ClassPathDeployment.Type.TEXT_FILE) {
            kts.append("//Task: ClassPathDeployment - writes the absolute path of all JAR files for the build to the specified text file, one per line\n")
                    .append("task(\"writeClassPathToFile\"){\n")
                    .append("    var spec2File: Map<String, File> = emptyMap()\n")
                    .append("    configurations.compileClasspath {\n")
                    .append("        val s2f: MutableMap<ResolvedModuleVersion, File> = mutableMapOf()\n")
                    .append("        // https://discuss.gradle.org/t/map-dependency-instances-to-file-s-when-iterating-through-a-configuration/7158\n")
                    .append("        resolvedConfiguration.resolvedArtifacts.forEach({ ra: ResolvedArtifact ->\n")
                    .append("            s2f.put(ra.moduleVersion, ra.file)\n").append("        })\n")
                    .append("        spec2File = s2f.mapKeys({\"${it.key.id.group}:${it.key.id.name}\"})\n")
                    .append("        spec2File.keys.sorted().forEach({ it -> println(it.toString() + \" -> \" + spec2File.get(it))})\n")
                    .append("        val sb = StringBuilder()\n")
                    .append("        spec2File.keys.sorted().forEach({ it -> sb.append(spec2File.get(it)); sb.append(\"\\n\")})\n")
                    .append("        File(\"").append(cpd.outputFile()).append("\").writeText(sb.toString())\n")
                    .append("    }\n")
                    .append("}\n");
        } else {
            //Write a manifest JAR
            kts.append("//Write a JAR with a manifest containing the path of all dependencies, but no other content\n")
                    .append("tasks.withType(Jar::class) {\n")
                    .append("    manifest {\n")
                    .append("        attributes[\"Manifest-Version\"] = \"1.0\"\n")
                    .append("        attributes[\"Main-Class\"] = \"ai.konduit.serving.cli.launcher.KonduitServingLauncher\"\n")
                    .append("        attributes[\"Class-Path\"] = \"" + filePrefix + "\" + configurations.runtimeClasspath.get().getFiles().joinToString(separator=\" " + filePrefix + "\")\n")
                    .append("    }\n");
            if(cpd.outputFile() != null){
                String path = cpd.outputFile().replace("\\", "/");
                kts.append("setProperty(\"archiveFileName\", \"").append(path).append("\")\n");
            }
            kts.append("}");
        }
    }

    /**
     * Copies a classpath resource to the given file, creating parent directories.
     *
     * @throws IllegalStateException if the resource is missing from the JAR
     * @throws RuntimeException      wrapping any IOException during the copy
     */
    protected static void copyResource(String resource, File to){
        InputStream is = GradleBuild.class.getResourceAsStream(resource);
        Preconditions.checkState(is != null, "Could not find %s resource that should be available in konduit-serving-build JAR", resource);
        to.getParentFile().mkdirs(); // NOTE(review): mkdirs() return value ignored; failure surfaces later as FileNotFoundException
        try(InputStream bis = new BufferedInputStream(is); OutputStream os = new BufferedOutputStream(new FileOutputStream(to))){
            IOUtils.copy(bis, os);
        } catch (IOException e){
            throw new RuntimeException("Error copying resource " + resource + " to " + to, e);
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/build/GradlePlugin.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.build;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * Value object identifying a Gradle plugin to apply in a generated build script:
 * the plugin {@code id} plus an optional {@code version}. Lombok generates the
 * fluent accessors ({@code id()}, {@code version()}), equals/hashCode/toString,
 * and the all-args constructor.
 */
@Data
@Accessors(fluent = true)
@AllArgsConstructor
public class GradlePlugin {
    // Plugin id written into the generated plugins {} block.
    private String id;
    // Plugin version; when null/empty no "version" clause is emitted for this plugin.
    private String version;
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/cli/BuildCLI.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.cli;
import ai.konduit.serving.build.build.GradleBuild;
import ai.konduit.serving.build.config.Module;
import ai.konduit.serving.build.config.Arch;
import ai.konduit.serving.build.config.Target;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.OS;
import ai.konduit.serving.build.config.Config;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.ComputeDevice;
import ai.konduit.serving.build.config.Serving;
import ai.konduit.serving.build.dependencies.Dependency;
import ai.konduit.serving.build.dependencies.DependencyRequirement;
import ai.konduit.serving.build.dependencies.ModuleRequirements;
import ai.konduit.serving.build.deployments.ClassPathDeployment;
import ai.konduit.serving.build.deployments.UberJarDeployment;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import io.vertx.core.cli.CLIException;
import io.vertx.core.cli.annotations.Description;
import io.vertx.core.cli.annotations.Name;
import io.vertx.core.cli.annotations.Option;
import io.vertx.core.cli.annotations.Summary;
import io.vertx.core.spi.launcher.DefaultCommand;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.SystemUtils;
import org.nd4j.shade.guava.collect.Streams;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/**
 * Command line interface for performing Konduit Serving builds
 * Allows the user to build a JAR or artifact such as a docker image suitable for performing inference on a given
 * pipeline on a given deployment target (defined as an operating system, CPU architecture and optionally compute device).<br>
 * <br>
 * For example, can be used to build for any of the following:
 * * HTTP (REST) server on x86 Windows (CPU), packaged as a stand-alone .exe<br>
 * * HTTP and GRPC server on CUDA 10.2 + Linux, packaged as a docker image<br>
 * And many more combinations
 *
 *
 * @author Alex black
 */
@Name("build")
@Summary("Command line interface for performing Konduit Serving builds.")
@Description("Allows the user to build a JAR or artifact such as a docker image suitable " +
        "for performing inference on a given pipeline on a given deployment target (defined " +
        "as an operating system, CPU architecture and optionally compute device). " +
        "For example, can be used to build for any of the following: \n" +
        "-> HTTP (REST) server on x86 Windows (CPU), packaged as a stand-alone .exe\n" +
        "-> HTTP and GRPC server on CUDA 10.2 + Linux, packaged as a docker image \n" +
        "And many more combinations\n\n" +
        "Example usages:\n" +
        "--------------\n" +
        "- Creates a deployment for classpath manifest jar for a CPU device:\n" +
        "$ konduit build -dt classpath -c classpath.outputFile=manifest.jar \n" +
        "      classpath.type=jar_manifest -p pipeline.json -d CPU \n\n" +
        "- Creates a uber jar deployment for a CUDA 10.1 device:\n" +
        // Fixed example: the uber-jar example previously showed "-dt classpath", which
        // contradicts the jar.* config keys it passes; "-dt jar" is the matching type.
        "$ konduit build -dt jar -c jar.outputdir=build jar.name=uber.jar \n" +
        "      -p pipeline.json -d CUDA_10.1 \n" +
        "--------------")
@Slf4j
public class BuildCLI extends DefaultCommand {
    public static final String HTTP = "HTTP";
    public static final String GRPC = "GRPC";
    public static final String PIPELINE_OPTION_DESCRIPTION = "Path to a pipeline json file";
    public static final String OS_OPTION_DESCRIPTION = "Operating systems to build for. Valid values: {linux, windows, mac} (case insensitive).\n" +
            "If not set, the current system OS will be used";
    public static final String ARCHITECTURE_OPTION_DESCRIPTION = "The target CPU architecture. Must be one of {x86, x86_avx2, x86_avx512, armhf, arm64, ppc64le}.\n " +
            "Note that most modern desktops can be built with x86_avx2, which is the default";
    public static final String DEVICE_OPTION_DESCRIPTION = "Compute device to be used. If not set: artifacts are build for CPU only.\n" +
            "Valid values: CPU, CUDA_10.0, CUDA_10.1, CUDA_10.2 (case insensitive)";
    public static final String MODULES_OPTION_DESCRIPTION = "Names of the Konduit Serving modules to include, as a comma-separated list of values.\nNote that " +
            "this is not necessary when a pipeline is included (via -p/--pipeline), as the modules will be inferred " +
            "automatically based on the pipeline contents";
    public static final String DEPLOYMENT_TYPE_OPTION_DESCRIPTION = "The deployment types to use: JAR, DOCKER, EXE, WAR, RPM, DEB or TAR (case insensitive)";
    public static final String SERVER_TYPE_OPTION_DESCRIPTION = "Type of server - HTTP or GRPC (case insensitive)";
    public static final String ADDITIONAL_DEPENDENCIES_OPTION_DESCRIPTION = "Additional dependencies to include, in GAV(C) format: \"group_id:artifact_id:version\" / \"group_id:artifact_id:version:classifier\"";
    public static final String CONFIG_OPTION_DESCRIPTION = "Configuration for the deployment types specified via -dt/--deploymentType.\n" +
            "For example, \"-c jar.outputdir=/some/dir jar.name=my.jar\" etc.\n" +
            "Configuration keys:\n" +
            UberJarDeployment.CLI_KEYS + "\n" +
            ClassPathDeployment.CLI_KEYS + "\n";

    // Fields are annotated for JCommander (used by main()/exec()); the @Option-annotated
    // setters below serve the Vert.x CLI launcher path. Both populate the same state.
    @Parameter(names = {"-p", "--pipeline"}, description = PIPELINE_OPTION_DESCRIPTION)
    private String pipeline;
    @Parameter(names = {"-o", "--os"}, validateValueWith = CLIValidators.OSValueValidator.class,
            description = OS_OPTION_DESCRIPTION)
    private List<String> os;
    @Parameter(names = {"-a", "--arch"}, validateValueWith = CLIValidators.ArchValueValidator.class,
            description = ARCHITECTURE_OPTION_DESCRIPTION)
    private String arch = Arch.x86_avx2.toString();
    @Parameter(names = {"-d", "--device"}, validateValueWith = CLIValidators.DeviceValidator.class,
            description = DEVICE_OPTION_DESCRIPTION)
    private String device;
    @Parameter(names = {"-m", "--modules"}, validateValueWith = CLIValidators.ModuleValueValidator.class,
            description = MODULES_OPTION_DESCRIPTION)
    private List<String> modules;
    @Parameter(names = {"-dt", "--deploymentType"}, validateValueWith = CLIValidators.DeploymentTypeValueValidator.class,
            description = DEPLOYMENT_TYPE_OPTION_DESCRIPTION)
    private List<String> deploymentTypes = Collections.singletonList(Deployment.JAR);
    @Parameter(names = {"-s", "--serverType"},
            description = SERVER_TYPE_OPTION_DESCRIPTION,
            validateValueWith = CLIValidators.ServerTypeValidator.class)
    private List<String> serverTypes = Arrays.asList(HTTP, GRPC);
    @Parameter(names = {"-ad", "--addDep"},
            description = ADDITIONAL_DEPENDENCIES_OPTION_DESCRIPTION,
            validateValueWith = CLIValidators.AdditionalDependenciesValidator.class)
    private List<String> additionalDependencies;
    @Parameter(names = {"-c", "--config"},
            description = CONFIG_OPTION_DESCRIPTION,
            variableArity = true,
            validateValueWith = CLIValidators.ConfigValidator.class)
    private List<String> config;
    @Parameter(names = {"-h", "--help"}, help = true, arity = 0)
    private boolean help;

    /** Sets the path to the pipeline JSON/YAML file (Vert.x CLI entry point). */
    @Option(shortName = "p", longName = "pipeline")
    @Description(PIPELINE_OPTION_DESCRIPTION)
    public void setPipeline(String pipeline) {
        this.pipeline = pipeline;
    }

    /**
     * Sets the target operating system(s), expanding comma-separated values and validating them.
     * Exits the process with status 1 on invalid input (CLI-boundary error handling).
     */
    @Option(shortName = "o", longName = "os", acceptMultipleValues = true)
    @Description(OS_OPTION_DESCRIPTION)
    public void setOperatingSystem(List<String> operatingSystem) {
        try {
            operatingSystem = commandSeparatedListToExpandedList(operatingSystem);
            new CLIValidators.OSValueValidator().validate("os", operatingSystem);
        } catch (Exception e) {
            out.println("Error validating OS (-o/--os): " + e.getMessage());
            System.exit(1);
        }
        this.os = operatingSystem;
    }

    /** Sets and validates the target CPU architecture. Exits with status 1 on invalid input. */
    @Option(shortName = "a", longName = "arch")
    @Description(ARCHITECTURE_OPTION_DESCRIPTION)
    public void setArchitecture(String architecture) {
        try {
            new CLIValidators.ArchValueValidator().validate("arch", architecture);
        } catch (Exception e) {
            out.println("Error validating architecture (-a/--arch): " + e.getMessage());
            System.exit(1);
        }
        this.arch = architecture;
    }

    /** Sets and validates the compute device (CPU/CUDA). Exits with status 1 on invalid input. */
    @Option(shortName = "d", longName = "device")
    @Description(DEVICE_OPTION_DESCRIPTION)
    public void setDevice(String device) {
        try {
            new CLIValidators.DeviceValidator().validate("device", device);
        } catch (Exception e) {
            out.println("Error validating device (-d/--device): " + e.getMessage());
            System.exit(1);
        }
        this.device = device;
    }

    /** Sets and validates the additional Konduit Serving modules to include. */
    @Option(shortName = "m", longName = "modules", acceptMultipleValues = true)
    @Description(MODULES_OPTION_DESCRIPTION)
    public void setModules(List<String> modules) {
        try {
            modules = commandSeparatedListToExpandedList(modules);
            new CLIValidators.ModuleValueValidator().validate("modules", modules);
        } catch (Exception e) {
            out.println("Error validating modules (-m/--modules): " + e.getMessage());
            System.exit(1);
        }
        this.modules = modules;
    }

    /** Sets and validates the deployment types (JAR, DOCKER, etc.). */
    @Option(shortName = "dt", longName = "deploymentType", acceptMultipleValues = true)
    @Description(DEPLOYMENT_TYPE_OPTION_DESCRIPTION)
    public void setDeploymentTypes(List<String> deploymentTypes) {
        try {
            deploymentTypes = commandSeparatedListToExpandedList(deploymentTypes);
            new CLIValidators.DeploymentTypeValueValidator().validate("deploymentType", deploymentTypes);
        } catch (Exception e) {
            out.println("Error validating deployment type (-dt/--deploymentType): " + e.getMessage());
            System.exit(1);
        }
        this.deploymentTypes = deploymentTypes;
    }

    /** Sets and validates the server types (HTTP and/or GRPC). */
    @Option(shortName = "s", longName = "serverType", acceptMultipleValues = true)
    @Description(SERVER_TYPE_OPTION_DESCRIPTION)
    public void setServerTypes(List<String> serverTypes) {
        try {
            serverTypes = commandSeparatedListToExpandedList(serverTypes);
            new CLIValidators.ServerTypeValidator().validate("serverType", serverTypes);
        } catch (Exception e) {
            out.println("Error validating server type (-s/--serverType): " + e.getMessage());
            System.exit(1);
        }
        this.serverTypes = serverTypes;
    }

    /** Sets and validates additional GAV(C)-format dependencies. */
    @Option(shortName = "ad", longName = "addDep", acceptMultipleValues = true)
    @Description(ADDITIONAL_DEPENDENCIES_OPTION_DESCRIPTION)
    public void setAdditionalDependencies(List<String> additionalDependencies) {
        try {
            additionalDependencies = commandSeparatedListToExpandedList(additionalDependencies);
            new CLIValidators.AdditionalDependenciesValidator().validate("additionalDependencies", additionalDependencies);
        } catch (Exception e) {
            // Fixed: this option's flags are -ad/--addDep (the old message said -a/--addDep,
            // which collides with -a/--arch and would mislead users)
            out.println("Error validating additional dependencies (-ad/--addDep): " + e.getMessage());
            System.exit(1);
        }
        this.additionalDependencies = additionalDependencies;
    }

    /** Sets and validates the per-deployment "key=value" configuration entries. */
    @Option(shortName = "c", longName = "config", acceptMultipleValues = true)
    @Description(CONFIG_OPTION_DESCRIPTION)
    public void setConfig(List<String> config) {
        try {
            config = commandSeparatedListToExpandedList(config);
            new CLIValidators.ConfigValidator().validate("config", config);
        } catch (Exception e) {
            out.println("Error validating config (-c/--config): " + e.getMessage());
            System.exit(1);
        }
        this.config = config;
    }

    public static void main(String... args) throws Exception {
        new BuildCLI().exec(args);
    }

    /**
     * Stand-alone entry point: parses arguments with JCommander (the @Parameter annotations),
     * prints usage if -h/--help was given, otherwise runs the build.
     */
    public void exec(String[] args) {
        JCommander jCommander = new JCommander();
        jCommander.addObject(this);
        jCommander.parse(args);
        if(help) {
            jCommander.usage();
            return;
        }
        run();
    }

    /**
     * Performs the full build: prints the configuration, validates modules/dependencies and
     * deployment configurations (exiting with status 1 on any validation failure), then
     * generates and runs a Gradle build in a temporary directory and prints a summary.
     *
     * @throws CLIException if Gradle build-file generation or the Gradle build itself fails
     */
    @Override
    public void run() throws CLIException {
        // When invoked via main()/exec() the Vert.x launcher never set the output stream
        if (out == null) {
            out = System.out;
        }
        //Infer OS if necessary
        if(os == null || os.isEmpty())
            inferOS();
        //------------------------------------- Build Configuration --------------------------------------
        //Print out configuration / values
        int width = 96;
        int keyWidth = 30;
        out.println(padTo("Konduit Serving Build Tool", '=', width));
        out.println(padTo("Build Configuration", '-', width));
        out.println(padRight("Pipeline:", ' ', keyWidth) + (pipeline == null ? "<not specified>" : pipeline));
        out.println(padRight("Target OS:", ' ', keyWidth) + (os.size() == 1 ? os.get(0) : os.toString()));
        out.println(padRight("Target CPU arch.:", ' ', keyWidth) + arch);
        out.println(padRight("Target Device:", ' ', keyWidth) + (device == null ? "CPU" : device));
        if(modules != null){
            out.println(padRight("Additional modules:", ' ', keyWidth) + String.join(", ", modules));
        }
        out.println(padRight("Server type(s):", ' ', keyWidth) + String.join(", ", serverTypes));
        out.println(padRight("Deployment type(s):", ' ', keyWidth) + String.join(", ", deploymentTypes));
        if(additionalDependencies != null){
            out.println(padRight("Additional dependencies:", ' ', keyWidth) + String.join(", ", additionalDependencies));
        }
        out.println("\n");
        // "key=value" entries; ConfigValidator has already guaranteed exactly one '=' per entry
        Map<String,String> propsIn = new HashMap<>();
        if(config != null){
            for(String s : config){
                String[] split = s.split("=");
                propsIn.put(split[0], split[1]);
            }
        }
        List<Deployment> deployments = parseDeployments(propsIn);
        for( int i=0; i<deployments.size(); i++ ){
            Deployment d = deployments.get(i);
            if(deployments.size() > 1){
                out.println("Deployment " + (i+1) + " of " + deployments.size() + " configuration: " + d.getClass().getSimpleName());
            } else {
                out.println("Deployment configuration: " + d.getClass().getSimpleName());
            }
            Map<String,String> props = d.asProperties();
            for(Map.Entry<String,String> e : props.entrySet()){
                out.println(padRight("    " + e.getKey() + ":", ' ', keyWidth) + e.getValue());
            }
        }
        out.println("\n");
        //--------------------------------------- Validating Build ---------------------------------------
        out.println(padTo("Validating Build", '-', width));
        if((pipeline == null || pipeline.isEmpty()) && (modules == null || modules.isEmpty())){
            String s = "BUILD FAILED: Either a path to a Pipeline (JSON or YAML) must be provided via -p/--pipeline" +
                    " or a list of modules to include must be provided via -m/--modules." +
                    " When a pipeline is provided via JSON or YAML, the required modules will be determined automatically.";
            out.println(wrapString(s, width));
            System.exit(1);
        }
        // NOTE: only the first OS is used to build the Target here; multi-OS support is TODO
        ComputeDevice cd = device == null ? null : ComputeDevice.forName(device);
        Arch a = Arch.forName(arch);
        Target t = new Target(OS.forName(os.get(0)), a, cd);
        //Parse server type
        List<Serving> serving = new ArrayList<>();
        for(String s : serverTypes){
            serving.add(Serving.valueOf(s.toUpperCase()));
        }
        Config c = new Config()
                .pipelinePath(pipeline)
                .target(t)
                .deployments(deployments)
                .serving(serving)
                .additionalDependencies(additionalDependencies);
        int width2 = 36;
        if(pipeline != null){
            out.println("Resolving modules required for pipeline execution...");
            List<Module> resolvedModules = c.resolveModules();
            for(Module m : resolvedModules){
                out.println("  " + m.name());
            }
            out.println();
            if(modules != null && !modules.isEmpty()){
                out.println("Additional modules specified:");
                List<Module> toAdd = new ArrayList<>();
                boolean anyFailed = false;
                List<String> failed = new ArrayList<>();
                for(String s : modules){
                    boolean e1 = Module.moduleExistsForName(s, false);
                    boolean e2 = Module.moduleExistsForName(s, true);
                    if(e1 || e2){
                        Module m = e1 ? Module.forName(s) : Module.forShortName(s);
                        if(resolvedModules.contains(m)){
                            //Already resolved this one
                            continue;
                        } else {
                            out.println("  " + m.name());
                            toAdd.add(m);
                        }
                    } else {
                        anyFailed = true;
                        out.println("  " + s);
                        failed.add(s);
                    }
                }
                if(anyFailed){
                    out.println("Failed to resolve modules specified via -m/--modules: " + failed);
                    if(failed.size() == 1){
                        out.println("No module is known with this name: " + failed.get(0) );
                    } else {
                        out.println("No modules are known with these names: " + failed );
                    }
                    System.exit(1);
                }
                c.addModules(toAdd);
                resolvedModules = c.modules();
                out.println();
            }
            List<Dependency> d = c.resolveDependencies();
            out.println("Resolving module optional/configurable dependencies for deployment target: " + t);
            boolean anyFail = false;
            for(Module m : resolvedModules){
                ModuleRequirements r = m.dependencyRequirements();
                // null requirements means the module has no target-specific dependency needs
                boolean satisfied = r == null || r.satisfiedBy(t, d);
                String s = padRight("  " + m.name() + ":", ' ', width2);
                out.println(s + (satisfied ? " OK" : " FAILED TO RESOLVE REQUIRED DEPENDENCY FOR OS + TARGET ARCHITECTURE"));
                if(!satisfied){
                    anyFail = true;
                    List<DependencyRequirement> l = r.reqs();
                    for(DependencyRequirement dr : l){
                        if(dr.satisfiedBy(t, d)){
                            out.println("      OK: " + dr);
                        } else {
                            out.println("      FAILED: " + dr);
                        }
                    }
                }
            }
            if(anyFail){
                out.println("BUILD FAILED: Unable to resolve optional dependencies for one or more modules");
                out.println("This likely suggests the module dependencies do not support the target + architecture combination");
                System.exit(1);
            }
            out.println();
            if(!d.isEmpty()){
                out.println("Resolved dependencies:");
                for(Dependency dep : d){
                    out.println("  " + dep.gavString());
                }
            }
            out.println();
            out.println("Checking deployment configurations:");
            boolean anyDeploymentsFailed = false;
            for(Deployment dep : deployments){
                DeploymentValidation v = dep.validate();
                String s = dep.getClass().getSimpleName();
                String s2 = padRight("  " + s + ":", ' ', width2);
                out.println(s2 + (v.ok() ? "OK" : "FAILED"));
                if(!v.ok()){
                    anyDeploymentsFailed = true;
                    for(String f : v.failureMessages()){
                        out.println("      " + f);
                    }
                }
            }
            if(anyDeploymentsFailed){
                out.println("BUILD FAILED: one or more deployment method configurations failed.");
                out.println("See failure messages above for details");
                System.exit(1);
            }
            out.println("\n>> Validation Passed\n");
        }
        //-------------------------------------------- Build ---------------------------------------------
        out.println(padTo("Build", '-', width));
        File tempDir = new File(FileUtils.getTempDirectory(), UUID.randomUUID().toString());
        out.println("Generating build files...");
        try {
            GradleBuild.generateGradleBuildFiles(tempDir, c);
        } catch (IOException cause) {
            throw new CLIException("Failed to generate gradle build files.", cause);
        }
        out.println(">> Build file generation complete\n\n");
        out.println("Starting build...");
        long start = System.currentTimeMillis();
        try {
            GradleBuild.runGradleBuild(tempDir, c);
        } catch (IOException cause) {
            throw new CLIException("Gradle build failed", cause);
        }
        long end = System.currentTimeMillis();
        out.println(">> Build complete\n\n");
        out.println(padTo("Build Summary", '-', width));
        out.println(padRight("Build duration:", ' ', keyWidth) + (end-start)/1000 + " sec");
        out.println(padRight("Output artifacts:", ' ', keyWidth) + deployments.size());
        out.println();
        for(Deployment d : deployments){
            out.println(" ----- " + d.getClass().getSimpleName() + " -----");
            out.println(d.outputString());
            out.println();
        }
    }

    /**
     * Centers {@code in} within a line of {@code toLength} characters, surrounded by
     * {@code padChar}, e.g. {@code "==== Title ===="}. Returns {@code in} unchanged if
     * it does not fit.
     */
    private String padTo(String in, char padChar, int toLength){
        if(in.length() + 2 >= toLength){
            return in;
        }
        int toAdd = toLength - in.length() - 2; //-2 for spaces
        int before = toAdd / 2;
        int after = toAdd - before;
        StringBuilder sb = new StringBuilder();
        for( int i=0; i<before; i++ ){
            sb.append(padChar);
        }
        sb.append(" ").append(in).append(" ");
        for( int i=0; i<after; i++ ){
            sb.append(padChar);
        }
        return sb.toString();
    }

    /** Right-pads {@code in} with {@code padChar} to exactly {@code toLength} characters. */
    private String padRight(String in, char padChar, int toLength){
        if(in.length() >= toLength)
            return in;
        StringBuilder sb = new StringBuilder();
        sb.append(in);
        for(int i=0; i<toLength-in.length(); i++ ){
            sb.append(padChar);
        }
        return sb.toString();
    }

    /** Word-wraps {@code in} at spaces so no line exceeds {@code maxLength} characters. */
    private String wrapString(String in, int maxLength){
        if(in.length() <= maxLength)
            return in;
        StringBuilder sb = new StringBuilder();
        String[] split = in.split(" ");
        int lengthCurrLine = 0;
        for(String s : split){
            if(lengthCurrLine + s.length() + 1 >= maxLength){
                sb.append("\n");
                lengthCurrLine = 0;
            }
            if(lengthCurrLine > 0) {
                sb.append(" ");
                lengthCurrLine++;
            }
            sb.append(s);
            lengthCurrLine += s.length();
        }
        return sb.toString();
    }

    /**
     * Converts the configured deployment type names into {@link Deployment} instances,
     * applying any matching "key=value" configuration from {@code props}.
     *
     * @param props deployment configuration properties parsed from -c/--config
     * @return one Deployment per entry in {@code deploymentTypes}
     * @throws RuntimeException for deployment types that are recognized but not yet implemented
     */
    public List<Deployment> parseDeployments(Map<String, String> props){
        // Named "result" (not "out") to avoid shadowing the inherited PrintStream field
        List<Deployment> result = new ArrayList<>();
        for(String s : deploymentTypes){
            switch (s.toUpperCase()){
                case Deployment.CLASSPATH:
                    ClassPathDeployment classPathDeployment =
                            new ClassPathDeployment().type(ClassPathDeployment.Type.JAR_MANIFEST).outputFile("manifest.jar");
                    classPathDeployment.fromProperties(props);
                    result.add(classPathDeployment);
                    break;
                case Deployment.JAR:
                case Deployment.UBERJAR:
                    UberJarDeployment uberJarDeployment = new UberJarDeployment().outputDir(new File("").getAbsolutePath());
                    uberJarDeployment.fromProperties(props);
                    result.add(uberJarDeployment);
                    break;
                default:
                    throw new RuntimeException("Deployment type not yet implemented: " + s);
            }
        }
        return result;
    }

    /**
     * Infers the target OS from the system the CLI is currently running on.
     *
     * @throws IllegalStateException if the current OS is not Linux, Windows or Mac
     */
    protected void inferOS(){
        if(SystemUtils.IS_OS_LINUX) {
            os = Collections.singletonList(OS.LINUX.name());
        } else if(SystemUtils.IS_OS_WINDOWS){
            os = Collections.singletonList(OS.WINDOWS.name());
        } else if(SystemUtils.IS_OS_MAC){
            os = Collections.singletonList(OS.MACOSX.name());
        } else {
            throw new IllegalStateException("No OS was provided and operating system could not be inferred");
        }
    }

    /** Expands a list of possibly comma-separated strings into a flat list of individual values. */
    private List<String> commandSeparatedListToExpandedList(List<String> commandSeparatedList) {
        AtomicReference<List<String>> output = new AtomicReference<>(new ArrayList<>());
        commandSeparatedList.forEach(commaSeparatedString -> output
                .set(Streams
                        .concat(output.get().stream(), commaSeparatedStringToList(commaSeparatedString).stream())
                        .collect(Collectors.toList())
                )
        );
        return output.get();
    }

    /** Splits a single comma-separated string into its component values. */
    private List<String> commaSeparatedStringToList(String commaSeparatedString) {
        return Arrays.stream(commaSeparatedString.split(",")).collect(Collectors.toList());
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/cli/CLIValidators.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.cli;
import ai.konduit.serving.build.config.ComputeDevice;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.Arch;
import ai.konduit.serving.build.config.OS;
import com.beust.jcommander.IValueValidator;
import com.beust.jcommander.ParameterException;
import org.nd4j.common.base.Preconditions;
import java.util.Arrays;
import java.util.List;
/**
 * JCommander value validators for the {@link BuildCLI} options. Each validator throws
 * {@link ParameterException} with a user-facing message on invalid input; validators are
 * also invoked directly by the Vert.x CLI setters in {@code BuildCLI}.
 */
public class CLIValidators {
    private CLIValidators(){ }

    /** Validates -o/--os values: each must be linux, windows or mac (case insensitive); empty means "infer". */
    public static class OSValueValidator implements IValueValidator<List<String>> {
        private static final String LINUX = OS.LINUX.toString();
        private static final String WINDOWS = OS.WINDOWS.toString();
        private static final String MAC = "MAC";
        @Override
        public void validate(String name, List<String> value) throws ParameterException {
            if(value == null || value.isEmpty())
                return; //Infer OS
            for(String s : value){
                if(!LINUX.equalsIgnoreCase(s) && !WINDOWS.equalsIgnoreCase(s) && !MAC.equalsIgnoreCase(s)){
                    throw new ParameterException("Invalid operating system: got \"" + s + "\" but must be one or more of {" + LINUX + "," + WINDOWS + "," + MAC + "} (case insensitive)");
                }
            }
        }
    }

    /** Validates -a/--arch: must be one of the known CPU architectures (case insensitive). */
    public static class ArchValueValidator implements IValueValidator<String> {
        private static final String X86 = Arch.x86.toString();
        private static final String X86_AVX2 = Arch.x86_avx2.toString();
        private static final String X86_AVX512 = Arch.x86_avx512.toString();
        private static final String ARMHF = Arch.armhf.toString();
        private static final String ARM64 = Arch.arm64.toString();
        private static final String PPC64LE = Arch.ppc64le.toString();
        @Override
        public void validate(String name, String s) throws ParameterException {
            if(!X86.equalsIgnoreCase(s) && !X86_AVX2.equalsIgnoreCase(s) && !X86_AVX512.equalsIgnoreCase(s) &&
                    !ARMHF.equalsIgnoreCase(s) && !ARM64.equalsIgnoreCase(s) && !PPC64LE.equalsIgnoreCase(s)){
                throw new ParameterException("Invalid CPU architecture: Got \"" + s + "\" but must be one or more of {" + X86 + ", " + X86_AVX2 +
                        ", " + X86_AVX512 + ", " + ARMHF + ", " + ARM64 + ", " + PPC64LE + "} (case insensitive)");
            }
        }
    }

    /** Validates -dt/--deploymentType: at least one value, each matching a known deployment type. */
    public static class DeploymentTypeValueValidator implements IValueValidator<List<String>> {
        public static final List<String> VALUES = Arrays.asList(Deployment.CLASSPATH,
                Deployment.JAR, Deployment.UBERJAR, Deployment.DOCKER, Deployment.EXE,
                Deployment.WAR, Deployment.RPM, Deployment.DEB, Deployment.TAR);
        @Override
        public void validate(String name, List<String> value) throws ParameterException {
            if(value == null || value.isEmpty()){
                throw new ParameterException("No deployment types were provided. Valid values are: " + VALUES + " (case insensitive)");
            }
            for(String s : value){
                boolean found = false;
                for(String s2 : VALUES){
                    if(s2.equalsIgnoreCase(s)){
                        found = true;
                        break;
                    }
                }
                if(!found) {
                    throw new ParameterException("Invalid deployment type specified: \"" + s + "\" - valid values are: " + VALUES + " (case insensitive)");
                }
            }
        }
    }

    /** Validates -m/--modules. Intentionally a no-op: module resolution happens later in BuildCLI.run(). */
    public static class ModuleValueValidator implements IValueValidator<List<String>> {
        @Override
        public void validate(String name, List<String> value) throws ParameterException {
        }
    }

    /** Validates -s/--serverType: at least one value, each HTTP or GRPC (case insensitive). */
    public static class ServerTypeValidator implements IValueValidator<List<String>> {
        private static final List<String> VALUES = Arrays.asList(BuildCLI.HTTP, BuildCLI.GRPC);
        @Override
        public void validate(String name, List<String> value) throws ParameterException {
            if(value == null || value.isEmpty()){
                throw new ParameterException("No server type were provided. Valid values are: " + VALUES + " (case insensitive)");
            }
            for(String s : value){
                boolean found = false;
                for(String s2 : VALUES){
                    if(s2.equalsIgnoreCase(s)){
                        found = true;
                        break;
                    }
                }
                if(!found) {
                    throw new ParameterException("Invalid server type specified: \"" + s + "\" - valid values are: " + VALUES + " (case insensitive)");
                }
            }
        }
    }

    /** Validates -d/--device: blank (= CPU) or one of the known CPU/CUDA device identifiers. */
    public static class DeviceValidator implements IValueValidator<String> {
        @Override
        public void validate(String name, String value) throws ParameterException {
            if(value == null || value.isEmpty())
                return; //EMPTY = CPU
            boolean ok = ComputeDevice.CPU.equalsIgnoreCase(value) ||
                    ComputeDevice.CUDA_100.equalsIgnoreCase(value) ||
                    ComputeDevice.CUDA_101.equalsIgnoreCase(value) ||
                    ComputeDevice.CUDA_102.equalsIgnoreCase(value) ||
                    ComputeDevice.CUDA_110.equalsIgnoreCase(value);
            if(!ok){
                // Fixed: CUDA_11.0 is accepted above but was missing from the error message
                throw new ParameterException("Invalid device string: must be blank (not set = CPU), or have value " +
                        ComputeDevice.CPU + ", " + ComputeDevice.CUDA_100 + ", " + ComputeDevice.CUDA_101 + ", " +
                        ComputeDevice.CUDA_102 + ", " + ComputeDevice.CUDA_110);
            }
        }
    }

    /** Validates -ad/--addDep: each entry must be "group:artifact:version" or "group:artifact:version:classifier". */
    public static class AdditionalDependenciesValidator implements IValueValidator<List<String>>{
        @Override
        public void validate(String name, List<String> value) throws ParameterException {
            for(String s : value){
                String[] split = s.split(":");
                if(split.length != 3 && split.length != 4){
                    throw new ParameterException("Invalid additionalDependency setting: Dependencies must " +
                            "be specified in \"group_id:artifact_id:version\" or \"group_id:artifact_id:version:classifier\" format. Got " + value);
                }
            }
        }
    }

    /** Validates -c/--config: each entry must be exactly "key=value". */
    public static class ConfigValidator implements IValueValidator<List<String>>{
        @Override
        public void validate(String name, List<String> value) throws ParameterException {
            for(String s : value){
                String[] split = s.split("=");
                if(split.length != 2){
                    // Fixed: the message previously computed String.join(delim, value + "\"]"), which
                    // joined over the *list's toString concatenated with a string* and produced a
                    // malformed message; the join must run over the list itself. Also added the
                    // missing "must" to the message text.
                    throw new ParameterException("Invalid config setting: Configuration for deployments must " +
                            "be specified in the format \"key=value\". Got " + "[\"" + String.join("\", \"", value) + "\"]");
                }
            }
        }
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/Arch.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import org.nd4j.common.base.Preconditions;
/**
 * Target CPU architectures supported by the build tool, including the x86 AVX variants.
 * The x86 family forms a compatibility chain: x86 code runs on avx2/avx512 machines, and
 * avx2 code runs on avx512 machines, but not the reverse.
 */
public enum Arch {x86, x86_avx2, x86_avx512, armhf, arm64, ppc64le;

    /**
     * Looks up an Arch from a (case-insensitive) name, accepting several aliases
     * such as "x86_64" and "x86-avx2". Returns null for unrecognized names.
     */
    public static Arch forName(String s){
        String key = s.toLowerCase();
        if(key.equals("x86") || key.equals("x86_64"))
            return Arch.x86;
        if(key.equals("x86_avx2") || key.equals("x86-avx2") || key.equals("x86_64-avx2"))
            return Arch.x86_avx2;
        if(key.equals("x86_avx512") || key.equals("x86-avx512") || key.equals("x86_64-avx512"))
            return Arch.x86_avx512;
        if(key.equals("arm64"))
            return Arch.arm64;
        if(key.equals("armhf"))
            return Arch.armhf;
        if(key.equals("ppc64le"))
            return Arch.ppc64le;
        return null;
    }

    /**
     * What other architectures is this compatible with?
     * Mainly used for x86: i.e., can run x86 on x86-avx2 and x86-avx512 systems.
     * Note that this method also includes the original.
     * For example, x86 -> {x86, x86_avx2, x86_avx512}
     */
    public Arch[] compatibleWith(){
        if(this == x86)
            return new Arch[]{Arch.x86, Arch.x86_avx2, Arch.x86_avx512};
        if(this == x86_avx2)
            return new Arch[]{Arch.x86_avx2, Arch.x86_avx512};
        return new Arch[]{this};
    }

    /**
     * Returns true if the code for this arch can generally be run on the specified arch.
     * Mainly: x86 can be run on x86-avx2 and x86-avx512; x86-avx2 can be run on x86-avx512,
     * but x86-avx2 can NOT be run on x86, and so on.
     */
    public boolean isCompatibleWith(Arch other){
        if(this == other)
            return true;
        switch (this){
            case x86:
                return other == Arch.x86_avx2 || other == Arch.x86_avx512;
            case x86_avx2:
                return other == Arch.x86_avx512;
            default:
                return false;
        }
    }

    /**
     * Returns true if this arch is strictly lower in the x86 compatibility chain than
     * {@code other} (e.g. x86 is lower than x86_avx2). Both archs must be compatible.
     *
     * @throws IllegalStateException if the two archs are not compatible
     */
    public boolean lowerThan(Arch other){
        Preconditions.checkState(isCompatibleWith(other), "Unable to compare non-compatible archs: %s and %s", this, other);
        // For any *compatible* pair, inequality implies this one sits lower in the chain:
        // the only compatible unequal pairs are (x86,avx2), (x86,avx512) and (avx2,avx512).
        return this != other;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/ComputeDevice.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import ai.konduit.serving.build.config.devices.CUDADevice;
/**
 * Marker interface for a target compute device, plus the canonical device-name constants.
 * A {@code null} ComputeDevice conventionally means "CPU only".
 */
public interface ComputeDevice {
    String CPU = "CPU";
    String CUDA_100 = "CUDA_10.0";
    String CUDA_101 = "CUDA_10.1";
    String CUDA_102 = "CUDA_10.2";
    String CUDA_110 = "CUDA_11.0";

    /**
     * Resolves a device name to a ComputeDevice instance. "CPU" (case-insensitive)
     * maps to {@code null} (the CPU-only convention); anything containing "cuda"
     * is delegated to {@link CUDADevice#forName(String)}.
     *
     * @throws UnsupportedOperationException for any other device name
     */
    static ComputeDevice forName(String name){
        if(name.equalsIgnoreCase(CPU))
            return null;
        if(name.toLowerCase().contains("cuda"))
            return CUDADevice.forName(name);
        throw new UnsupportedOperationException("Invalid, unknown, not supported or not yet implemented device type: " + name);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/Config.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import ai.konduit.serving.build.dependencies.Dependency;
import ai.konduit.serving.build.dependencies.DependencyAddition;
import ai.konduit.serving.build.dependencies.ModuleRequirements;
import ai.konduit.serving.build.dependencies.nativedep.NativeDependency;
import ai.konduit.serving.build.steps.RunnerInfo;
import ai.konduit.serving.build.steps.StepId;
import ai.konduit.serving.build.util.ModuleUtils;
import ai.konduit.serving.build.validation.ValidationFailure;
import ai.konduit.serving.build.validation.ValidationResult;
import ai.konduit.serving.pipeline.util.ObjectMappers;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.experimental.Accessors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.nd4j.common.base.Preconditions;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
/**
 * Configuration for a Konduit Serving build: the pipeline to serve, the Konduit Serving version,
 * the target platform, the serving transports, the modules (and any extra dependencies) to include,
 * and the deployment artifact types (uber-JAR, docker, etc) to produce.
 * <p>
 * Serializable to/from JSON and YAML via {@link ObjectMappers}.
 */
@Getter
@Setter
@Accessors(fluent = true)
@NoArgsConstructor
@Slf4j
public class Config {
    //Path to the pipeline JSON/YAML file that will be served
    private String pipelinePath;
    //Konduit Serving version to build against
    private String ksVersion;
    private Metadata metadata;
    //Target system(s) - "Linux x86_avx2 CPU", "Linux ARM64", etc)
    private Target target; //TODO this should allow (a) N independent artifacts (one per target), and (b) N targets within one artifact
    //Konduit serving modules to include - "konduit-serving-tensorflow" etc
    private List<Serving> serving = Collections.singletonList(Serving.HTTP);
    private List<Module> modules;
    //Additional dependencies, in GAV(C) format: "group_id:artifact_id:version" / "group_id:artifact_id:version:classifier"
    private List<String> additionalDependencies;
    private List<Deployment> deployments;

    public Config(@JsonProperty("pipelinePath") String pipelinePath, @JsonProperty("ksVersion") String ksVersion,
                  @JsonProperty("metadata") Metadata metadata, @JsonProperty("target") Target target,
                  @JsonProperty("serving") List<Serving> serving, @JsonProperty("modules") List<Module> modules,
                  @JsonProperty("deployments") List<Deployment> deployments){
        this.pipelinePath = pipelinePath;
        this.ksVersion = ksVersion;
        this.metadata = metadata;
        this.target = target;
        this.serving = serving;
        this.modules = modules;
        this.deployments = deployments;
    }

    public Config modules(List<Module> modules){
        this.modules = modules;
        return this;
    }

    public Config modules(Module... modules){
        this.modules = Arrays.asList(modules);
        return this;
    }

    public Config addModules(Module... modules){
        return addModules(Arrays.asList(modules));
    }

    /** Appends the given modules to any already configured (never replaces the existing ones) */
    public Config addModules(List<Module> modules){
        if(this.modules == null){
            this.modules = modules;
        } else {
            List<Module> newList = new ArrayList<>(this.modules); //In case currently we have an immutable list
            newList.addAll(modules);
            this.modules = newList;
        }
        return this;
    }

    public Config serving(List<Serving> serving){
        this.serving = serving;
        return this;
    }

    public Config serving(Serving... serving){
        this.serving = Arrays.asList(serving);
        return this;
    }

    public Config deployments(List<Deployment> deployments){
        this.deployments = deployments;
        return this;
    }

    public Config deployments(Deployment... deployments){
        this.deployments = Arrays.asList(deployments);
        return this;
    }

    public List<Deployment> deployments(){
        return deployments;
    }

    /**
     * Validate this configuration.
     * NOTE(review): currently only resolves runners for each pipeline step (which throws on failure);
     * target compatibility checking is not yet implemented, so the returned result carries no failures.
     */
    public ValidationResult validate(){
        //First: check that we have a module for every step in the pipeline
        Map<StepId, List<RunnerInfo>> canRunWith = ModuleUtils.runnersForFile(new File(pipelinePath));
        List<ValidationFailure> failures = new ArrayList<>();
        //Check Target compatibility (OS/arch etc)
        return new ValidationResult(failures);
    }

    //Can't rely on lombok @Data or @EqualsAndHashCode due to bug: https://github.com/rzwitserloot/lombok/issues/2193
    @Override
    public boolean equals(Object o){
        if(!(o instanceof Config))
            return false;
        Config c = (Config)o;
        return Objects.equals(pipelinePath, c.pipelinePath) &&
                Objects.equals(ksVersion, c.ksVersion) &&
                Objects.equals(metadata, c.metadata) &&
                Objects.equals(target, c.target) &&
                Objects.equals(serving, c.serving) &&
                Objects.equals(modules, c.modules) &&
                Objects.equals(deployments, c.deployments);
    }

    @Override
    public int hashCode(){
        return Objects.hashCode(pipelinePath) ^
                Objects.hashCode(ksVersion) ^
                Objects.hashCode(metadata) ^
                Objects.hashCode(target) ^
                Objects.hashCode(serving) ^
                Objects.hashCode(modules) ^
                Objects.hashCode(deployments);
    }

    /** @return This configuration serialized as JSON */
    public String toJson(){
        try {
            return ObjectMappers.json().writeValueAsString(this);
        } catch (IOException e){
            throw new RuntimeException("Error converting Config to JSON", e); //Should never happen
        }
    }

    /** @return This configuration serialized as YAML */
    public String toYaml(){
        try {
            return ObjectMappers.yaml().writeValueAsString(this);
        } catch (IOException e){
            //Fixed: message previously (incorrectly) said "JSON"
            throw new RuntimeException("Error converting Config to YAML", e); //Should never happen
        }
    }

    public static Config fromJson(String json){
        try {
            return ObjectMappers.json().readValue(json, Config.class);
        } catch (IOException e){
            throw new RuntimeException("Error deserializing JSON configuration", e);
        }
    }

    public static Config fromYaml(String yaml){
        try {
            return ObjectMappers.yaml().readValue(yaml, Config.class);
        } catch (IOException e){
            throw new RuntimeException("Error deserializing YAML configuration", e);
        }
    }

    public static Config fromFileJson(File f){
        try {
            return fromJson(FileUtils.readFileToString(f, StandardCharsets.UTF_8));
        } catch (IOException e){
            throw new RuntimeException("Error reading JSON file configuration: " + f.getAbsolutePath(), e);
        }
    }

    public static Config fromFileYaml(File f){
        try {
            return fromYaml(FileUtils.readFileToString(f, StandardCharsets.UTF_8));
        } catch (IOException e){
            throw new RuntimeException("Error reading YAML file configuration: " + f.getAbsolutePath(), e);
        }
    }

    public List<Module> modules(){
        return modules;
    }

    /**
     * Works out (and stores in this config) which modules are required to run the configured pipeline:
     * the always-present core modules, one module per serving transport, plus one module per pipeline step.
     *
     * @return The resolved module list (also assigned to {@link #modules})
     */
    public List<Module> resolveModules(){
        Preconditions.checkState(pipelinePath != null && !pipelinePath.isEmpty(), "Pipeline path must be set before attempting" +
                " to resolve required modules for it");
        Set<Module> modules = new LinkedHashSet<>();
        modules.add(Module.PIPELINE);   //Always include core API
        modules.add(Module.VERTX);      //Always include core Vert.x module for serving
        modules.add(Module.CLI);        //Always include CLI for launching
        for(Serving s : serving){
            switch (s){
                case HTTP:
                    modules.add(Module.HTTP);
                    break;
                case GRPC:
                    modules.add(Module.GRPC);
                    break;
                case MQTT:
                    modules.add(Module.MQTT);
                    break;
                default:
                    throw new IllegalStateException("Unknown or not supported serving type: " + s);
            }
        }
        Map<StepId, List<RunnerInfo>> m = ModuleUtils.runnersForFile(new File(pipelinePath));
        for(Map.Entry<StepId, List<RunnerInfo>> e : m.entrySet()){
            List<RunnerInfo> runners = e.getValue();
            if(runners.size() > 1){
                //TODO fix this - properly handle the case where one step can be executed by more than 1 runner
                log.warn("More than one possible runner, selecting first: {}, {}", e.getKey(), runners);
            }
            Module mod = runners.get(0).module();
            modules.add(mod);
        }
        //TODO what if user has set modules already, and they want extra modules for some reason?
        this.modules = new ArrayList<>(modules);
        return this.modules;
    }

    /**
     * Resolves the full set of Maven dependencies needed for the configured target: the module dependencies
     * themselves, any target-specific requirements those modules have (e.g., native backends, choosing the
     * classifier that best matches the target architecture), and any user-specified additional dependencies.
     *
     * @return The resolved dependency list
     */
    public List<Dependency> resolveDependencies(){
        Preconditions.checkState(target != null, "Cannot resolve dependencies: No target has been set");
        if(modules == null || modules.isEmpty())
            resolveModules();
        Set<Dependency> deps = new LinkedHashSet<>();
        //First: go through the modules needed to run this pipeline, and add those module dependencies
        for(Module m : modules){
            deps.add(m.dependency());
        }
        //Second: go through each module, and work out what optional dependencies (nd4j backends, etc) we must add
        for(Module m : modules){
            ModuleRequirements req = m.dependencyRequirements();
            if(req == null) //Module doesn't have any configurable required dependencies
                continue;
            if(!req.satisfiedBy(target, deps)){
                List<DependencyAddition> l = req.suggestDependencies(target, deps);
                if(l != null){
                    for( DependencyAddition da : l){
                        if(da.type() == DependencyAddition.Type.ALL_OF){
                            deps.addAll(da.toAdd());
                        } else {
                            //Any of
                            List<Dependency> toAdd = da.toAdd();
                            if(toAdd.size() == 1) {
                                deps.add(toAdd.get(0));
                            } else if(toAdd.size() > 1){
                                //Perhaps this is due to classifiers - both x86 and avx2 for example
                                boolean allSameExClassifier = true;
                                Dependency first = toAdd.get(0);
                                for( int i=1; i<toAdd.size(); i++ ){
                                    Dependency d = toAdd.get(i);    //Fixed: was toAdd.get(1), which compared only the second entry on every iteration
                                    allSameExClassifier = first.groupId().equals(d.groupId()) &&
                                            first.artifactId().equals(d.artifactId()) &&
                                            first.version().equals(d.version()) &&
                                            (first.classifier() != null && d.classifier() != null);
                                    if(!allSameExClassifier){
                                        break;
                                    }
                                }
                                boolean resolved = false;
                                if(allSameExClassifier){
                                    boolean allNative = true;
                                    for(Dependency d : toAdd){
                                        if(!d.isNativeDependency()){
                                            allNative = false;
                                            break;
                                        }
                                    }
                                    if(allNative){
                                        //Now just select the dependency that matches our target...
                                        for(Dependency d : toAdd){
                                            NativeDependency nd = d.getNativeDependency();
                                            Set<Target> supported = nd.getSupportedTargets();
                                            //Just because it SUPPORTS this target, doesn't mean it's optimal...
                                            boolean noneLower = true;
                                            for(Target t : supported){
                                                Arch a = t.arch();
                                                if(a.isCompatibleWith(target.arch()) && t.arch().lowerThan(target.arch())){
                                                    noneLower = false;
                                                    break;
                                                }
                                            }
                                            if(noneLower){
                                                deps.add(d);
                                                resolved = true;
                                            }
                                        }
                                    }
                                }
                                if(!resolved) {
                                    //TODO Currently both nd4j-native and nd4j-cuda-10.x can be recommended when the target is CUDA
                                    //TODO we'll work out a better solution to this in the future... for now, just warn
                                    log.warn("Multiple possible dependencies for requirement, picking first: {} - {}", req, toAdd);
                                    deps.add(toAdd.get(0));
                                }
                            }
                        }
                    }
                }
            }
        }
        //Additional dependencies:
        if(additionalDependencies != null && !additionalDependencies.isEmpty()){
            for(String s : additionalDependencies){
                String[] split = s.split(":");
                Preconditions.checkState(split.length == 3 || split.length == 4, "Invalid additionalDependency setting: Dependencies must " +
                        "be specified in \"group_id:artifact_id:version\" or \"group_id:artifact_id:version:classifier\" format. Got %s", additionalDependencies);
                String c = split.length == 4 ? split[3] : null;
                Dependency d = new Dependency(split[0], split[1], split[2], c);
                deps.add(d);
            }
        }
        return new ArrayList<>(deps);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/Deployment.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.deployments.UberJarDeployment;
import org.nd4j.shade.jackson.annotation.JsonSubTypes;
import org.nd4j.shade.jackson.annotation.JsonTypeInfo;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static org.nd4j.shade.jackson.annotation.JsonTypeInfo.Id.NAME;
@JsonSubTypes({
        @JsonSubTypes.Type(value = UberJarDeployment.class, name = "uberjar"),
})
@JsonTypeInfo(use = NAME, include = JsonTypeInfo.As.WRAPPER_OBJECT)
public interface Deployment {
    //Names for the supported (or planned) deployment artifact types
    String CLASSPATH = "CLASSPATH";
    String JAR = "JAR";
    String UBERJAR = "UBERJAR";
    String DOCKER = "DOCKER";
    String EXE = "EXE";
    String WAR = "WAR";
    String RPM = "RPM";
    String DEB = "DEB";
    String TAR = "TAR";

    /** @return The names of this deployment's configurable properties */
    List<String> propertyNames();

    /** @return This deployment's configuration as a property name -> value map */
    Map<String,String> asProperties();

    /** Configure this deployment from a property name -> value map */
    void fromProperties(Map<String,String> props);

    /**
     * Validate the deployment configuration before the deployment build is attempted
     * Used to detect obvious problems such as "output location is not set" etc
     */
    DeploymentValidation validate();

    /**
     * Summary output string after the build completes
     * i.e., info about the output after the build has completed
     */
    String outputString();

    /** @return Gradle import statements this deployment's build script requires */
    List<String> gradleImports();

    /** @return Gradle plugins this deployment's build script requires */
    List<GradlePlugin> gradlePlugins();

    /** @return Names of the Gradle tasks to run for this deployment */
    List<String> gradleTaskNames();

    /**
     * @return A default, timestamp-based version string of the form "yyyyMMdd-HHmmss.SSS"
     */
    static String defaultVersion(){
        long time = System.currentTimeMillis();
        //Fixed pattern: was "YYYYMMDD-HHmmss.SSS" - 'Y' is week-year and 'D' is day-of-year in
        //SimpleDateFormat, which yields wrong dates (especially around year boundaries).
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd-HHmmss.SSS");
        return sdf.format(new Date(time));
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/DeploymentValidation.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import java.util.List;
/**
 * The outcome of validating a deployment configuration before a build is attempted:
 * either OK, or a list of human-readable reasons why the configuration is invalid.
 */
public interface DeploymentValidation {
    /**
     * @return True if the deployment configuration is OK, false otherwise
     */
    boolean ok();
    /**
     * @return Null if ok() == true, or a list of failure messages (i.e., the reasons why the deployment configuration
     * is invalid)
     */
    List<String> failureMessages();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/Metadata.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
/**
 * Metadata recorded for a Konduit Serving build: who authored it, when, and the build version.
 * All fields are plain strings; no particular timestamp format is enforced here.
 */
@Data
@Accessors(fluent = true)
@NoArgsConstructor
public class Metadata {
    //Name of the build author
    private String author;
    //When the build configuration was created (free-form string)
    private String timestamp;
    //Version string for the build
    private String buildVersion;
    public Metadata(@JsonProperty("author") String author, @JsonProperty("timestamp") String timestamp,
                    @JsonProperty("buildVersion") String buildVersion){
        this.author = author;
        this.timestamp = timestamp;
        this.buildVersion = buildVersion;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/Module.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import ai.konduit.serving.annotation.module.RequiresDependenciesProcessor;
import ai.konduit.serving.build.dependencies.*;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import org.apache.commons.io.FileUtils;
import org.nd4j.common.base.Preconditions;
import org.nd4j.common.io.ClassPathResource;
import org.nd4j.common.primitives.Pair;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
/**
 * Represents a single Konduit Serving module (e.g. "konduit-serving-http"): its name, its Maven dependency,
 * and any configurable dependency requirements (such as needing one of several ND4J backends).
 * <p>
 * Module definitions are loaded once, at class initialization, from the
 * "META-INF/konduit-serving/ModuleRequiresDependencies" metadata file produced by the
 * {@code RequiresDependenciesProcessor} annotation processor. The parsing here is tightly coupled to that
 * file's line format - see {@link #loadModuleInfo()} for the format details.
 */
@Data
@Accessors(fluent = true)
public class Module {
    public static final String CURRENT_KS_VERSION = "0.1.0-SNAPSHOT"; //TODO auto infer - maybe from git.properties?
    //All known modules keyed by full module name; must be initialized before the constants below
    private static final Map<String, Module> MODULES = loadModuleInfo();
    public static final Module PIPELINE = forName("konduit-serving-pipeline");
    public static final Module VERTX = forName("konduit-serving-vertx");
    public static final Module HTTP = forName("konduit-serving-http");
    public static final Module GRPC = forName("konduit-serving-grpc");
    public static final Module MQTT = forName("konduit-serving-mqtt");
    public static final Module DL4J = forName("konduit-serving-deeplearning4j");
    public static final Module SAMEDIFF = forName("konduit-serving-samediff");
    //NOTE(review): TENSORFLOW resolves to the same module as SAMEDIFF - looks like a copy/paste slip;
    //confirm whether "konduit-serving-tensorflow" was intended
    public static final Module TENSORFLOW = forName("konduit-serving-samediff");
    public static final Module IMAGE = forName("konduit-serving-image");
    //CLI module can't be defined using forName (from konduit-serving-metadata files) due to it not being in metadata module
    // (to avoid cyclical dependency: cli -> meta -> build -> cli)
    public static final Module CLI = new Module("konduit-serving-cli", new Dependency("ai.konduit.serving", "konduit-serving-cli", CURRENT_KS_VERSION), null, null);
    //Full module name, e.g. "konduit-serving-http"
    private final String name;
    //Maven dependency for the module itself
    private final Dependency dependency;
    //Required configurable dependencies (may be null if the module has none)
    private final ModuleRequirements dependencyRequirements;
    //Optional configurable dependencies (may be null)
    private final ModuleRequirements dependencyOptional;
    public Module(@JsonProperty("name") String name, @JsonProperty("dependency") Dependency dependency,
                  @JsonProperty("dependencyRequirements") ModuleRequirements dependencyRequirements,
                  @JsonProperty("dependencyOptional") ModuleRequirements dependencyOptional) {
        this.name = name;
        this.dependency = dependency;
        this.dependencyRequirements = dependencyRequirements;
        this.dependencyOptional = dependencyOptional;
    }
    public Object dependenciesOptional() {
        return dependencyOptional;
    }
    /**
     * @param moduleName The full name of the module - for example "konduit-serving-tensorflow"
     * @return The module for that name; throws an exception if it does not exist
     */
    public static Module forName(String moduleName) {
        Preconditions.checkState(MODULES.containsKey(moduleName), "No module with name \"%s\" is known", moduleName);
        return MODULES.get(moduleName);
    }
    /**
     * @param moduleShortName The short name of the module - for example "tensorflow" to get the "konduit-serving-tensorflow" module
     * @return The module for that name; throws an exception if it does not exist
     */
    public static Module forShortName(String moduleShortName) {
        String name = "konduit-serving-" + moduleShortName;
        return forName(name);
    }
    /**
     * @param module Name of the module
     * @param shortName If true: the name is a short name (as per {@link #forShortName(String)})
     * @return True if a module with that name exists
     */
    public static boolean moduleExistsForName(String module, boolean shortName) {
        if (shortName) {
            return MODULES.containsKey("konduit-serving-" + module);
        } else {
            return MODULES.containsKey(module);
        }
    }
    /**
     * Loads all module definitions from the classpath metadata file.
     * Each line has the form "moduleName,dependencySpec" where the spec is either an inherit directive,
     * an "any of" block (starting with "{{" or "{["), or a plain quoted dependency list.
     * Inheritance directives (module inherits another module's requirements) are resolved in a second pass,
     * including transitive chains.
     *
     * @return Unmodifiable map of module name -> Module
     */
    private static Map<String, Module> loadModuleInfo() {
        //Load module info
        String s;
        try {
            File f = new ClassPathResource("META-INF/konduit-serving/ModuleRequiresDependencies").getFile();
            s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        Map<String, Module> modulesInner = new LinkedHashMap<>();
        Map<String, Module> modules = Collections.unmodifiableMap(modulesInner);
        String[] lines = s.split("\n");
        Map<String, String> inherit = new HashMap<>();  //module name -> module to inherit requirements from
        for (String line : lines) {
            int idx = line.indexOf(',');
            String module = line.substring(0, idx);
            String deps = line.substring(idx + 1);
            if (deps.startsWith(RequiresDependenciesProcessor.INHERIT_MODULE_PREFIX)) {
                //Defer inheritance resolution until all plain definitions are parsed
                String inheritFrom = deps.substring(RequiresDependenciesProcessor.INHERIT_MODULE_PREFIX.length());
                inherit.put(module, inheritFrom);
                continue;
            }
            //First: need to work out if an "Any of" dependency, or just one instance of an ALL requirement
            //Note that ALL requirements are on separate lines, whereas ANY are on one line
            boolean isAny = deps.startsWith("{{") || deps.startsWith("{[");
            ModuleRequirements r = null;
            if (isAny) {
                //Example format: {["org.nd4j:nd4j-native:1.0.0-SNAPSHOT","org.nd4j:nd4j-native:1.0.0-SNAPSHOT:{linux-x86_64,...}"],["org.nd4j:nd4j-cuda-10.0:1.0.0-SNAPSHOT","org.nd4j:nd4j-cuda-10.0:1.0.0-SNAPSHOT:{linux-x86_64,...}"]}
                //This should be interpreted to mean: "We need ANY ONE of the [...] blocks, for which we need all of inner dependencies
                //In this instance, we need nd4j-native AND its classifier - OR - we need nd4j-cuda-10.x AND its classifier
                String before = deps;   //NOTE(review): never read after this point - dead variable
                deps = deps.substring(1, deps.length() - 1); //Strip first/last bracket
                List<DependencyRequirement> toCombine = new ArrayList<>();
                boolean thisAll = deps.startsWith("[");
                String[] reqSplit = deps.split("[]}],[\\[{]"); //Split on: "],[" or "],{" or "},[" or "},{"
                reqSplit[0] = reqSplit[0].substring(1); //Strip leading "["
                reqSplit[reqSplit.length - 1] = reqSplit[reqSplit.length - 1].substring(0, reqSplit[reqSplit.length - 1].length() - 1); //Strip trailing "]"
                for (String req : reqSplit) {
                    //req = req.substring(1); //Strip leading bracket; trailing bracket
                    if (req.endsWith("]") || req.endsWith("}"))
                        req = req.substring(0, req.length() - 1);
                    if (req.isEmpty())
                        continue; //Shouldn't happen except for malformed annotation (no @Dependency in block)
                    DependencyRequirement parse = parseDependenciesLine(req, !thisAll);
                    toCombine.add(parse);
                }
                DependencyRequirement req = new CompositeRequirement(CompositeRequirement.Type.ANY, toCombine);
                r = new ModuleRequirements(Collections.singletonList(req));
            } else {
                //Example format: "org.nd4j:nd4j-native:1.0.0-SNAPSHOT:{linux-x86_64,linux-x86_64-avx2,linux-x86_64-avx512,linux-ppc64le,linux-arm64,linux-armhf,windows-x86_64,windows-x86_64-avx2,macosx-x86_64,macosx-x86_64-avx2}","org.nd4j:nd4j-cuda-10.0:1.0.0-SNAPSHOT:{linux-x86_64,linux-ppc64le,linux-arm64,windows-x86_64}","org.nd4j:nd4j-cuda-10.1:1.0.0-SNAPSHOT:{linux-x86_64,linux-ppc64le,linux-arm64,windows-x86_64}","org.nd4j:nd4j-cuda-10.2:1.0.0-SNAPSHOT:{linux-x86_64,linux-ppc64le,linux-arm64,windows-x86_64}"
                //This should be interpreted as "any of the following"
                deps = deps.substring(1, deps.length() - 1); //Strip first/last bracket
                List<DependencyRequirement> reqs = new ArrayList<>();
                List<Dependency> depsForReq = new ArrayList<>();    //NOTE(review): never used - dead variable
                if (!deps.isEmpty()) { //Can be empty if there are no requirements for this module
                    DependencyRequirement req = parseDependenciesLine(deps, true);
                    reqs.add(req);
                }
                if (!reqs.isEmpty()) {
                    r = new ModuleRequirements(reqs);
                }
            }
            if (modulesInner.containsKey(module)) {
                //Module already seen (ALL requirements span multiple lines) - merge
                Module mod = modulesInner.get(module);
                List<DependencyRequirement> currReqs = mod.dependencyRequirements().reqs();
                if (currReqs == null) {
                    mod = new Module(module, ksModule(module), r, null);
                    modulesInner.put(module, mod);
                } else if (r != null) {
                    //NOTE(review): newRews is the SAME list instance as currReqs, so this addAll duplicates
                    //currReqs in place, and the merged result is then discarded when the module is rebuilt
                    //with only "r". Looks like a merge bug (perhaps r.reqs().addAll(currReqs) was intended) -
                    //confirm intended behavior before changing.
                    List<DependencyRequirement> newRews = mod.dependencyRequirements().reqs();
                    newRews.addAll(currReqs);
                    mod = new Module(module, ksModule(module), r, null);
                    modulesInner.put(module, mod);
                }
            } else {
                Module mod = new Module(module, ksModule(module), r, null);
                modulesInner.put(module, mod);
            }
        }
        //Handle dependency inheritance
        //Note that we need to ALSO take into account transitive: x -> y -> z
        if (!inherit.isEmpty()) {
            Set<Pair<String, String>> toProcess = new HashSet<>();
            for (Map.Entry<String, String> e : inherit.entrySet()) {
                toProcess.add(Pair.of(e.getKey(), e.getValue()));
            }
            //Repeatedly process any pair whose "inherit from" module is already resolved; if a full pass
            //makes no progress, there is a cycle or an unknown module
            while (!toProcess.isEmpty()) {
                Iterator<Pair<String, String>> iter = toProcess.iterator();
                boolean anyRemoved = false;
                while (iter.hasNext()) {
                    Pair<String, String> next = iter.next();
                    if (modulesInner.containsKey(next.getSecond())) {
                        //Already processed the module we want to inherit from
                        String m = next.getFirst();
                        String from = next.getSecond();
                        Module fromM = modulesInner.get(from);
                        Module mod = modulesInner.get(m);
                        if (mod == null) {
                            mod = new Module(m, ksModule(m), fromM.dependencyRequirements(), fromM.dependencyOptional());
                            modulesInner.put(m, mod);
                        } else {
                            //Merge inherited requirements into the existing module's requirement list
                            ModuleRequirements reqs = mod.dependencyRequirements();
                            List<DependencyRequirement> toAdd = fromM.dependencyRequirements().reqs();
                            List<DependencyRequirement> l = reqs.reqs();
                            if (toAdd != null) {
                                if (l == null) {
                                    reqs.reqs(toAdd);
                                } else {
                                    //Add
                                    for (DependencyRequirement dr : toAdd) {
                                        if (!l.contains(dr)) {
                                            l.add(dr);
                                        }
                                    }
                                }
                            }
                        }
                        iter.remove();
                        anyRemoved = true;
                    }
                }
                if (!anyRemoved) {
                    throw new IllegalStateException("Unable to resolve inherited dependencies: unknown modules or cyclical" +
                            "inheritance situation?\n" + toProcess);
                }
            }
        }
        return modules;
    }
    /**
     * Parses one quoted dependency list (e.g. "g:a:v","g:a:v:{c1,c2}",...) into a requirement.
     * A "{...}" classifier set means any one classifier suffices; a "[...]" set would mean all are
     * needed (not yet implemented).
     *
     * @param line Quoted, comma-separated dependency list (without enclosing brackets)
     * @param any  If true, the resulting composite requirement is an ANY requirement; otherwise ALL
     */
    protected static DependencyRequirement parseDependenciesLine(String line, boolean any) {
        String[] depsSplit = line.split("\",\"");
        depsSplit[0] = depsSplit[0].substring(1); //Remove leading quote
        depsSplit[depsSplit.length - 1] = depsSplit[depsSplit.length - 1].substring(0, depsSplit[depsSplit.length - 1].length() - 1); //Remove trailing quote
        List<DepSet> set = new ArrayList<>();
        for (String d : depsSplit) {
            String[] split = d.split(":");
            if (split.length == 4) {
                String classifiers = split[3];
                if (classifiers.startsWith("{") || classifiers.startsWith("[")) {
                    boolean allClassifier = classifiers.startsWith("["); //{any} vs. [all]
                    classifiers = classifiers.substring(1, classifiers.length() - 1); //Strip brackets
                    String[] cs = classifiers.split(",");
                    List<Dependency> dList = new ArrayList<>();     //NOTE(review): never used - dead variable
                    List<Dependency> classifierSet = new ArrayList<>();
                    for (String c : cs) {
                        classifierSet.add(new Dependency(split[0], split[1], split[2], c));
                    }
                    if (allClassifier) {
                        //All classifiers are needed
                        throw new UnsupportedOperationException("Not yet implemented");
                    } else {
                        //Only one of the classifiers are needed (usual case)
                        set.add(new DepSet(classifierSet));
                    }
                } else {
                    //Single classifier
                    //NOTE(review): the parsed classifier (split[3]) is dropped here - 3-arg Dependency ctor; confirm intended
                    set.add(new DepSet(Collections.singletonList(new Dependency(split[0], split[1], split[2]))));
                }
            } else {
                //GAV only
                set.add(new DepSet(Collections.singletonList(new Dependency(split[0], split[1], split[2]))));
            }
        }
        boolean allSingle = true;
        for (DepSet s : set) {
            allSingle = s.list.size() == 1;
            if (!allSingle)
                break;
        }
        if (allSingle) {
            //Combine into a single AllRequirement
            List<Dependency> finalDeps = new ArrayList<>();
            for (DepSet s : set) {
                finalDeps.addAll(s.list);
            }
            return new AllRequirement("", finalDeps);
        } else {
            //Combine into a composite requirement
            List<DependencyRequirement> reqs = new ArrayList<>();
            for (DepSet s : set) {
                if (s.list.size() == 1) {
                    reqs.add(new AllRequirement("", s.list));
                } else {
                    //Multiple classifiers
                    reqs.add(new AnyRequirement("", s.list));
                }
            }
            return new CompositeRequirement(any ? CompositeRequirement.Type.ANY : CompositeRequirement.Type.ALL, reqs);
        }
    }
    //Simple holder for a group of dependencies that together form one requirement alternative
    @AllArgsConstructor
    @Data
    private static class DepSet {
        private List<Dependency> list;
    }
    /**
     * @return The Maven dependency for a first-party Konduit Serving module with the given artifact name
     */
    protected static Dependency ksModule(String name) {
        //TODO don't hardcode versions
        return new Dependency("ai.konduit.serving", name, CURRENT_KS_VERSION, null);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/OS.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
/**
 * Operating systems that a Konduit Serving build can target.
 */
public enum OS {
    LINUX, WINDOWS, MACOSX, ANDROID;

    /**
     * Resolves an OS from its (case-insensitive) name. The aliases "mac" and "osx"
     * both map to {@link #MACOSX}.
     *
     * @param s OS name, e.g. "linux", "Windows", "mac"
     * @return The matching OS; throws IllegalArgumentException for unknown names
     */
    public static OS forName(String s) {
        boolean macAlias = "MAC".equalsIgnoreCase(s) || "OSX".equalsIgnoreCase(s);
        return macAlias ? MACOSX : valueOf(s.toUpperCase());
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/Serving.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
/**
 * Transport protocols over which a Konduit Serving instance can serve a pipeline.
 * Each value maps to a corresponding konduit-serving module at build time.
 */
public enum Serving {
HTTP, GRPC, MQTT
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/SimpleDeploymentValidation.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import lombok.AllArgsConstructor;
import java.util.Arrays;
import java.util.List;
/**
 * Basic {@link DeploymentValidation} implementation backed by a (possibly null or empty)
 * list of failure messages. No failures means the deployment configuration is valid.
 */
@AllArgsConstructor
public class SimpleDeploymentValidation implements DeploymentValidation {

    private List<String> failures;

    /**
     * @param failures Failure messages; passing none (or null) marks the validation as OK
     */
    public SimpleDeploymentValidation(String... failures) {
        if (failures == null || failures.length == 0) {
            this.failures = null;
        } else {
            this.failures = Arrays.asList(failures);
        }
    }

    @Override
    public boolean ok() {
        if (failures == null) {
            return true;
        }
        return failures.isEmpty();
    }

    @Override
    public List<String> failureMessages() {
        return failures;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/Target.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config;
import ai.konduit.serving.build.config.devices.CUDADevice;
import lombok.Data;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
/**
 * The Target class represents a deployment device/target - as defined in terms of an operating system, CPU architecture,
 * and device (such as CPU vs. CUDA, etc). If no device is specified, it is assumed that CPU execution will be used.
 */
@Data
@Accessors(fluent = true)
public class Target {
// Common CPU-only targets (device == null means CPU execution)
public static final Target LINUX_X86 = new Target(OS.LINUX, Arch.x86, null);
public static final Target LINUX_X86_AVX2 = new Target(OS.LINUX, Arch.x86_avx2, null);
public static final Target LINUX_X86_AVX512 = new Target(OS.LINUX, Arch.x86_avx512, null);
public static final Target WINDOWS_X86 = new Target(OS.WINDOWS, Arch.x86, null);
public static final Target WINDOWS_X86_AVX2 = new Target(OS.WINDOWS, Arch.x86_avx2, null);
public static final Target MACOSX_X86 = new Target(OS.MACOSX, Arch.x86, null);
public static final Target MACOSX_X86_AVX2 = new Target(OS.MACOSX, Arch.x86_avx2, null);
// CUDA targets for each supported CUDA version (Linux and Windows only)
public static final Target LINUX_CUDA_10_0 = new Target(OS.LINUX, Arch.x86, new CUDADevice("10.0"));
public static final Target LINUX_CUDA_10_1 = new Target(OS.LINUX, Arch.x86, new CUDADevice("10.1"));
public static final Target LINUX_CUDA_10_2 = new Target(OS.LINUX, Arch.x86, new CUDADevice("10.2"));
public static final Target WINDOWS_CUDA_10_0 = new Target(OS.WINDOWS, Arch.x86, new CUDADevice("10.0"));
public static final Target WINDOWS_CUDA_10_1 = new Target(OS.WINDOWS, Arch.x86, new CUDADevice("10.1"));
public static final Target WINDOWS_CUDA_10_2 = new Target(OS.WINDOWS, Arch.x86, new CUDADevice("10.2"));
/** Linux, Windows and Mac x86, x86 avx2 and avx512 */
public static final Target[] LWM_X86 = new Target[]{LINUX_X86, LINUX_X86_AVX2, LINUX_X86_AVX512, WINDOWS_X86, WINDOWS_X86_AVX2,
MACOSX_X86, MACOSX_X86_AVX2};
private final OS os;
private final Arch arch;
private final ComputeDevice device; //If null: CPU
// JSON-creator constructor; Jackson binds the named properties
public Target(@JsonProperty("os") OS os, @JsonProperty("arch") Arch arch, @JsonProperty("device") ComputeDevice device){
this.os = os;
this.arch = arch;
this.device = device;
}
@Override
public String toString(){
return "Target(" + os + "," + arch + (device == null ? "" : "," + device.toString()) + ")";
}
/**
 * Returns an "os-arch" string intended to match a JavaCPP platform classifier.
 * NOTE(review): this uses the raw enum constant names; JavaCPP classifiers are lower
 * case (e.g. "linux-x86_64") - confirm the OS/Arch constant names produce the
 * expected form, or whether lower-casing is done elsewhere.
 */
public String toJavacppPlatform(){
//https://github.com/bytedeco/javacpp/tree/master/src/main/resources/org/bytedeco/javacpp/properties
return os.name() + "-" + arch.name();
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/config/devices/CUDADevice.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.config.devices;
import ai.konduit.serving.build.config.ComputeDevice;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * A {@link ComputeDevice} representing an NVIDIA CUDA device of a particular
 * CUDA toolkit version.
 */
@AllArgsConstructor
@Data
public class CUDADevice implements ComputeDevice {

    // CUDA versions this class currently knows how to resolve from a name,
    // checked in the same order as the original if/else chain.
    private static final String[] SUPPORTED_VERSIONS = {"10.0", "10.1", "10.2", "11.0"};

    private String cudaVersion;

    /**
     * Resolve a CUDA device from a free-form name (for example "cuda-10.2") by
     * looking for a supported CUDA version substring, case-insensitively.
     *
     * @param s name containing a CUDA version
     * @return the matching CUDADevice
     * @throws UnsupportedOperationException if no supported CUDA version is found in the name
     */
    public static CUDADevice forName(String s) {
        String str = s.toLowerCase();
        for (String version : SUPPORTED_VERSIONS) {
            if (str.contains(version)) {
                return new CUDADevice(version);
            }
        }
        // Fixed typo in the original message: "implemneted" -> "implemented"
        throw new UnsupportedOperationException("Invalid, unknown, not supported or not yet implemented CUDA version: " + s);
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/AllAddition.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.List;
/**
 * A {@link DependencyAddition} where ALL of the listed dependencies must be added
 * in order to satisfy the originating {@link DependencyRequirement}.
 */
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class AllAddition implements DependencyAddition {
// Dependencies that must all be added
private List<Dependency> add;
// The requirement this addition was suggested for
private DependencyRequirement forReq;
@Override
public Type type() {
return Type.ALL_OF;
}
@Override
public List<Dependency> toAdd() {
return add;
}
@Override
public DependencyRequirement forRequirement() {
return forReq;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/AllRequirement.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import ai.konduit.serving.build.config.Target;
import ai.konduit.serving.build.dependencies.nativedep.NativeDependency;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.*;
/**
 * A {@link DependencyRequirement} where ALL of the specified dependencies must be
 * present (taking into account native code support for the deployment target).
 */
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class AllRequirement implements DependencyRequirement {
    private final String name;
    private Set<Dependency> set;

    public AllRequirement(String name, List<Dependency> dependencies) {
        this(name, new HashSet<>(dependencies));
    }

    public AllRequirement(String name, Dependency... dependencies){
        this.name = name;
        this.set = new HashSet<>(Arrays.asList(dependencies));
    }

    @Override
    public String name() {
        return name;
    }

    @Override
    public boolean satisfiedBy(Target target, Collection<Dependency> currDeps) {
        //We need ALL of the requirements to be satisfied (considering native code + target)
        for (Dependency need : set) {
            if (!matchFound(need, target, currDeps))
                return false;
        }
        return true;
    }

    /**
     * True if {@code need} is present in {@code currDeps} and - for native
     * dependencies - its native code actually supports the given target.
     * Extracted from the previously duplicated loops in {@link #satisfiedBy} and
     * {@link #suggestDependencies}.
     */
    private static boolean matchFound(Dependency need, Target target, Collection<Dependency> currDeps) {
        for (Dependency d : currDeps) {
            if (need.equals(d)) {
                //GAV(C) match, but maybe it's a native dependency, and platform doesn't match
                if (need.isNativeDependency()) {
                    NativeDependency nd = need.getNativeDependency();
                    if (nd.supports(target)) {
                        return true;
                    }
                } else {
                    //Pure Java dependency
                    return true;
                }
            }
        }
        return false;
    }

    @Override
    public List<DependencyAddition> suggestDependencies(Target target, Collection<Dependency> currDeps) {
        if (satisfiedBy(target, currDeps))
            return null;
        //Collect everything that is missing and can actually run on this target
        Set<Dependency> notFound = new HashSet<>();
        for (Dependency need : set) {
            if (!matchFound(need, target, currDeps)) {
                if (need.isNativeDependency()) {
                    //Don't suggest a native dependency that can't be run on this target, even if it's a requirement for
                    // other targets that it _does_ run on
                    NativeDependency nd = need.getNativeDependency();
                    if (nd.supports(target)) {
                        notFound.add(need);
                    }
                } else {
                    notFound.add(need);
                }
            }
        }
        if (notFound.isEmpty())
            return null;
        return Collections.singletonList(new AllAddition(new ArrayList<>(notFound), this));
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/AnyAddition.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.List;
/**
 * A {@link DependencyAddition} where ANY ONE of the listed dependencies is enough
 * to satisfy the originating {@link DependencyRequirement}.
 */
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class AnyAddition implements DependencyAddition {
// Candidate dependencies - adding any single one satisfies the requirement
private List<Dependency> add;
// The requirement this addition was suggested for
private DependencyRequirement forReq;
@Override
public Type type() {
return Type.ONE_OF;
}
@Override
public List<Dependency> toAdd() {
return add;
}
@Override
public DependencyRequirement forRequirement() {
return forReq;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/AnyRequirement.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import ai.konduit.serving.build.config.Target;
import ai.konduit.serving.build.dependencies.nativedep.NativeDependency;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.*;
/**
 * A {@link DependencyRequirement} where ANY ONE of the specified dependencies
 * satisfies the requirement (taking into account native code support for the target).
 */
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class AnyRequirement implements DependencyRequirement {
    private final String name;
    private final Set<Dependency> set;

    public AnyRequirement(String name, List<Dependency> dependencies) {
        this(name, new HashSet<>(dependencies));
    }

    public AnyRequirement(String name, Dependency... dependencies) {
        this.name = name;
        set = new HashSet<>(Arrays.asList(dependencies));
    }

    @Override
    public String name() {
        return name;
    }

    /**
     * True if the dependency is usable on the target: either a pure Java dependency,
     * or a native one whose native code supports the target. Extracted from the
     * previously duplicated checks in satisfiedBy/suggestDependencies.
     */
    private static boolean runnableOn(Dependency d, Target target) {
        if (d.isNativeDependency()) {
            NativeDependency nd = d.getNativeDependency();
            return nd.supports(target);
        }
        //Pure Java dependency
        return true;
    }

    @Override
    public boolean satisfiedBy(Target target, Collection<Dependency> currDeps) {
        //Only need one of the requirements to be satisfied (considering native code + target)
        for (Dependency need : set) {
            for (Dependency d : currDeps) {
                if (need.equals(d) && runnableOn(need, target)) {
                    return true;
                }
            }
        }
        return false;
    }

    @Override
    public List<DependencyAddition> suggestDependencies(Target target, Collection<Dependency> currentDeps) {
        if (satisfiedBy(target, currentDeps))
            return null;
        //If not already satisfied, it means that none of the dependencies are available
        //But we still have to filter by what can run on this target
        List<Dependency> out = new ArrayList<>();
        for (Dependency d : set) {
            if (runnableOn(d, target)) {
                out.add(d);
            }
        }
        if (out.isEmpty())
            return null;
        return Collections.singletonList(new AnyAddition(out, this));
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/CompositeRequirement.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import ai.konduit.serving.build.config.Target;
import lombok.Data;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * A {@link DependencyRequirement} composed of multiple sub-requirements, combined
 * with either ANY (at least one must hold) or ALL (every one must hold) semantics.
 */
@Data
public class CompositeRequirement implements DependencyRequirement {
    /** How the sub-requirements are combined. */
    public enum Type {ANY, ALL}

    private final Type type;
    private DependencyRequirement[] reqs;

    public CompositeRequirement(Type type, List<DependencyRequirement> reqs){
        this(type, reqs.toArray(new DependencyRequirement[0]));
    }

    public CompositeRequirement(Type type, DependencyRequirement... reqs){
        this.type = type;
        this.reqs = reqs;
    }

    @Override
    public String name() {
        return ""; //TODO
    }

    @Override
    public boolean satisfiedBy(Target target, Collection<Dependency> currentDeps) {
        // Short-circuit: ANY succeeds on the first satisfied child, ALL fails on the
        // first unsatisfied one. (The original evaluated every child regardless.)
        for (DependencyRequirement r : reqs) {
            boolean sat = r.satisfiedBy(target, currentDeps);
            if (type == Type.ANY && sat)
                return true;
            if (type == Type.ALL && !sat)
                return false;
        }
        // No short-circuit taken: for ANY nothing was satisfied (false);
        // for ALL nothing was unsatisfied (true). Also covers an empty reqs array.
        return type == Type.ALL;
    }

    @Override
    public List<DependencyAddition> suggestDependencies(Target target, Collection<Dependency> currentDeps) {
        //TODO this should be reconsidered - what if multiple sub-requirements make the same recommendation?
        List<DependencyAddition> l = new ArrayList<>();
        for (DependencyRequirement r : reqs) {
            List<DependencyAddition> add = r.suggestDependencies(target, currentDeps);
            if (add != null) {
                l.addAll(add);
            }
        }
        return l;
    }

    @Override
    public String toString(){
        StringBuilder sb = new StringBuilder();
        sb.append(type.toString()).append("(");
        List<String> l = new ArrayList<>();
        for (DependencyRequirement d : reqs)
            l.add(d.toString());
        sb.append(String.join(",", l));
        sb.append(")");
        return sb.toString();
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/Dependency.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import ai.konduit.serving.build.dependencies.nativedep.NativeDependency;
import ai.konduit.serving.build.dependencies.nativedep.NativeDependencyRegistry;
import lombok.Data;
import lombok.experimental.Accessors;
import org.nd4j.common.base.Preconditions;
import org.nd4j.shade.jackson.annotation.JsonProperty;
/**
 * A Maven-style GAV(C) dependency: group, artifact, version and optional classifier.
 */
@Data
@Accessors(fluent = true)
public class Dependency {
    private final String groupId;
    private final String artifactId;
    private final String version;
    private final String classifier;

    public Dependency(String groupId, String artifactId, String version){
        this(groupId, artifactId, version, null);
    }

    public Dependency(@JsonProperty("groupId") String groupId, @JsonProperty("artifactId") String artifactId,
                      @JsonProperty("version") String version, @JsonProperty("classifier") String classifier){
        this.groupId = groupId;
        this.artifactId = artifactId;
        this.version = version;
        this.classifier = classifier;
    }

    /** @return true if this dependency is known (via the registry) to contain native code */
    public boolean isNativeDependency(){
        return NativeDependencyRegistry.isNativeDependency(this);
    }

    /**
     * @return the native code metadata for this dependency
     * @throws IllegalStateException if {@link #isNativeDependency()} is false
     */
    public NativeDependency getNativeDependency(){
        // Fixed typo in the original message: "depnedency" -> "dependency"
        Preconditions.checkState(isNativeDependency(), "Can only get NativeDependency information if the dependency has native code");
        return NativeDependencyRegistry.getNativeDependency(this);
    }

    /** @return "group:artifact:version" with ":classifier" appended when present */
    public String gavString(){
        return groupId + ":" + artifactId + ":" + version + (classifier == null ? "" : ":" + classifier);
    }

    @Override
    public String toString(){
        return "Dependency(" + gavString() + ")";
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/DependencyAddition.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import java.util.List;
/**
 * A suggested set of dependencies to add so that a {@link DependencyRequirement}
 * can be satisfied.
 */
public interface DependencyAddition {
/** ALL_OF: every dependency in {@link #toAdd()} must be added; ONE_OF: any single one suffices. */
enum Type {ALL_OF, ONE_OF}
/** @return whether all or just one of the suggested dependencies is needed */
Type type();
/** @return the candidate dependencies to add */
List<Dependency> toAdd();
/** @return the requirement this addition was suggested for */
DependencyRequirement forRequirement();
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/DependencyRequirement.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import ai.konduit.serving.build.config.Target;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
 * A requirement on the set of dependencies needed for a deployment to work on a
 * given {@link Target}.
 */
public interface DependencyRequirement {
/** @return a short name identifying this requirement */
String name();
//TODO proper descriptions
default String description(){
return name();
}
/** @return true if this requirement is met by the given dependencies on the given target */
boolean satisfiedBy(Target target, Collection<Dependency> currentDeps);
/**
 * Suggest dependency additions that would satisfy this requirement on the target.
 * NOTE(review): implementations in this codebase return null (not an empty list)
 * when there is nothing to suggest - callers should null-check the result.
 */
List<DependencyAddition> suggestDependencies(Target target, Collection<Dependency> currentDeps);
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/ModuleRequirements.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import ai.konduit.serving.build.config.Target;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.*;
/**
 * The full set of {@link DependencyRequirement}s for a module; all of them must be
 * satisfied for the module to run on a given target.
 */
@Data
@AllArgsConstructor
@Accessors(fluent = true)
public class ModuleRequirements {
    private List<DependencyRequirement> reqs;

    /** @return true if every requirement is satisfied by the given dependencies on the target */
    public boolean satisfiedBy(Target target, Collection<Dependency> currentDeps){
        for (DependencyRequirement req : reqs) {
            if (!req.satisfiedBy(target, currentDeps))
                return false;
        }
        return true;
    }

    /**
     * Suggest dependency additions for every unsatisfied requirement.
     *
     * @return null if all requirements are already satisfied, otherwise the combined suggestions
     */
    public List<DependencyAddition> suggestDependencies(Target target, Collection<Dependency> currentDeps){
        if (satisfiedBy(target, currentDeps))
            return null;
        List<DependencyAddition> l = new ArrayList<>();
        for (DependencyRequirement r : reqs) {
            if (r.satisfiedBy(target, currentDeps))
                continue;
            //This requirement is not satisfied...
            //Implementations may return null even when unsatisfied (e.g. nothing runnable
            //can be suggested for this target) - guard against an NPE from addAll(null).
            List<DependencyAddition> suggestions = r.suggestDependencies(target, currentDeps);
            if (suggestions != null) {
                l.addAll(suggestions);
            }
        }
        //TODO we should filter for duplicates
        return l;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/nativedep/NativeDependency.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies.nativedep;
import ai.konduit.serving.build.config.Target;
import ai.konduit.serving.build.dependencies.Dependency;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.util.Set;
/**
 * Associates a {@link Dependency} containing native (non-JVM) code with the set of
 * targets that native code can run on.
 */
@AllArgsConstructor
@Data
public class NativeDependency {
private final Dependency dependency;
// Targets (OS/arch/device combinations) the native code supports
private final Set<Target> supportedTargets;
/** @return true if this dependency's native code can run on the given target */
public boolean supports(Target target){
return supportedTargets.contains(target);
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/dependencies/nativedep/NativeDependencyRegistry.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies.nativedep;
import ai.konduit.serving.build.config.ComputeDevice;
import ai.konduit.serving.build.config.Arch;
import ai.konduit.serving.build.config.OS;
import ai.konduit.serving.build.config.Target;
import ai.konduit.serving.build.config.devices.CUDADevice;
import ai.konduit.serving.build.dependencies.Dependency;
import org.nd4j.common.base.Preconditions;
import java.util.*;
/**
 * Registry of metadata about dependencies containing native (non-JVM) code, and the
 * targets that native code supports. Native dependencies are either listed explicitly
 * in the static map, or inferred from an "os-arch" classifier string.
 * <p>
 * This is a PLACEHOLDER implementation... this native metadata will probably be redesigned (different collection
 * method, different storage method, etc)
 */
public class NativeDependencyRegistry {
// OS components as used in JavaCPP-style platform classifiers
public static final String LINUX = "linux";
public static final String WINDOWS = "windows";
public static final String MACOSX = "macosx";
// Architecture components
public static final String X86_64 = "x86_64";
public static final String X86_64_AVX2 = "x86_64-avx2";
public static final String X86_64_AVX512 = "x86_64-avx512";
public static final String ARM64 = "arm64";
public static final String ARMHF = "armhf";
public static final String PPC64LE = "ppc64le";
// Full "os-arch" classifier strings
public static final String LINUX_X86_64 = LINUX + "-" + X86_64;
public static final String LINUX_X86_AVX2 = LINUX + "-" + X86_64_AVX2;
public static final String LINUX_X86_AVX512 = LINUX + "-" + X86_64_AVX512;
public static final String WINDOWS_X86_64 = WINDOWS + "-" + X86_64;
public static final String WINDOWS_X86_AVX2 = WINDOWS + "-" + X86_64_AVX2;
public static final String MACOSX_X86_64 = MACOSX + "-" + X86_64;
public static final String MACOSX_X86_AVX2 = MACOSX + "-" + X86_64_AVX2;
// Explicitly-registered native dependencies and the targets they support
private static final Map<Dependency, NativeDependency> map = new HashMap<>();
// Registers a dependency as native, supporting exactly the given targets
private static void put(Dependency d, Target... targets){
map.put(d, new NativeDependency(d, new HashSet<>(Arrays.asList(targets))));
}
static {
//These are dependencies that can only run on a specific target
//TODO - TF, ONNX, etc
//ND4J native
put(new Dependency("org.nd4j", "nd4j-native", "1.0.0-SNAPSHOT", null), Target.LWM_X86);
//CUDA
put(new Dependency("org.nd4j", "nd4j-cuda-10.0", "1.0.0-SNAPSHOT", null), Target.LINUX_CUDA_10_0, Target.WINDOWS_CUDA_10_0);
put(new Dependency("org.nd4j", "nd4j-cuda-10.1", "1.0.0-SNAPSHOT", null), Target.LINUX_CUDA_10_1, Target.WINDOWS_CUDA_10_1);
put(new Dependency("org.nd4j", "nd4j-cuda-10.2", "1.0.0-SNAPSHOT", null), Target.LINUX_CUDA_10_2, Target.WINDOWS_CUDA_10_2);
//CUDA classifiers
put(new Dependency("org.nd4j", "nd4j-cuda-10.0", "1.0.0-SNAPSHOT", LINUX_X86_64), Target.LINUX_CUDA_10_0);
put(new Dependency("org.nd4j", "nd4j-cuda-10.1", "1.0.0-SNAPSHOT", LINUX_X86_64), Target.LINUX_CUDA_10_1);
put(new Dependency("org.nd4j", "nd4j-cuda-10.2", "1.0.0-SNAPSHOT", LINUX_X86_64), Target.LINUX_CUDA_10_2);
put(new Dependency("org.nd4j", "nd4j-cuda-10.0", "1.0.0-SNAPSHOT", WINDOWS_X86_64), Target.WINDOWS_CUDA_10_0);
put(new Dependency("org.nd4j", "nd4j-cuda-10.1", "1.0.0-SNAPSHOT", WINDOWS_X86_64), Target.WINDOWS_CUDA_10_1);
put(new Dependency("org.nd4j", "nd4j-cuda-10.2", "1.0.0-SNAPSHOT", WINDOWS_X86_64), Target.WINDOWS_CUDA_10_2);
}
/** @return true if the dependency is explicitly registered as native, or its classifier starts with an OS name */
public static boolean isNativeDependency(Dependency d){
if(d.classifier() != null){
String c = d.classifier();
// NOTE(review): this accepts any classifier merely starting with an OS name (e.g. "linux"),
// while getNativeDependency below requires an "os-" prefix ("linux-"). A bare-OS classifier
// would pass here but fall through to map.get(d) there, possibly returning null - confirm
// whether that asymmetry is intended.
if(c.startsWith(LINUX) || c.startsWith(WINDOWS) || c.startsWith(MACOSX)){
//JavaCPP and ND4J etc dependencies
return true;
}
}
return map.containsKey(d);
}
/**
 * Look up (or infer from the classifier) the native code metadata for a dependency.
 * For "os-arch" classifiers, the supported targets are expanded to every architecture
 * the parsed arch is compatible with.
 */
public static NativeDependency getNativeDependency(Dependency d){
Preconditions.checkState(isNativeDependency(d), "Not a native dependency");
if(d.classifier() != null){
String c = d.classifier();
if(c.startsWith(LINUX + "-") || c.startsWith(WINDOWS + "-") || c.startsWith(MACOSX + "-")){
//JavaCPP and ND4J etc dependencies
// Split the classifier into OS and arch at the first "-"
int idx = c.indexOf("-");
String osStr = c.substring(0,idx);
String archStr = c.substring(idx+1);
OS os = OS.forName(osStr);
Arch arch = Arch.forName(archStr);
ComputeDevice device = deviceFor(d);
Preconditions.checkState(arch != null, "Could not infer target architecture for %s", d);
Arch[] compatibleWith = arch.compatibleWith();
Set<Target> supported = new HashSet<>();
for(Arch a : compatibleWith){
supported.add(new Target(os, a, device));
}
return new NativeDependency(d, supported);
}
}
return map.get(d);
}
/**
 * Infer the compute device (currently CUDA version, or null for CPU) from the
 * dependency's artifact id and version strings.
 */
public static ComputeDevice deviceFor(Dependency d){
//TODO this won't work for things like CUDA! And isn't robust to new versions... Need a more robust approach to this...
String a = d.artifactId().toLowerCase();
if(a.contains("cuda-10.0") || (a.contains("cuda") && d.version().contains("10.0"))){ //Second condition - for example: org.bytedeco:cuda:10.2-7.6-1.5.3:linux-x86_64
return new CUDADevice("10.0");
} else if(a.contains("cuda-10.1") || (a.contains("cuda") && d.version().contains("10.1"))){
return new CUDADevice("10.1");
} else if(a.contains("cuda-10.2") || (a.contains("cuda") && d.version().contains("10.2"))){
return new CUDADevice("10.2");
}
return null;
}
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/deployments/ClassPathDeployment.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.deployments;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.SimpleDeploymentValidation;
import lombok.Data;
import lombok.experimental.Accessors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
@Slf4j
@Data
@Accessors(fluent = true)
public class ClassPathDeployment implements Deployment {

    /** How the classpath should be written out. */
    public enum Type {
        /** Classpath entries written to a plain text file. */
        TEXT_FILE,
        /** Classpath written into a JAR manifest; output file must end with ".jar". */
        JAR_MANIFEST
    }

    public static final String OUTPUT_FILE_PROP = "classpath.outputFile";
    public static final String TYPE_PROP = "classpath.type";
    public static final String CLI_KEYS = "ClassPathDeployment config keys: " + OUTPUT_FILE_PROP + ", " + TYPE_PROP;

    private String outputFile;
    private Type type;

    @Override
    public List<String> propertyNames() {
        return Arrays.asList(OUTPUT_FILE_PROP, TYPE_PROP);
    }

    @Override
    public Map<String, String> asProperties() {
        Map<String,String> map = new HashMap<>();
        map.put(OUTPUT_FILE_PROP, outputFile);
        map.put(TYPE_PROP, type == null ? null : type.toString());
        return map;
    }

    @Override
    public void fromProperties(Map<String, String> props) {
        outputFile = props.getOrDefault(OUTPUT_FILE_PROP, outputFile);
        if(props.containsKey(TYPE_PROP)){
            type = Type.valueOf(props.get(TYPE_PROP).toUpperCase());
        }
    }

    /**
     * Validates that the output file and type are set, and that a JAR_MANIFEST output file
     * name ends with ".jar".
     * <p>
     * Bug fix: an early return previously reported the configuration as valid whenever both
     * outputFile and type were non-null, which skipped the ".jar" suffix check for
     * JAR_MANIFEST deployments. All checks now always run.
     */
    @Override
    public DeploymentValidation validate() {
        List<String> errs = new ArrayList<>();
        if(outputFile == null || outputFile.isEmpty()){
            errs.add("Output classpath file (" + OUTPUT_FILE_PROP + " property) is not set");
        }
        if(type == null){
            errs.add("Output classpath file type - " + Type.TEXT_FILE + " or " + Type.JAR_MANIFEST + " (" + TYPE_PROP + " property) is not set");
        } else if(type == Type.JAR_MANIFEST && outputFile != null && !outputFile.endsWith(".jar")){
            // Message previously cited TYPE_PROP, but the offending value is the output file property
            errs.add("Output classpath file (JAR_MANIFEST type) output file name (" + OUTPUT_FILE_PROP + " property) must end with .jar, got \"" + outputFile + "\"");
        }
        if(errs.isEmpty()){
            return new SimpleDeploymentValidation();
        }
        return new SimpleDeploymentValidation(errs);
    }

    /** Human-readable summary of the generated classpath file: location and entry count. */
    @Override
    public String outputString() {
        File f = new File(outputFile);
        StringBuilder sb = new StringBuilder();
        sb.append("Classpath file location: ").append(f.getAbsolutePath()).append("\n");
        String nLines;
        if (f.exists()) {
            try {
                nLines = String.valueOf(FileUtils.readLines(f, StandardCharsets.UTF_8).size());
            } catch (IOException e) {
                // Best effort: report the failure in the summary instead of propagating
                nLines = "<Error reading generated classpath file>";
                log.warn("Error reading generated classpath file", e);
            }
        } else {
            nLines = "<output file not found>";
        }
        sb.append("Number of classpath entries: ").append(nLines).append("\n");
        return sb.toString();
    }

    // No extra Gradle machinery needed: the classpath is produced by the standard build task.
    @Override
    public List<String> gradleImports() {
        return Collections.emptyList();
    }

    @Override
    public List<GradlePlugin> gradlePlugins() {
        return Collections.emptyList();
    }

    @Override
    public List<String> gradleTaskNames() {
        return Collections.singletonList("build");
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/deployments/DebDeployment.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.deployments;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.SimpleDeploymentValidation;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.File;
import java.util.*;
@Data
@Accessors(fluent = true)
@NoArgsConstructor
public class DebDeployment implements Deployment {
    // NOTE(review): constant name says EXE but the value is a .deb — looks copied from
    // ExeDeployment. Kept as-is because the constant is part of the public API.
    public static final String DEFAULT_EXE_NAME = "konduit-serving-deployment.deb";
    public static final String PROP_OUTPUTDIR = "deb.outputdir";
    public static final String PROP_RPMNAME = "deb.name";

    private String outputDir;
    // Output .deb file name; field name inherited from the RPM deployment this was based on
    private String rpmName;
    private String version;
    private String archName;

    public DebDeployment(String outputDir) {
        this(outputDir, "ks", Deployment.defaultVersion());
    }

    public DebDeployment(@JsonProperty("outputDir") String outputDir, @JsonProperty("rpmName") String rpmName,
                         @JsonProperty("version") String version){
        this.outputDir = outputDir;
        this.rpmName = rpmName;
        this.version = version;
    }

    @Override
    public List<String> propertyNames() {
        return Arrays.asList(PROP_OUTPUTDIR, PROP_RPMNAME);
    }

    @Override
    public Map<String, String> asProperties() {
        Map<String,String> m = new LinkedHashMap<>();
        m.put(PROP_OUTPUTDIR, outputDir);
        m.put(PROP_RPMNAME, rpmName);
        return m;
    }

    @Override
    public void fromProperties(Map<String, String> props) {
        outputDir = props.getOrDefault(PROP_OUTPUTDIR, outputDir);
        rpmName = props.getOrDefault(PROP_RPMNAME, rpmName);
    }

    /**
     * Validates that the output directory and file name are set.
     * <p>
     * Bug fix: previously returned null, which would NPE in any caller inspecting the
     * validation result; now always returns a {@link DeploymentValidation}, matching the
     * other {@link Deployment} implementations.
     */
    @Override
    public DeploymentValidation validate() {
        List<String> errs = new ArrayList<>();
        if(outputDir == null || outputDir.isEmpty()){
            errs.add("Output directory (" + PROP_OUTPUTDIR + " property) is not set");
        }
        if(rpmName == null || rpmName.isEmpty()){
            errs.add("Output DEB file name (" + PROP_RPMNAME + " property) is not set");
        }
        if(errs.isEmpty()){
            return new SimpleDeploymentValidation();
        }
        return new SimpleDeploymentValidation(errs);
    }

    /** Human-readable summary of the generated .deb: absolute path and size in MB. */
    @Override
    public String outputString() {
        File outFile = new File(outputDir, rpmName);
        StringBuilder sb = new StringBuilder();
        sb.append("DEB location: ").append(outFile.getAbsolutePath()).append("\n");
        String size;
        if(outFile.exists()){
            long bytes = outFile.length();
            double bytesPerMB = 1024 * 1024;
            double mb = bytes / bytesPerMB;
            size = String.format("%.2f", mb) + " MB";
        } else {
            size = "<DEB not found>";
        }
        sb.append("DEB size: ").append(size);
        return sb.toString();
    }

    @Override
    public List<String> gradleImports() {
        List<String> retVal = new ArrayList<>();
        retVal.add("org.redline_rpm.header.Os");
        retVal.add("org.redline_rpm.header.Architecture");
        retVal.add("com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar");
        return retVal;
    }

    @Override
    public List<GradlePlugin> gradlePlugins() {
        List<GradlePlugin> retVal = new ArrayList<>();
        retVal.add(new GradlePlugin("nebula.ospackage", "8.3.0"));
        retVal.add(new GradlePlugin("com.github.johnrengelman.shadow", "2.0.4"));
        return retVal;
    }

    @Override
    public List<String> gradleTaskNames() {
        // Order matters: uber-jar first, then package it, then copy to the output dir
        List<String> ret = new ArrayList<>();
        ret.add("shadowJar");
        ret.add("buildDeb");
        ret.add("copyDeb");
        return ret;
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/deployments/DockerDeployment.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.deployments;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.SimpleDeploymentValidation;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.util.*;
@Data
@Accessors(fluent = true)
public class DockerDeployment implements Deployment {
    public static final String DEFAULT_BASE_IMAGE = "openjdk:8-jre";
    public static final String DEFAULT_IMAGE_NAME = "ks";
    public static final String PROP_BASE_IMG = "docker.baseimage";
    public static final String PROP_NAME = "docker.name";

    private String baseImage;
    private String inputDir;
    private String imageName;
    private String version;
    private String imageId; //Should be in form: "somerepo:version"

    public DockerDeployment() {
        // Use the declared default image name constant (was a duplicated "ks" literal)
        this(DEFAULT_BASE_IMAGE, DEFAULT_IMAGE_NAME, Deployment.defaultVersion());
    }

    // NOTE(review): @JsonProperty("rpmName") for imageName looks like a copy-paste from the
    // RPM/DEB deployments — kept as-is so existing serialized configs still deserialize;
    // confirm the intended JSON key before changing.
    public DockerDeployment(@JsonProperty("baseImage") String baseImage, @JsonProperty("rpmName") String imageName,
                            @JsonProperty("version") String version){
        this.baseImage = baseImage;
        this.imageName = imageName;
        this.version = version;
    }

    @Override
    public List<String> propertyNames() {
        return Arrays.asList(PROP_BASE_IMG, PROP_NAME);
    }

    @Override
    public Map<String, String> asProperties() {
        Map<String,String> m = new LinkedHashMap<>();
        m.put(PROP_BASE_IMG, baseImage);
        m.put(PROP_NAME, imageName);
        return m;
    }

    @Override
    public void fromProperties(Map<String, String> props) {
        baseImage = props.getOrDefault(PROP_BASE_IMG, baseImage);
        imageName = props.getOrDefault(PROP_NAME, imageName);
    }

    /** Validates that a base image name is configured. */
    @Override
    public DeploymentValidation validate() {
        if(baseImage == null || baseImage.isEmpty()){
            return new SimpleDeploymentValidation("No base image name is set (property: " + PROP_BASE_IMG + ")");
        }
        return new SimpleDeploymentValidation();
    }

    /**
     * Human-readable summary of the built Docker image.
     * <p>
     * Bug fix: previously also appended a dangling "JAR location: " label with no value and
     * no newline (copy-paste from a JAR-based deployment); removed.
     */
    @Override
    public String outputString() {
        StringBuilder sb = new StringBuilder();
        sb.append("Docker image name: ").append(imageName).append("\n");
        sb.append("Docker base image: ").append(baseImage).append("\n");
        sb.append("Docker image id: ").append(imageId).append("\n");
        return sb.toString();
    }

    @Override
    public List<String> gradleImports() {
        return Collections.singletonList("com.bmuschko.gradle.docker.tasks.image.*");
    }

    @Override
    public List<GradlePlugin> gradlePlugins() {
        return Collections.singletonList(new GradlePlugin("com.bmuschko.docker-remote-api", "6.4.0"));
    }

    @Override
    public List<String> gradleTaskNames() {
        return Collections.singletonList("buildImage");
    }
}
|
0
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build
|
java-sources/ai/konduit/serving/konduit-serving-build/0.3.0/ai/konduit/serving/build/deployments/ExeDeployment.java
|
/*
* ******************************************************************************
* * Copyright (c) 2022 Konduit K.K.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.deployments;
import ai.konduit.serving.build.build.GradlePlugin;
import ai.konduit.serving.build.config.Deployment;
import ai.konduit.serving.build.config.DeploymentValidation;
import ai.konduit.serving.build.config.SimpleDeploymentValidation;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.File;
import java.util.*;
@Data
@Accessors(fluent = true)
@NoArgsConstructor
public class ExeDeployment implements Deployment {
    public static final String DEFAULT_EXE_NAME = "konduit-serving-deployment.exe";
    public static final String PROP_OUTPUTDIR = "exe.outputdir";
    public static final String PROP_EXENAME = "exe.name";

    private String outputDir;
    private String exeName;
    private String version;

    public ExeDeployment(String outputDir) {
        this(outputDir, "ks", Deployment.defaultVersion());
    }

    public ExeDeployment(@JsonProperty("outputDir") String outputDir, @JsonProperty("exeName") String exeName,
                         @JsonProperty("version") String version){
        this.outputDir = outputDir;
        this.exeName = exeName;
        this.version = version;
    }

    @Override
    public List<String> propertyNames() {
        return Arrays.asList(PROP_OUTPUTDIR, PROP_EXENAME);
    }

    @Override
    public Map<String, String> asProperties() {
        Map<String,String> m = new LinkedHashMap<>();
        m.put(PROP_OUTPUTDIR, outputDir);
        m.put(PROP_EXENAME, exeName);
        return m;
    }

    @Override
    public void fromProperties(Map<String, String> props) {
        outputDir = props.getOrDefault(PROP_OUTPUTDIR, outputDir);
        exeName = props.getOrDefault(PROP_EXENAME, exeName);
    }

    /**
     * Validates that the output directory and executable name are set.
     * <p>
     * Bug fix: previously returned null, which would NPE in any caller inspecting the
     * validation result; now always returns a {@link DeploymentValidation}, matching the
     * other {@link Deployment} implementations.
     */
    @Override
    public DeploymentValidation validate() {
        List<String> errs = new ArrayList<>();
        if(outputDir == null || outputDir.isEmpty()){
            errs.add("Output directory (" + PROP_OUTPUTDIR + " property) is not set");
        }
        if(exeName == null || exeName.isEmpty()){
            errs.add("Output EXE file name (" + PROP_EXENAME + " property) is not set");
        }
        if(errs.isEmpty()){
            return new SimpleDeploymentValidation();
        }
        return new SimpleDeploymentValidation(errs);
    }

    /** Human-readable summary of the generated executable: absolute path and size in MB. */
    @Override
    public String outputString() {
        File outFile = new File(outputDir, exeName);
        StringBuilder sb = new StringBuilder();
        sb.append("EXE location: ").append(outFile.getAbsolutePath()).append("\n");
        String size;
        if(outFile.exists()){
            long bytes = outFile.length();
            double bytesPerMB = 1024 * 1024;
            double mb = bytes / bytesPerMB;
            size = String.format("%.2f", mb) + " MB";
        } else {
            size = "<EXE not found>";
        }
        sb.append("EXE size: ").append(size);
        return sb.toString();
    }

    @Override
    public List<String> gradleImports() {
        List<String> retVal = new ArrayList<>();
        retVal.add("edu.sc.seis.launch4j.tasks.DefaultLaunch4jTask");
        retVal.add("com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar");
        return retVal;
    }

    @Override
    public List<GradlePlugin> gradlePlugins() {
        List<GradlePlugin> retVal = new ArrayList<>();
        retVal.add(new GradlePlugin("nebula.ospackage", "8.3.0"));
        retVal.add(new GradlePlugin("com.github.johnrengelman.shadow", "2.0.4"));
        retVal.add(new GradlePlugin("edu.sc.seis.launch4j", "2.4.6"));
        return retVal;
    }

    @Override
    public List<String> gradleTaskNames() {
        // Order matters: uber-jar first, then wrap it as an exe, then copy to the output dir
        List<String> ret = new ArrayList<>();
        ret.add("shadowJar");
        ret.add("createExe");
        ret.add("copyExe");
        return ret;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.