// File: java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/engine/PtNDManager.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.pytorch.jni.JniUtils;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
/** {@code PtNDManager} is the PyTorch implementation of {@link NDManager}. */
public class PtNDManager extends BaseNDManager {
private static final PtNDManager SYSTEM_MANAGER = new SystemManager();
private PtNDManager(NDManager parent, Device device) {
super(parent, device);
}
static PtNDManager getSystemManager() {
return SYSTEM_MANAGER;
}
/** {@inheritDoc} */
@Override
public ByteBuffer allocateDirect(int capacity) {
return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
}
/** {@inheritDoc} */
@Override
public PtNDArray from(NDArray array) {
if (array == null || array instanceof PtNDArray) {
return (PtNDArray) array;
}
PtNDArray result = create(array.toByteBuffer(), array.getShape(), array.getDataType());
result.setName(array.getName());
return result;
}
/** {@inheritDoc} */
@Override
public PtNDArray create(Shape shape, DataType dataType) {
return JniUtils.createEmptyNdArray(this, shape, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public PtNDArray create(Buffer data, Shape shape, DataType dataType) {
int size = Math.toIntExact(shape.size());
BaseNDManager.validateBuffer(data, dataType, size);
if (data.isDirect() && data instanceof ByteBuffer) {
return JniUtils.createNdFromByteBuffer(
this, (ByteBuffer) data, shape, dataType, SparseFormat.DENSE, device);
}
ByteBuffer buf = allocateDirect(size * dataType.getNumOfBytes());
copyBuffer(data, buf);
return JniUtils.createNdFromByteBuffer(
this, buf, shape, dataType, SparseFormat.DENSE, device);
}
/** {@inheritDoc} */
@Override
public NDArray create(String[] data, Charset charset, Shape shape) {
return new PtNDArray(this, data, shape);
}
/** {@inheritDoc} */
@Override
public NDArray createCoo(Buffer data, long[][] indices, Shape shape) {
// length should be the same as indices dim 1
try (NDArray valueNd = create(data, new Shape(indices[0].length))) {
try (NDArray indicesNd = create(indices)) {
return JniUtils.createSparseCoo((PtNDArray) indicesNd, (PtNDArray) valueNd, shape);
}
}
}
/** {@inheritDoc} */
@Override
public NDArray zeros(Shape shape, DataType dataType) {
return JniUtils.createZerosNdArray(this, shape, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray ones(Shape shape, DataType dataType) {
return JniUtils.createOnesNdArray(this, shape, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray full(Shape shape, float value, DataType dataType) {
return JniUtils.full(this, shape, value, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray arange(int start, int stop, int step, DataType dataType) {
        return arange((float) start, (float) stop, (float) step, dataType);
}
/** {@inheritDoc} */
@Override
public NDArray arange(float start, float stop, float step, DataType dataType) {
if (Math.signum(stop - start) != Math.signum(step)) {
            return create(new Shape(0), dataType);
}
return JniUtils.arange(this, start, stop, step, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray eye(int rows, int cols, int k, DataType dataType) {
if (k != 0) {
throw new UnsupportedOperationException(
"index of the diagonal is not supported in PyTorch");
}
return JniUtils.eye(this, rows, cols, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray linspace(float start, float stop, int num, boolean endpoint) {
if (!endpoint) {
throw new UnsupportedOperationException("endpoint only support true");
}
return JniUtils.linspace(
this, start, stop, num, DataType.FLOAT32, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray randomInteger(long low, long high, Shape shape, DataType dataType) {
return JniUtils.randint(this, low, high, shape, dataType, device);
}
/** {@inheritDoc} */
@Override
public NDArray randomPermutation(long n) {
return JniUtils.randperm(this, n, DataType.INT64, device);
}
/** {@inheritDoc} */
@Override
public NDArray randomUniform(float low, float high, Shape shape, DataType dataType) {
return JniUtils.uniform(this, low, high, shape, dataType, device);
}
/** {@inheritDoc} */
@Override
public NDArray randomNormal(float loc, float scale, Shape shape, DataType dataType) {
return JniUtils.normal(this, loc, scale, shape, dataType, device);
}
/** {@inheritDoc} */
@Override
public NDArray hanningWindow(long numPoints) {
return JniUtils.hannWindow(this, numPoints, true, device);
}
/** {@inheritDoc} */
@Override
public PtNDManager newSubManager(Device device) {
PtNDManager manager = new PtNDManager(this, device);
attachUncappedInternal(manager.uid, manager);
return manager;
}
/** {@inheritDoc} */
@Override
public final Engine getEngine() {
return Engine.getEngine(PtEngine.ENGINE_NAME);
}
/** The SystemManager is the root {@link PtNDManager} of which all others are children. */
private static final class SystemManager extends PtNDManager implements SystemNDManager {
SystemManager() {
super(null, null);
}
}
}
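
For context, a minimal usage sketch of the manager above, assuming the standard DJL application API (the wrapper class and variable names are illustrative, not part of this repo):

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;

public class PtNDManagerExample {
    public static void main(String[] args) {
        // "PyTorch" selects the engine whose PtNDManager is defined above;
        // closing the manager frees every native tensor it owns.
        try (NDManager manager = NDManager.newBaseManager("PyTorch")) {
            NDArray a = manager.create(new float[] {1f, 2f, 3f, 4f}, new Shape(2, 2));
            NDArray b = manager.ones(new Shape(2, 2));
            System.out.println(a.add(b)); // element-wise add backed by the JNI layer
        }
    }
}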

// File: java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/engine/PtSymbolBlock.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.MalformedModelException;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.AbstractSymbolBlock;
import ai.djl.nn.Parameter;
import ai.djl.nn.ParameterList;
import ai.djl.nn.SymbolBlock;
import ai.djl.pytorch.jni.IValue;
import ai.djl.pytorch.jni.IValueUtils;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
/**
* {@code PtSymbolBlock} is the PyTorch implementation of {@link SymbolBlock}.
*
* <p>You can create a {@code PtSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*/
// TODO: Memory handling
public class PtSymbolBlock extends AbstractSymbolBlock implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(PtSymbolBlock.class);
private AtomicReference<Long> handle;
private String uid;
private PtNDManager manager;
private boolean isTrain;
private PairList<String, Shape> inputDescriptions;
private PairList<String, Shape> outputDescriptions;
private boolean first;
private Map<String, Parameter> parameters;
/**
* Constructs a {@code PtSymbolBlock}.
*
* <p>You can create a {@code PtSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*
* @param manager the manager to use for the block
* @param handle the module handle
*/
@SuppressWarnings("this-escape")
public PtSymbolBlock(PtNDManager manager, long handle) {
this(manager);
this.handle = new AtomicReference<>(handle);
uid = String.valueOf(handle);
manager.attachInternal(uid, this);
}
/**
     * Constructs an empty {@code PtSymbolBlock}.
*
* @param manager the manager to use for the block
*/
public PtSymbolBlock(PtNDManager manager) {
this.manager = manager;
// training mode is on by default
isTrain = true;
first = true;
}
/** {@inheritDoc} */
@Override
public void close() {
Long pointer = handle.getAndSet(null);
if (pointer != null) {
JniUtils.deleteModule(pointer);
manager.detachInternal(uid);
manager = null;
}
}
/**
* Runs the forward of this PyTorch module.
*
* @param inputs the input {@link IValue}
* @return the result {@link IValue}
*/
public IValue forward(IValue... inputs) {
return IValueUtils.forward(this, inputs);
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
// TODO refactor the forward to not take ParameterStore
if (isTrain != training) {
isTrain = training;
if (isTrain) {
JniUtils.enableTrainingMode(this);
} else {
JniUtils.enableInferenceMode(this);
}
}
if (System.getProperty("ai.djl.pytorch.graph_optimizer") != null) {
/*
             * By default, graph_optimizer is enabled, but it requires warm-up time over the
             * first few inference calls. This optimizer setting is thread local and thus has
             * to be set per thread. Users must programmatically call
             * JniUtils.setGraphExecutorOptimize(false) to disable the graph optimizer for a model.
*/
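            // Example: launching the JVM with -Dai.djl.pytorch.graph_optimizer=false
            // makes Boolean.getBoolean(...) below return false, disabling the
            // optimizer for every thread that runs this forward.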
boolean setOptimizer = Boolean.getBoolean("ai.djl.pytorch.graph_optimizer");
JniUtils.setGraphExecutorOptimize(setOptimizer);
}
if (first) {
synchronized (this) {
if (first) {
inputDescriptions = new PairList<>();
outputDescriptions = new PairList<>();
for (NDArray array : inputs) {
inputDescriptions.add(array.getName(), array.getShape());
}
NDList outputs = IValueUtils.forward(this, inputs, training);
for (NDArray array : outputs) {
outputDescriptions.add(array.getName(), array.getShape());
}
first = false;
return outputs;
}
}
}
return IValueUtils.forward(this, inputs, training);
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeInput() {
if (inputDescriptions == null) {
logger.warn(
"Input shapes are unknown, please run predict or forward once"
+ " and call describeInput again.");
}
return inputDescriptions;
}
/** {@inheritDoc} */
@Override
public ParameterList getDirectParameters() {
if (parameters == null) {
NDList params = JniUtils.moduleGetParams(this, manager);
parameters = new LinkedHashMap<>(params.size());
for (NDArray param : params) {
parameters.put(
param.getName(),
Parameter.builder()
.setName(param.getName())
.setType(inferType(param.getName()))
.optArray(param)
.build());
}
}
// Defensive copy
return new ParameterList(parameters);
}
private static Parameter.Type inferType(String name) {
if (name.contains("bias")) {
return Parameter.Type.BIAS;
} else if (name.contains("gamma")) {
return Parameter.Type.GAMMA;
} else if (name.contains("beta")) {
return Parameter.Type.BETA;
} else if (name.contains("moving_mean") || name.contains("running_mean")) {
return Parameter.Type.RUNNING_MEAN;
} else if (name.contains("moving_var") || name.contains("running_var")) {
return Parameter.Type.RUNNING_VAR;
} else if (name.contains("weight")) {
return Parameter.Type.WEIGHT;
}
return Parameter.Type.OTHER;
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeOutput() {
if (outputDescriptions == null) {
logger.warn(
"Output shapes are unknown, please run predict or forward once"
+ " and call describeOutput again.");
}
return outputDescriptions;
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes) {
try (NDManager manager = NDManager.newBaseManager()) {
NDList list = new NDList();
// TODO: Only tested for float32
for (Shape shape : inputShapes) {
list.add(manager.ones(shape));
}
NDList result = forwardInternal(new ParameterStore(manager, false), list, false, null);
return result.stream().map(NDArray::getShape).toArray(Shape[]::new);
}
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes, DataType[] dataTypes) {
try (NDManager manager = NDManager.newBaseManager("PyTorch")) {
NDList list = new NDList();
for (int i = 0; i < inputShapes.length; i++) {
list.add(
manager.ones(
inputShapes[i],
dataTypes == null ? DataType.FLOAT32 : dataTypes[i]));
}
NDList result = forwardInternal(new ParameterStore(manager, false), list, false, null);
return result.stream().map(NDArray::getShape).toArray(Shape[]::new);
}
}
/** {@inheritDoc} */
@Override
public void saveParameters(DataOutputStream os) throws IOException {
os.writeByte(version);
JniUtils.writeModule(this, os, true);
}
/** {@inheritDoc} */
@Override
public void loadParameters(NDManager manager, DataInputStream is)
throws IOException, MalformedModelException {
byte loadVersion = is.readByte();
if (loadVersion != version) {
throw new MalformedModelException("Unsupported encoding version: " + loadVersion);
}
long rawHandle = JniUtils.loadModuleHandle(is, manager.getDevice(), true, true);
this.handle = new AtomicReference<>(rawHandle);
uid = String.valueOf(rawHandle);
manager.attachInternal(uid, this);
}
/**
* Get the native PyTorch model pointer.
*
* @return the pointer
*/
public Long getHandle() {
Long reference = handle.get();
if (reference == null) {
throw new IllegalStateException("PyTorch model handle has been released!");
}
return reference;
}
}
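
A minimal loading sketch for the block above, assuming the standard DJL Model API (the model path, model name, and input shape are hypothetical):

import ai.djl.Model;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.training.ParameterStore;

import java.nio.file.Paths;

public class PtSymbolBlockExample {
    public static void main(String[] args) throws Exception {
        try (Model model = Model.newInstance("demo", "PyTorch");
                NDManager manager = NDManager.newBaseManager("PyTorch")) {
            model.load(Paths.get("build/model")); // expects a TorchScript (*.pt) artifact
            Block block = model.getBlock(); // a PtSymbolBlock
            NDList out =
                    block.forward(
                            new ParameterStore(manager, false),
                            new NDList(manager.ones(new Shape(1, 3, 224, 224))),
                            false);
            System.out.println(out.head().getShape());
        }
    }
}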

// File: java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/engine/package-info.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying PyTorch Engine. */
package ai.djl.pytorch.engine;

// File: java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/jni/IValue.java
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.pytorch.engine.PtNDArray;
import ai.djl.pytorch.engine.PtNDManager;
import ai.djl.util.NativeResource;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * A class representing PyTorch {@code IValue} data.
*
* <p>DJL doesn't support creating nested IValue.
*/
public class IValue extends NativeResource<Long> {
IValue(long handle) {
super(handle);
}
/**
* Returns the type of the IValue.
*
* @return the type of the IValue
*/
public String getType() {
return PyTorchLibrary.LIB.iValueGetType(getHandle());
}
/**
* Returns if the IValue is a {@code Tensor} type.
*
* @return if the IValue is a Tensor type
*/
public boolean isTensor() {
return PyTorchLibrary.LIB.iValueIsTensor(getHandle());
}
/**
* Returns if the IValue is a {@code boolean} type.
*
* @return if the IValue is a boolean type
*/
public boolean isBoolean() {
return PyTorchLibrary.LIB.iValueIsBool(getHandle());
}
/**
* Returns if the IValue is a {@code long} type.
*
* @return if the IValue is a long type
*/
public boolean isLong() {
return PyTorchLibrary.LIB.iValueIsLong(getHandle());
}
/**
* Returns if the IValue is a {@code double} type.
*
* @return if the IValue is a double type
*/
public boolean isDouble() {
return PyTorchLibrary.LIB.iValueIsDouble(getHandle());
}
/**
* Returns if the IValue is a {@code String} type.
*
* @return if the IValue is a String type
*/
public boolean isString() {
return PyTorchLibrary.LIB.iValueIsString(getHandle());
}
/**
* Returns if the IValue is a {@code boolean[]} type.
*
* @return if the IValue is a boolean[] type
*/
public boolean isBooleanList() {
return PyTorchLibrary.LIB.iValueIsBoolList(getHandle());
}
/**
* Returns if the IValue is a {@code long[]} type.
*
* @return if the IValue is a long[] type
*/
public boolean isLongList() {
return PyTorchLibrary.LIB.iValueIsLongList(getHandle());
}
/**
* Returns if the IValue is a {@code double[]} type.
*
* @return if the IValue is a double[] type
*/
public boolean isDoubleList() {
return PyTorchLibrary.LIB.iValueIsDoubleList(getHandle());
}
    /**
     * Returns if the IValue is a {@code Tensor[]} type.
     *
     * @return if the IValue is a Tensor[] type
     */
public boolean isTensorList() {
return PyTorchLibrary.LIB.iValueIsTensorList(getHandle());
}
/**
* Returns if the IValue is a {@code IValue[]} type.
*
* <p>The elements in the array must have the same type.
*
* @return if the IValue is a IValue[] type
*/
public boolean isList() {
return PyTorchLibrary.LIB.iValueIsList(getHandle());
}
/**
* Returns if the IValue is a {@code Map<String, V>} type.
*
* @return if the IValue is a Map<String, V> type
*/
public boolean isMap() {
return PyTorchLibrary.LIB.iValueIsMap(getHandle());
}
/**
* Returns if the IValue is a tuple type.
*
* @return if the IValue is a tuple type
*/
public boolean isTuple() {
return PyTorchLibrary.LIB.iValueIsTuple(getHandle());
}
/**
* Creates a new {@code IValue} of type {@code PtNDArray}.
*
* @param array the NDArray
* @return a new {@code IValue} of type {@code PtNDArray}
*/
public static IValue from(PtNDArray array) {
if (array.getDataType() == DataType.STRING) {
Shape shape = array.getShape();
String[] strs = array.toStringArray();
if (shape.isScalar()) {
return from(strs[0]);
}
IValue[] list = new IValue[strs.length];
PtNDManager manager = array.getManager();
for (int i = 0; i < strs.length; i++) {
IValue ivalue = from(strs[i]);
manager.attachUncappedInternal(ivalue.getUid(), ivalue);
list[i] = ivalue;
}
return listFrom(list);
}
return new IValue(PyTorchLibrary.LIB.iValueFromTensor(array.getHandle()));
}
/**
* Creates a new {@code IValue} of type {@code boolean}.
*
* @param value the boolean value
* @return a new {@code IValue} of type {@code boolean}
*/
public static IValue from(boolean value) {
return new IValue(PyTorchLibrary.LIB.iValueFromBool(value));
}
/**
* Creates a new {@code IValue} of type {@code long}.
*
* @param value the long value
* @return a new {@code IValue} of type {@code long}
*/
public static IValue from(long value) {
return new IValue(PyTorchLibrary.LIB.iValueFromLong(value));
}
/**
* Creates a new {@code IValue} of type {@code double}.
*
* @param value the double value
* @return a new {@code IValue} of type {@code double}
*/
public static IValue from(double value) {
return new IValue(PyTorchLibrary.LIB.iValueFromDouble(value));
}
/**
* Creates a new {@code IValue} of type {@code String}.
*
* @param value the String value
* @return a new {@code IValue} of type {@code String}
*/
public static IValue from(String value) {
return new IValue(PyTorchLibrary.LIB.iValueFromString(value));
}
/**
* Creates a new {@code IValue} of type {@code boolean[]}.
*
* @param list the boolean[] value
* @return a new {@code IValue} of type {@code boolean[]}
*/
public static IValue listFrom(boolean... list) {
return new IValue(PyTorchLibrary.LIB.iValueFromBoolList(list));
}
/**
* Creates a new {@code IValue} of type {@code long[]}.
*
* @param list the long[] value
* @return a new {@code IValue} of type {@code long[]}
*/
public static IValue listFrom(long... list) {
return new IValue(PyTorchLibrary.LIB.iValueFromLongList(list));
}
/**
* Creates a new {@code IValue} of type {@code double[]}.
*
* @param list the double[] value
* @return a new {@code IValue} of type {@code double[]}
*/
public static IValue listFrom(double... list) {
return new IValue(PyTorchLibrary.LIB.iValueFromDoubleList(list));
}
/**
* Creates a new {@code IValue} of type {@code NDArray[]}.
*
* @param list the NDArray[] value
* @return a new {@code IValue} of type {@code NDArray[]}
*/
public static IValue listFrom(PtNDArray... list) {
long[] tensors = Arrays.stream(list).mapToLong(PtNDArray::getHandle).toArray();
return new IValue(PyTorchLibrary.LIB.iValueFromTensorList(tensors));
}
    /**
     * Creates a new {@code IValue} of type {@code IValue[]}.
     *
     * @param list the IValue[] value
     * @return a new {@code IValue} of type {@code IValue[]}
     */
public static IValue listFrom(IValue... list) {
if (list.length == 0) {
throw new IllegalArgumentException("Empty IValue list is not supported.");
}
long[] tensors = Arrays.stream(list).mapToLong(IValue::getHandle).toArray();
return new IValue(PyTorchLibrary.LIB.iValueFromList(tensors));
}
    /**
     * Creates a new {@code IValue} tuple from the given {@code IValue[]}.
     *
     * @param list the IValue[] value
     * @return a new {@code IValue} of tuple type
     */
public static IValue tupleFrom(IValue... list) {
long[] tensors = Arrays.stream(list).mapToLong(IValue::getHandle).toArray();
return new IValue(PyTorchLibrary.LIB.iValueFromTuple(tensors));
}
/**
* Creates a new {@code IValue} of type {@code Map[String, PtNDArray]}.
*
* @param map the Map[String, IValue] value
* @return a new {@code IValue} of type {@code Map[String, PtNDArray]}
*/
public static IValue stringMapFrom(Map<String, PtNDArray> map) {
String[] keys = new String[map.size()];
long[] handles = new long[map.size()];
int i = 0;
for (Map.Entry<String, PtNDArray> entry : map.entrySet()) {
keys[i] = entry.getKey();
handles[i] = entry.getValue().getHandle();
++i;
}
return new IValue(PyTorchLibrary.LIB.iValueFromStringMap(keys, handles));
}
/**
* Creates a new {@code IValue} of type {@code Map[String, IValue]}.
*
* @param map the Map[String, IValue] value
* @return a new {@code IValue} of type {@code Map[String, IValue]}
*/
public static IValue stringIValueMapFrom(Map<String, IValue> map) {
String[] keys = new String[map.size()];
long[] handles = new long[map.size()];
int i = 0;
for (Map.Entry<String, IValue> entry : map.entrySet()) {
keys[i] = entry.getKey();
handles[i] = entry.getValue().getHandle();
++i;
}
return new IValue(PyTorchLibrary.LIB.iValueFromStringIValueMap(keys, handles));
}
/**
* Returns the {@code boolean} value of this IValue.
*
* @return the boolean value of this IValue
*/
public boolean toBoolean() {
return PyTorchLibrary.LIB.iValueToBool(getHandle());
}
/**
* Returns the {@code long} value of this IValue.
*
* @return the long value of this IValue
*/
public long toLong() {
return PyTorchLibrary.LIB.iValueToLong(getHandle());
}
/**
* Returns the {@code double} value of this IValue.
*
* @return the double value of this IValue
*/
public double toDouble() {
return PyTorchLibrary.LIB.iValueToDouble(getHandle());
}
/**
* Returns the {@code String} value of this IValue.
*
* @return the String value of this IValue
*/
public String toStringValue() {
return PyTorchLibrary.LIB.iValueToString(getHandle());
}
/**
* Returns the {@code boolean[]} value of this IValue.
*
* @return the boolean[] value of this IValue
*/
public boolean[] toBooleanArray() {
return PyTorchLibrary.LIB.iValueToBoolList(getHandle());
}
/**
* Returns the {@code long[]} value of this IValue.
*
* @return the long[] value of this IValue
*/
public long[] toLongArray() {
return PyTorchLibrary.LIB.iValueToLongList(getHandle());
}
/**
* Returns the {@code double[]} value of this IValue.
*
* @return the double[] value of this IValue
*/
public double[] toDoubleArray() {
return PyTorchLibrary.LIB.iValueToDoubleList(getHandle());
}
/**
* Returns the {@code NDArray} value of this IValue.
*
* @param manager the {@code NDManager} to create the NDArray
* @return the NDArray value of this IValue
*/
public PtNDArray toTensor(PtNDManager manager) {
return new PtNDArray(manager, PyTorchLibrary.LIB.iValueToTensor(getHandle()));
}
/**
* Returns the {@code NDArray[]} value of this IValue.
*
* @param manager the NDManager to create NDArray
* @return the NDArray[] value of this IValue
*/
public PtNDArray[] toTensorArray(PtNDManager manager) {
long[] handles = PyTorchLibrary.LIB.iValueToTensorList(getHandle());
PtNDArray[] ret = new PtNDArray[handles.length];
for (int i = 0; i < ret.length; ++i) {
ret[i] = new PtNDArray(manager, handles[i]);
}
return ret;
}
/**
* Returns the {@code IValue[]} value of this IValue list.
*
* @return the IValue[] value of this IValue list
*/
public IValue[] toIValueArray() {
long[] handles = PyTorchLibrary.LIB.iValueToIValueList(getHandle());
IValue[] ret = new IValue[handles.length];
for (int i = 0; i < ret.length; ++i) {
ret[i] = new IValue(handles[i]);
}
return ret;
}
/**
* Returns the {@code Map<String, IValue>} value of this IValue.
*
* @return the Map<String, IValue> value of this IValue
*/
public Map<String, IValue> toIValueMap() {
long[] handles = PyTorchLibrary.LIB.iValueToMap(getHandle());
Map<String, IValue> map = new ConcurrentHashMap<>();
for (int i = 0; i < handles.length; i += 2) {
IValue key = new IValue(handles[i]);
map.put(key.toStringValue(), new IValue(handles[i + 1]));
key.close();
}
return map;
}
    /**
     * Returns the {@code IValue[]} value of this IValue tuple.
     *
     * @return the IValue[] value of this IValue tuple
     */
public IValue[] toIValueTuple() {
long[] handles = PyTorchLibrary.LIB.iValueToIValueTuple(getHandle());
IValue[] ret = new IValue[handles.length];
for (int i = 0; i < ret.length; ++i) {
ret[i] = new IValue(handles[i]);
}
return ret;
}
/**
* Returns the {@code NDList} value of this IValue.
*
* @param manager the NDManager to create NDArray
* @return the {@code NDList} value of this IValue
*/
public NDList toNDList(PtNDManager manager) {
if (isTensor()) {
return new NDList(toTensor(manager));
} else if (isTensorList()) {
return new NDList(toTensorArray(manager));
} else if (isMap()) {
// Only allows one level <String, NDArray> type of map
NDList list = new NDList();
Map<String, IValue> map = toIValueMap();
for (Map.Entry<String, IValue> entry : map.entrySet()) {
IValue iv = entry.getValue();
if (!iv.isTensor()) {
throw new UnsupportedOperationException("Only one level of map is supported.");
}
PtNDArray value = entry.getValue().toTensor(manager);
value.setName(entry.getKey());
list.add(value);
iv.close();
}
return list;
} else if (isList()) {
NDList list = new NDList();
for (IValue ivalue : toIValueArray()) {
list.addAll(ivalue.toNDList(manager));
ivalue.close();
}
return list;
} else if (isTuple()) {
NDList list = new NDList();
for (IValue ivalue : toIValueTuple()) {
list.addAll(ivalue.toNDList(manager));
ivalue.close();
}
return list;
} else if (isString()) {
return new NDList(manager.create(toStringValue()));
}
throw new UnsupportedOperationException("Unsupported IValue type.");
}
/** {@inheritDoc} */
@Override
public void close() {
Long pointer = handle.getAndSet(null);
if (pointer != null) {
PyTorchLibrary.LIB.torchDeleteIValue(pointer);
}
}
}
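
A sketch of driving a loaded module with raw IValues instead of an NDList (the block is assumed to be an already-loaded PtSymbolBlock; each IValue wraps a native handle and must be closed):

import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.pytorch.engine.PtNDArray;
import ai.djl.pytorch.engine.PtNDManager;
import ai.djl.pytorch.engine.PtSymbolBlock;
import ai.djl.pytorch.jni.IValue;

public class IValueExample {
    static NDList run(PtSymbolBlock block, PtNDManager manager) {
        PtNDArray tensor = (PtNDArray) manager.ones(new Shape(1, 3));
        IValue input = IValue.from(tensor);
        try (IValue output = block.forward(input)) {
            return output.toNDList(manager); // unwraps tensors, lists, tuples, and maps
        } finally {
            input.close();
        }
    }
}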

// File: java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/jni/IValueUtils.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.pytorch.engine.PtNDArray;
import ai.djl.pytorch.engine.PtNDManager;
import ai.djl.pytorch.engine.PtSymbolBlock;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** IValueUtils is a utility class for dealing with PyTorch {@code IValue}s. */
public final class IValueUtils {
private static final Pattern PATTERN_LIST = Pattern.compile("\\w+\\[]");
private static final Pattern PATTERN_TUPLE = Pattern.compile("\\w+\\(\\)");
private static final Pattern PATTERN_TUPLE_OF_TUPLE = Pattern.compile("\\w+(\\([\\d,]+\\))");
private static final boolean CUDA_STREAM =
Boolean.getBoolean("ai.djl.pytorch.enable_cuda_stream");
private IValueUtils() {}
/**
* Runs the forward of PyTorch module.
*
* @param block the block that contains PyTorch module
* @param inputs the input {@link NDList}
* @param isTrain if running on training mode
* @return the result {@link NDList}
*/
public static NDList forward(PtSymbolBlock block, NDList inputs, boolean isTrain) {
Pair<IValue[], String> inputPair = getInputs(inputs);
IValue[] ivalues = inputPair.getKey();
String method = inputPair.getValue();
long[] iValueHandles = Arrays.stream(ivalues).mapToLong(IValue::getHandle).toArray();
long result =
PyTorchLibrary.LIB.moduleRunMethod(
block.getHandle(), method, iValueHandles, isTrain, CUDA_STREAM);
PtNDManager manager = (PtNDManager) inputs.get(0).getManager();
Arrays.stream(ivalues).forEach(IValue::close);
try (IValue iValue = new IValue(result)) {
return iValue.toNDList(manager);
}
}
/**
* Runs the forward of PyTorch module.
*
* @param block the block that contains PyTorch module
* @param inputs the input {@link IValue}
* @return the result {@link IValue}
*/
public static IValue forward(PtSymbolBlock block, IValue[] inputs) {
return runMethod(block, "forward", inputs);
}
/**
* Runs the method of PyTorch module.
*
* @param block the block that contains PyTorch module
* @param methodName the name of method for calling
* @param inputs the input {@link IValue}
* @return the result {@link IValue}
*/
public static IValue runMethod(PtSymbolBlock block, String methodName, IValue... inputs) {
long[] iValueHandles = Arrays.stream(inputs).mapToLong(IValue::getHandle).toArray();
return new IValue(
PyTorchLibrary.LIB.moduleRunMethod(
block.getHandle(), methodName, iValueHandles, false, CUDA_STREAM));
}
private static int addToMap(
Map<String, Integer> map, String key, List<PairList<String, PtNDArray>> list) {
return map.computeIfAbsent(
key,
k -> {
list.add(new PairList<>());
return list.size() - 1;
});
}
static Pair<IValue[], String> getInputs(NDList ndList) {
List<PairList<String, PtNDArray>> outputs = new ArrayList<>();
Map<String, Integer> indexMap = new ConcurrentHashMap<>();
String methodName = "forward";
for (NDArray array : ndList) {
String name = array.getName();
Matcher m;
if (name != null && name.contains(".")) {
String[] strings = name.split("\\.", 2);
int index = addToMap(indexMap, strings[0], outputs);
PairList<String, PtNDArray> pl = outputs.get(index);
pl.add(strings[1], (PtNDArray) array);
} else if (name != null && name.startsWith("module_method:")) {
methodName = name.substring(14);
} else if (name != null && PATTERN_LIST.matcher(name).matches()) {
int index = addToMap(indexMap, name, outputs);
PairList<String, PtNDArray> pl = outputs.get(index);
pl.add("[]", (PtNDArray) array);
} else if (name != null && PATTERN_TUPLE.matcher(name).matches()) {
int index = addToMap(indexMap, name, outputs);
PairList<String, PtNDArray> pl = outputs.get(index);
pl.add("()", (PtNDArray) array);
} else if (name != null && (m = PATTERN_TUPLE_OF_TUPLE.matcher(name)).matches()) {
int index = addToMap(indexMap, name, outputs);
String key = m.group(1);
PairList<String, PtNDArray> pl = outputs.get(index);
pl.add(key, (PtNDArray) array);
} else {
PairList<String, PtNDArray> pl = new PairList<>();
pl.add(null, (PtNDArray) array);
outputs.add(pl);
}
}
IValue[] ret = new IValue[outputs.size()];
for (int i = 0; i < outputs.size(); ++i) {
PairList<String, PtNDArray> pl = outputs.get(i);
String key = pl.get(0).getKey();
if (key == null) {
// not List, Dict, Tuple input
ret[i] = IValue.from(pl.get(0).getValue());
} else if ("[]".equals(key)) {
// list
PtNDArray[] arrays = pl.values().toArray(new PtNDArray[0]);
ret[i] = IValue.listFrom(arrays);
} else if ("()".equals(key)) {
// Tuple
IValue[] arrays = pl.values().stream().map(IValue::from).toArray(IValue[]::new);
ret[i] = IValue.tupleFrom(arrays);
} else if (key.startsWith("(")) {
// Tuple of tuple
String[] keys = key.substring(1, key.length() - 1).split(",");
int[] dim = Arrays.stream(keys).mapToInt(Integer::parseInt).toArray();
List<PtNDArray> arrays = pl.values();
int product = 1;
for (int d : dim) {
product *= d;
}
if (product != arrays.size()) {
throw new IllegalArgumentException("Invalid NDList tuple size: " + key);
}
ret[i] = IValueUtils.toTupleIValueRecur(arrays, dim, 0, 0).getKey();
} else {
Map<String, PtNDArray> map = new ConcurrentHashMap<>();
for (Pair<String, PtNDArray> pair : pl) {
map.put(pair.getKey(), pair.getValue());
}
ret[i] = IValue.stringMapFrom(map);
}
}
return new Pair<>(ret, methodName);
}
private static Pair<IValue, Integer> toTupleIValueRecur(
List<PtNDArray> list, int[] dims, int start, int level) {
if (dims.length - 1 == level) {
int dim = dims[level];
IValue[] iValues = new IValue[dim];
for (int i = 0; i < dim; i++) {
iValues[i] = IValue.from(list.get(i + start));
}
            return new Pair<>(IValue.tupleFrom(iValues), start + dim);
}
        IValue[] output = new IValue[dims[level]]; // sized by the current level, not dims[0]
for (int j = 0; j < dims[level]; j++) {
Pair<IValue, Integer> p = toTupleIValueRecur(list, dims, start, level + 1);
start = p.getValue();
output[j] = p.getKey();
}
return new Pair<>(IValue.tupleFrom(output), start);
}
}
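
The naming convention that getInputs implements can be driven from user code by setting NDArray names before calling forward; a sketch (the names and shapes are illustrative):

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;

public class NamedInputsExample {
    static NDList buildInputs(NDManager manager) {
        NDList inputs = new NDList();
        NDArray ids = manager.ones(new Shape(1, 8));
        ids.setName("input.input_ids"); // "group.key" entries merge into one Dict[str, Tensor]
        NDArray mask = manager.ones(new Shape(1, 8));
        mask.setName("input.attention_mask"); // same "input" prefix, same dict
        NDArray feats = manager.ones(new Shape(4));
        feats.setName("features[]"); // a "[]" suffix collects entries into a List[Tensor]
        inputs.add(ids);
        inputs.add(mask);
        inputs.add(feats);
        // an array named "module_method:encode" would dispatch to encode() instead of forward()
        return inputs;
    }
}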

// File: java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/jni/JniUtils.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import ai.djl.Device;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.index.dim.NDIndexAll;
import ai.djl.ndarray.index.dim.NDIndexBooleans;
import ai.djl.ndarray.index.dim.NDIndexElement;
import ai.djl.ndarray.index.dim.NDIndexFixed;
import ai.djl.ndarray.index.dim.NDIndexNull;
import ai.djl.ndarray.index.dim.NDIndexPick;
import ai.djl.ndarray.index.dim.NDIndexSlice;
import ai.djl.ndarray.index.dim.NDIndexTake;
import ai.djl.ndarray.index.full.NDIndexFullPick;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.nn.recurrent.RNN;
import ai.djl.pytorch.engine.PtDeviceType;
import ai.djl.pytorch.engine.PtNDArray;
import ai.djl.pytorch.engine.PtNDManager;
import ai.djl.pytorch.engine.PtSymbolBlock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
/**
* A class containing utilities to interact with the PyTorch Engine's Java Native Interface (JNI)
* layer.
*/
@SuppressWarnings("MissingJavadocMethod")
public final class JniUtils {
private static final Logger logger = LoggerFactory.getLogger(JniUtils.class);
private static Set<String> configs;
private static final int NULL_PTR = 0;
private static final int BYTE_LENGTH = 4194304;
private JniUtils() {}
private static int layoutMapper(SparseFormat fmt, Device device) {
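        // Layout codes handed to the native layer, as read off the returns below:
        // 0 = dense (strided), 1 = sparse COO, 2 = MKLDNN.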
if (fmt == SparseFormat.DENSE) {
// Enable MKLDNN with environment variable
// Using MKLDNN with GPU would throw exception on libtorch
if (Boolean.getBoolean("ai.djl.pytorch.use_mkldnn") && !device.equals(Device.gpu())) {
return 2;
}
return 0;
} else if (fmt == SparseFormat.COO) {
return 1;
} else {
throw new IllegalArgumentException(
"Current PyTorch only support SparseFormat.DENSE and SparseFormat.COO");
}
}
public static boolean isGradMode() {
return PyTorchLibrary.LIB.torchIsGradMode();
}
public static void setGradMode(boolean enable) {
PyTorchLibrary.LIB.torchSetGradMode(enable);
}
public static int getNumInteropThreads() {
return PyTorchLibrary.LIB.torchGetNumInteropThreads();
}
public static int getNumThreads() {
return PyTorchLibrary.LIB.torchGetNumThreads();
}
public static void setNumInteropThreads(int threads) {
PyTorchLibrary.LIB.torchSetNumInteropThreads(threads);
}
public static void setNumThreads(int threads) {
PyTorchLibrary.LIB.torchSetNumThreads(threads);
}
public static void setBenchmarkCuDNN(boolean enable) {
PyTorchLibrary.LIB.torchSetBenchmarkCuDNN(enable);
}
public static synchronized Set<String> getFeatures() {
if (configs != null) {
return configs;
}
Set<String> features = new HashSet<>();
PyTorchLibrary.LIB.torchShowConfig(features);
configs = features;
return configs;
}
public static void setSeed(long seed) {
PyTorchLibrary.LIB.torchManualSeed(seed);
}
    /**
     * Call this method to start profiling the area you are interested in.
     *
     * <p>Example usage
     *
     * <pre>
     * JniUtils.startProfile(false, true, true);
     * Predictor.predict(img);
     * JniUtils.stopProfile(outputFile);
     * </pre>
     *
     * @param useCuda enables timing of CUDA events as well, using the cudaEvent API
     * @param recordShape if shape recording is set, information about input dimensions will be
     *     collected
     * @param profileMemory whether to report memory usage
     */
public static synchronized void startProfile(
boolean useCuda, boolean recordShape, boolean profileMemory) {
PyTorchLibrary.LIB.torchStartProfile(useCuda, recordShape, profileMemory);
}
public static synchronized void stopProfile(String outputFile) {
PyTorchLibrary.LIB.torchStopProfile(outputFile);
}
// TODO: Unchecked Datatype and device mapping
public static PtNDArray createNdFromByteBuffer(
PtNDManager manager,
ByteBuffer data,
Shape shape,
DataType dType,
SparseFormat fmt,
Device device) {
int layout = layoutMapper(fmt, device);
long handle =
PyTorchLibrary.LIB.torchFromBlob(
data,
shape.getShape(),
dType.ordinal(),
layout,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false);
if (layout == 1 || layout == 2 || device.isGpu()) {
// MKLDNN & COO & GPU device will explicitly make a copy in native code
// so we don't want to hold a reference on Java side
return new PtNDArray(manager, handle);
}
return new PtNDArray(manager, handle, data);
}
public static void emptyCudaCache() {
PyTorchLibrary.LIB.torchCudaEmptyCache();
}
public static PtNDArray createEmptyNdArray(
PtNDManager manager, Shape shape, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchEmpty(
shape.getShape(),
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray createZerosNdArray(
PtNDManager manager, Shape shape, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchZeros(
shape.getShape(),
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray createOnesNdArray(
PtNDManager manager, Shape shape, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchOnes(
shape.getShape(),
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray full(
PtNDManager manager,
Shape shape,
double fillValue,
DataType dType,
Device device,
SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchFull(
shape.getShape(),
fillValue,
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray zerosLike(
PtNDArray array, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
array.getManager(),
PyTorchLibrary.LIB.torchZerosLike(
array.getHandle(),
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray onesLike(
PtNDArray array, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
array.getManager(),
PyTorchLibrary.LIB.torchOnesLike(
array.getHandle(),
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray arange(
PtNDManager manager,
float start,
float stop,
float step,
DataType dType,
Device device,
SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchArange(
start,
stop,
step,
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray linspace(
PtNDManager manager,
float start,
float stop,
int step,
DataType dType,
Device device,
SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchLinspace(
start,
stop,
step,
dType.ordinal(),
layoutVal,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray createSparseCoo(PtNDArray indices, PtNDArray values, Shape shape) {
return new PtNDArray(
values.getManager(),
PyTorchLibrary.LIB.torchSparseCoo(
shape.getShape(), indices.getHandle(), values.getHandle(), false));
}
public static PtNDArray to(PtNDArray ndArray, DataType dataType, Device device) {
PtNDManager manager = ndArray.getManager();
        // the device of the manager should always match that of the NDArrays attached to it
if (!device.equals(manager.getDevice())) {
manager = manager.newSubManager(device);
}
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchTo(
ndArray.getHandle(),
dataType.ordinal(),
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()}));
}
public static PtNDArray toSparse(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchToSparse(ndArray.getHandle()));
}
public static PtNDArray toDense(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchToDense(ndArray.getHandle()));
}
public static PtNDArray broadcast(PtNDArray ndArray, Shape shape) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchExpand(ndArray.getHandle(), shape.getShape()));
}
public static PtNDArray slice(PtNDArray ndArray, long dim, long start, long stop, long step) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchSlice(ndArray.getHandle(), dim, start, stop, step));
}
public static PtNDArray index(
PtNDArray ndArray,
long[] minIndices,
long[] maxIndices,
long[] stepIndices,
PtNDManager manager) {
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchIndex(
ndArray.getHandle(), minIndices, maxIndices, stepIndices));
}
@SuppressWarnings("OptionalGetWithoutIsPresent")
public static PtNDArray indexAdv(PtNDArray ndArray, NDIndex index, PtNDManager manager) {
if (ndArray == null) {
return ndArray;
}
List<NDIndexElement> indices = index.getIndices();
long torchIndexHandle = PyTorchLibrary.LIB.torchIndexInit(indices.size());
try {
// Index aggregation
ListIterator<NDIndexElement> it = indices.listIterator();
while (it.hasNext()) {
if (it.nextIndex() == index.getEllipsisIndex()) {
PyTorchLibrary.LIB.torchIndexAppendNoneEllipsis(torchIndexHandle, true);
}
NDIndexElement elem = it.next();
if (elem instanceof NDIndexNull) {
PyTorchLibrary.LIB.torchIndexAppendNoneEllipsis(torchIndexHandle, false);
} else if (elem instanceof NDIndexSlice) {
Long min = ((NDIndexSlice) elem).getMin();
Long max = ((NDIndexSlice) elem).getMax();
Long step = ((NDIndexSlice) elem).getStep();
int nullSliceBinary = (min == null ? 1 : 0) * 2 + (max == null ? 1 : 0);
// nullSliceBinary encodes whether the slice end {min, max} is null:
// is_null == 1, ! is_null == 0;
// 0b11 == 3, 0b10 = 2, ...
// If {min, max} is null, then its value is ineffective, thus set to -1.
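                    // Worked example: array[2:] gives min=2, max=null, so
                    // nullSliceBinary = 0 * 2 + 1 = 1; array[:] has both ends null,
                    // giving 0b11 == 3 (the same code NDIndexAll passes below).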
PyTorchLibrary.LIB.torchIndexAppendSlice(
torchIndexHandle,
min == null ? -1 : min,
max == null ? -1 : max,
step == null ? 1 : step,
nullSliceBinary);
} else if (elem instanceof NDIndexAll) {
PyTorchLibrary.LIB.torchIndexAppendSlice(torchIndexHandle, -1, -1, 1, 3);
} else if (elem instanceof NDIndexFixed) {
PyTorchLibrary.LIB.torchIndexAppendFixed(
torchIndexHandle, ((NDIndexFixed) elem).getIndex());
} else if (elem instanceof NDIndexBooleans) {
PtNDArray indexArr = (PtNDArray) ((NDIndexBooleans) elem).getIndex();
PyTorchLibrary.LIB.torchIndexAppendArray(
torchIndexHandle, indexArr.getHandle());
} else if (elem instanceof NDIndexTake) {
PtNDArray indexArr = manager.from(((NDIndexTake) elem).getIndex());
if (indexArr.getDataType() != DataType.INT64) {
indexArr = indexArr.toType(DataType.INT64, true);
}
PyTorchLibrary.LIB.torchIndexAppendArray(
torchIndexHandle, indexArr.getHandle());
} else if (elem instanceof NDIndexPick) {
// Backward compatible
NDIndexFullPick fullPick =
NDIndexFullPick.fromIndex(index, ndArray.getShape()).get();
return pick(ndArray, manager.from(fullPick.getIndices()), fullPick.getAxis());
}
}
if (indices.size() == index.getEllipsisIndex()) {
PyTorchLibrary.LIB.torchIndexAppendNoneEllipsis(torchIndexHandle, true);
}
long ret = PyTorchLibrary.LIB.torchIndexAdvGet(ndArray.getHandle(), torchIndexHandle);
return new PtNDArray(manager, ret);
} finally {
PyTorchLibrary.LIB.torchDeleteIndex(torchIndexHandle);
}
}
@SuppressWarnings("OptionalGetWithoutIsPresent")
public static void indexAdvPut(PtNDArray ndArray, NDIndex index, PtNDArray data) {
if (ndArray == null) {
return;
}
List<NDIndexElement> indices = index.getIndices();
long torchIndexHandle = PyTorchLibrary.LIB.torchIndexInit(indices.size());
try {
// Index aggregation
ListIterator<NDIndexElement> it = indices.listIterator();
while (it.hasNext()) {
if (it.nextIndex() == index.getEllipsisIndex()) {
PyTorchLibrary.LIB.torchIndexAppendNoneEllipsis(torchIndexHandle, true);
}
NDIndexElement elem = it.next();
if (elem instanceof NDIndexNull) {
PyTorchLibrary.LIB.torchIndexAppendNoneEllipsis(torchIndexHandle, false);
} else if (elem instanceof NDIndexSlice) {
Long min = ((NDIndexSlice) elem).getMin();
Long max = ((NDIndexSlice) elem).getMax();
Long step = ((NDIndexSlice) elem).getStep();
int nullSliceBinary = (min == null ? 1 : 0) * 2 + (max == null ? 1 : 0);
// nullSliceBinary encodes whether the slice end {min, max} is null:
// is_null == 1, ! is_null == 0;
// 0b11 == 3, 0b10 = 2, ...
// If {min, max} is null, then its value is ineffective, thus set to -1.
PyTorchLibrary.LIB.torchIndexAppendSlice(
torchIndexHandle,
min == null ? -1 : min,
max == null ? -1 : max,
step == null ? 1 : step,
nullSliceBinary);
} else if (elem instanceof NDIndexAll) {
PyTorchLibrary.LIB.torchIndexAppendSlice(torchIndexHandle, -1, -1, 1, 3);
} else if (elem instanceof NDIndexFixed) {
PyTorchLibrary.LIB.torchIndexAppendFixed(
torchIndexHandle, ((NDIndexFixed) elem).getIndex());
} else if (elem instanceof NDIndexBooleans) {
PtNDArray indexArr = (PtNDArray) ((NDIndexBooleans) elem).getIndex();
PyTorchLibrary.LIB.torchIndexAppendArray(
torchIndexHandle, indexArr.getHandle());
} else if (elem instanceof NDIndexTake) {
PtNDArray indexArr = (PtNDArray) ((NDIndexTake) elem).getIndex();
if (indexArr.getDataType() != DataType.INT64) {
indexArr = indexArr.toType(DataType.INT64, true);
}
PyTorchLibrary.LIB.torchIndexAppendArray(
torchIndexHandle, indexArr.getHandle());
} else if (elem instanceof NDIndexPick) {
// Backward compatible
NDIndexFullPick fullPick =
NDIndexFullPick.fromIndex(index, ndArray.getShape()).get();
pick(
ndArray,
ndArray.getManager().from(fullPick.getIndices()),
fullPick.getAxis());
return;
}
}
if (indices.size() == index.getEllipsisIndex()) {
PyTorchLibrary.LIB.torchIndexAppendNoneEllipsis(torchIndexHandle, true);
}
PyTorchLibrary.LIB.torchIndexAdvPut(
ndArray.getHandle(), torchIndexHandle, data.getHandle());
} finally {
PyTorchLibrary.LIB.torchDeleteIndex(torchIndexHandle);
}
}
public static void indexSet(
PtNDArray ndArray,
PtNDArray value,
long[] minIndices,
long[] maxIndices,
long[] stepIndices) {
PyTorchLibrary.LIB.torchIndexPut(
ndArray.getHandle(), value.getHandle(), minIndices, maxIndices, stepIndices);
}
public static void set(PtNDArray self, ByteBuffer data) {
// Note the ByteBuffer here is directByteBuffer
PyTorchLibrary.LIB.torchSet(self.getHandle(), data);
}
public static PtNDArray gather(PtNDArray ndArray, PtNDArray index, long dim) {
if (index.getDataType() != DataType.INT64) {
index = index.toType(DataType.INT64, true);
}
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchGather(ndArray.getHandle(), index.getHandle(), dim, false));
}
public static PtNDArray take(PtNDArray ndArray, PtNDArray index, PtNDManager manager) {
if (index.getDataType() != DataType.INT64) {
index = index.toType(DataType.INT64, true);
}
return new PtNDArray(
manager, PyTorchLibrary.LIB.torchTake(ndArray.getHandle(), index.getHandle()));
}
public static PtNDArray put(PtNDArray ndArray, PtNDArray index, PtNDArray value) {
if (index.getDataType() != DataType.INT64) {
index = index.toType(DataType.INT64, true);
}
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchPut(
ndArray.getHandle(), index.getHandle(), value.getHandle()));
}
public static PtNDArray scatter(PtNDArray ndArray, PtNDArray index, PtNDArray value, int axis) {
if (index.getDataType() != DataType.INT64) {
index = index.toType(DataType.INT64, true);
}
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchScatter(
ndArray.getHandle(), index.getHandle(), value.getHandle(), axis));
}
public static PtNDArray pick(PtNDArray ndArray, PtNDArray index, long dim) {
Shape indexShape = index.getShape();
Shape ndShape = ndArray.getShape();
int shapeDims = indexShape.dimension();
int ndDims = ndShape.dimension();
if (shapeDims != ndDims) {
for (int i = 0; i < ndDims - shapeDims; ++i) {
                if (indexShape.equals(ndShape.slice(i, i + shapeDims))) {
                    long[] shapes = indexShape.getShape();
                    long[] newShape = new long[ndDims];
                    Arrays.fill(newShape, 0, i, 1L);
                    // copy the index dims into place; the surrounding dims stay 1
                    System.arraycopy(shapes, 0, newShape, i, shapes.length);
                    Arrays.fill(newShape, i + shapes.length, ndDims, 1L);
indexShape = new Shape(newShape);
break;
}
}
if (indexShape.equals(index.getShape())) {
throw new IllegalArgumentException(
"expand shape failed! Cannot expand from " + indexShape + "to " + ndShape);
}
index = index.reshape(indexShape);
}
if (index.getDataType() != DataType.INT64) {
index = index.toType(DataType.INT64, true);
}
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchGather(ndArray.getHandle(), index.getHandle(), dim, false));
}
public static PtNDArray where(PtNDArray condition, PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(),
PyTorchLibrary.LIB.torchWhere(
condition.getHandle(), self.getHandle(), other.getHandle()));
}
public static PtNDArray booleanMask(PtNDArray ndArray, PtNDArray indicesNd) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchMaskedSelect(ndArray.getHandle(), indicesNd.getHandle()));
}
public static void booleanMaskSet(PtNDArray ndArray, PtNDArray value, PtNDArray indicesNd) {
PyTorchLibrary.LIB.torchMaskedPut(
ndArray.getHandle(), value.getHandle(), indicesNd.getHandle());
}
public static PtNDArray getItem(PtNDArray ndArray, long[] indices, PtNDManager manager) {
// use a specialized API here
// due to significant performance gain
// for commonly used data loading call
if (indices.length == 1) {
return new PtNDArray(
manager, PyTorchLibrary.LIB.torchGetItem(ndArray.getHandle(), indices[0]));
}
return new PtNDArray(
manager, PyTorchLibrary.LIB.torchGetItem(ndArray.getHandle(), indices));
}
public static PtNDArray clone(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.tensorClone(ndArray.getHandle()));
}
public static PtNDArray pad(PtNDArray ndArray, long[] shape, double value) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchPad(ndArray.getHandle(), shape, value));
}
public static PtNDArray reshape(PtNDArray ndArray, long[] shape) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchReshape(ndArray.getHandle(), shape));
}
public static PtNDArray stack(PtNDArray[] arrays, int dim) {
long[] pointers = Arrays.stream(arrays).mapToLong(PtNDArray::getHandle).toArray();
return new PtNDArray(arrays[0].getManager(), PyTorchLibrary.LIB.torchStack(pointers, dim));
}
public static PtNDArray cat(PtNDArray[] arrays, long dim) {
long[] pointers = Arrays.stream(arrays).mapToLong(PtNDArray::getHandle).toArray();
return new PtNDArray(arrays[0].getManager(), PyTorchLibrary.LIB.torchCat(pointers, dim));
}
public static PtNDArray tile(PtNDArray ndArray, long[] repeats) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchRepeat(ndArray.getHandle(), repeats));
}
public static PtNDArray repeat(PtNDArray ndArray, long repeat, long dim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchRepeatInterleave(ndArray.getHandle(), repeat, dim));
}
public static PtNDArray softmax(PtNDArray ndArray, long dim, DataType dTpe) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchSoftmax(ndArray.getHandle(), dim, dTpe.ordinal()));
}
public static PtNDArray logSoftmax(PtNDArray ndArray, long dim, DataType dTpe) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchLogSoftmax(ndArray.getHandle(), dim, dTpe.ordinal()));
}
public static PtNDArray argMax(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchArgMax(ndArray.getHandle()));
}
public static PtNDArray argMax(PtNDArray ndArray, long dim, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchArgMax(ndArray.getHandle(), dim, keepDim));
}
public static NDList topK(
PtNDArray ndArray, long k, long axis, boolean largest, boolean sorted) {
long[] handles =
PyTorchLibrary.LIB.torchTopK(ndArray.getHandle(), k, axis, largest, sorted);
NDList list = new NDList(handles.length);
for (long handle : handles) {
PtNDArray array = new PtNDArray(ndArray.getManager(), handle);
list.add(array);
}
return list;
}
public static PtNDArray argMin(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchArgMin(ndArray.getHandle()));
}
public static PtNDArray argMin(PtNDArray ndArray, long dim, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchArgMin(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray argSort(PtNDArray ndArray, long dim, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchArgSort(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray sort(PtNDArray ndArray, long dim, boolean descending) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchSort(ndArray.getHandle(), dim, descending));
}
public static PtNDArray permute(PtNDArray ndArray, long[] dims) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchPermute(ndArray.getHandle(), dims));
}
public static PtNDArray flip(PtNDArray ndArray, long[] dims) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchFlip(ndArray.getHandle(), dims));
}
public static PtNDArray transpose(PtNDArray ndArray, long dim1, long dim2) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchTranspose(ndArray.getHandle(), dim1, dim2));
}
public static boolean contentEqual(PtNDArray ndArray1, PtNDArray ndArray2) {
return PyTorchLibrary.LIB.contentEqual(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray add(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchAdd(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void addi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchAddi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray sub(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchSub(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void subi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchSubi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray mul(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchMul(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void muli(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchMuli(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray div(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchTrueDivide(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void divi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchTrueDividei(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray remainder(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchRemainder(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void remainderi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchRemainderi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray pow(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchPow(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void powi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchPowi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray sign(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSign(ndArray.getHandle()));
}
public static void signi(PtNDArray ndArray) {
PyTorchLibrary.LIB.torchSigni(ndArray.getHandle());
}
public static PtNDArray logicalAnd(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchLogicalAnd(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray logicalOr(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchLogicalOr(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray logicalXor(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchLogicalXor(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray logicalNot(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchLogicalNot(ndArray.getHandle()));
}
public static PtNDArray matmul(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchMatmul(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray bmm(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchBmm(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray xlogy(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchXLogY(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray dot(PtNDArray ndArray1, PtNDArray ndArray2) {
if (ndArray1.getShape().dimension() == 1) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchDot(ndArray1.getHandle(), ndArray2.getHandle()));
}
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchMatmul(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray max(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchMaximum(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray max(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchMax(ndArray.getHandle()));
}
public static PtNDArray max(PtNDArray ndArray, long dim, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchMax(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray min(PtNDArray ndArray1, PtNDArray ndArray2) {
return new PtNDArray(
ndArray1.getManager(),
PyTorchLibrary.LIB.torchMinimum(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray min(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchMin(ndArray.getHandle()));
}
public static PtNDArray min(PtNDArray ndArray, long dim, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchMin(ndArray.getHandle(), dim, keepDim));
}
public static NDList median(PtNDArray ndArray, long dim, boolean keepDim) {
long[] handles = PyTorchLibrary.LIB.torchMedian(ndArray.getHandle(), dim, keepDim);
return new NDList(
new PtNDArray(ndArray.getManager(), handles[0]),
new PtNDArray(ndArray.getManager(), handles[1]));
}
public static PtNDArray mean(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchMean(ndArray.getHandle()));
}
public static PtNDArray mean(PtNDArray ndArray, long dim, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchMean(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray rot90(PtNDArray ndArray, int times, int[] axes) {
        long[] longAxes = Arrays.stream(axes).mapToLong(i -> i).toArray();
        return new PtNDArray(
                ndArray.getManager(),
                PyTorchLibrary.LIB.torchRot90(ndArray.getHandle(), times, longAxes));
}
public static PtNDArray sum(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSum(ndArray.getHandle()));
}
public static PtNDArray sum(PtNDArray ndArray, long[] dims, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchSum(ndArray.getHandle(), dims, keepDim));
}
public static PtNDArray cumProd(PtNDArray ndArray, long dim, DataType dataType) {
int dtPosition = -1;
if (dataType != null) {
dtPosition = dataType.ordinal();
}
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchCumProd(ndArray.getHandle(), dim, dtPosition));
}
public static PtNDArray prod(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchProd(ndArray.getHandle()));
}
public static PtNDArray prod(PtNDArray ndArray, long dim, boolean keepDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchProd(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray cumSum(PtNDArray ndArray, long dim) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchCumSum(ndArray.getHandle(), dim));
}
public static PtNDArray diagonal(PtNDArray ndArray, long offset, long axis1, long axis2) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchDiagonal(ndArray.getHandle(), offset, axis1, axis2));
}
public static PtNDArray oneHot(PtNDArray ndArray, int depth, DataType dataType) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNOneHot(
ndArray.toType(DataType.INT64, false).getHandle(), depth))
.toType(dataType, false);
}
public static NDList split(PtNDArray ndArray, long size, long axis) {
long[] ndPtrs = PyTorchLibrary.LIB.torchSplit(ndArray.getHandle(), size, axis);
NDList list = new NDList();
for (long ptr : ndPtrs) {
list.add(new PtNDArray(ndArray.getManager(), ptr));
}
return list;
}
public static NDList split(PtNDArray ndArray, long[] indices, long axis) {
long[] ndPtrs = PyTorchLibrary.LIB.torchSplit(ndArray.getHandle(), indices, axis);
NDList list = new NDList();
for (long ptr : ndPtrs) {
list.add(new PtNDArray(ndArray.getManager(), ptr));
}
return list;
}
public static PtNDArray squeeze(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSqueeze(ndArray.getHandle()));
}
public static PtNDArray squeeze(PtNDArray ndArray, long dim) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSqueeze(ndArray.getHandle(), dim));
}
public static PtNDArray unsqueeze(PtNDArray ndArray, long dim) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchUnsqueeze(ndArray.getHandle(), dim));
}
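    /**
     * Returns the unique elements of the input tensor, mirroring {@code torch.unique}.
     *
     * <p>With {@code dim == null} the result is computed over the flattened input;
     * the number of returned tensors depends on the
     * {@code returnInverse}/{@code returnCounts} flags.
     */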
public static NDList unique(
PtNDArray ndArray,
Integer dim,
boolean sorted,
boolean returnInverse,
boolean returnCounts) {
long[] handles;
if (dim == null) {
// In this case the output will be flattened.
handles =
PyTorchLibrary.LIB.torchUnique(
ndArray.getHandle(), -1, sorted, returnInverse, returnCounts);
} else {
            // wrap a possibly negative dimension into valid range
dim = Math.floorMod(dim, ndArray.getShape().dimension());
handles =
PyTorchLibrary.LIB.torchUnique(
ndArray.getHandle(), dim, sorted, returnInverse, returnCounts);
}
NDList list = new NDList(handles.length);
for (long handle : handles) {
PtNDArray array = new PtNDArray(ndArray.getManager(), handle);
list.add(array);
}
return list;
}
public static PtNDArray flatten(PtNDArray ndArray, long startDim, long endDim) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchFlatten(ndArray.getHandle(), startDim, endDim));
}
public static PtNDArray fft(PtNDArray ndArray, long length, long axis) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchFft(ndArray.getHandle(), length, axis));
}
public static PtNDArray ifft(PtNDArray ndArray, long length, long axis) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchIfft(ndArray.getHandle(), length, axis));
}
public static PtNDArray rfft(PtNDArray ndArray, long length, long axis) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchRfft(ndArray.getHandle(), length, axis));
}
public static PtNDArray irfft(PtNDArray ndArray, long length, long axis) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchIrfft(ndArray.getHandle(), length, axis));
}
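    /**
     * Computes the short-time Fourier transform of the input signal.
     *
     * <p>{@code window} is the window tensor applied to each frame; when
     * {@code returnComplex} is false the result comes back as a real-valued
     * tensor, as with {@code torch.stft}.
     */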
public static PtNDArray stft(
PtNDArray ndArray,
long nFft,
long hopLength,
PtNDArray window,
boolean center,
boolean normalize,
boolean returnComplex) {
long handle =
PyTorchLibrary.LIB.torchStft(
ndArray.getHandle(),
nFft,
hopLength,
window.getHandle(),
center,
normalize,
returnComplex);
if (handle == -1) {
throw new UnsupportedOperationException("real() is not supported.");
}
return new PtNDArray(ndArray.getManager(), handle);
}
public static PtNDArray fft2(PtNDArray ndArray, long[] sizes, long[] axes) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchFft2(ndArray.getHandle(), sizes, axes));
}
public static PtNDArray ifft2(PtNDArray ndArray, long[] sizes, long[] axes) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchIfft2(ndArray.getHandle(), sizes, axes));
}
public static PtNDArray real(PtNDArray ndArray) {
long handle = PyTorchLibrary.LIB.torchViewAsReal(ndArray.getHandle());
if (handle == -1) {
throw new UnsupportedOperationException("real() is not supported.");
}
return new PtNDArray(ndArray.getManager(), handle);
}
public static PtNDArray complex(PtNDArray ndArray) {
long handle = PyTorchLibrary.LIB.torchViewAsComplex(ndArray.getHandle());
if (handle == -1) {
throw new UnsupportedOperationException("complex() is not supported.");
}
return new PtNDArray(ndArray.getManager(), handle);
}
public static PtNDArray conj(PtNDArray ndArray) {
return new PtNDArray(ndArray.getManager(), PyTorchLibrary.LIB.conj(ndArray.getHandle()));
}
public static PtNDArray abs(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchAbs(ndArray.getHandle()));
}
public static PtNDArray square(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSquare(ndArray.getHandle()));
}
public static PtNDArray floor(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchFloor(ndArray.getHandle()));
}
public static PtNDArray ceil(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchCeil(ndArray.getHandle()));
}
public static PtNDArray round(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchRound(ndArray.getHandle()));
}
public static PtNDArray trunc(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchTrunc(ndArray.getHandle()));
}
public static PtNDArray clip(PtNDArray ndArray, Number min, Number max) {
PtNDArray minNd = (PtNDArray) ndArray.getManager().create(min);
PtNDArray maxNd = (PtNDArray) ndArray.getManager().create(max);
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchClamp(
ndArray.getHandle(), minNd.getHandle(), maxNd.getHandle()));
}
public static PtNDArray exp(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchExp(ndArray.getHandle()));
}
public static PtNDArray gammaln(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchLgamma(ndArray.getHandle()));
}
public static PtNDArray log(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchLog(ndArray.getHandle()));
}
public static PtNDArray log10(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchLog10(ndArray.getHandle()));
}
public static PtNDArray log2(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchLog2(ndArray.getHandle()));
}
public static PtNDArray sin(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSin(ndArray.getHandle()));
}
public static PtNDArray cos(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchCos(ndArray.getHandle()));
}
public static PtNDArray tan(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchTan(ndArray.getHandle()));
}
public static PtNDArray asin(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchASin(ndArray.getHandle()));
}
public static PtNDArray acos(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchAcos(ndArray.getHandle()));
}
public static PtNDArray atan(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchAtan(ndArray.getHandle()));
}
public static PtNDArray atan2(PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(),
PyTorchLibrary.LIB.torchAtan2(self.getHandle(), other.getHandle()));
}
public static PtNDArray sqrt(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSqrt(ndArray.getHandle()));
}
public static PtNDArray sinh(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSinh(ndArray.getHandle()));
}
public static PtNDArray cosh(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchCosh(ndArray.getHandle()));
}
public static PtNDArray tanh(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchTanh(ndArray.getHandle()));
}
public static PtNDArray sigmoid(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchSigmoid(ndArray.getHandle()));
}
public static PtNDArray all(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchAll(ndArray.getHandle()));
}
public static PtNDArray any(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchAny(ndArray.getHandle()));
}
public static PtNDArray none(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNone(ndArray.getHandle()));
}
public static PtNDArray eq(PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(), PyTorchLibrary.LIB.torchEq(self.getHandle(), other.getHandle()));
}
public static PtNDArray neq(PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(),
PyTorchLibrary.LIB.torchNeq(self.getHandle(), other.getHandle()));
}
public static PtNDArray gt(PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(), PyTorchLibrary.LIB.torchGt(self.getHandle(), other.getHandle()));
}
public static PtNDArray gte(PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(),
PyTorchLibrary.LIB.torchGte(self.getHandle(), other.getHandle()));
}
public static PtNDArray lt(PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(), PyTorchLibrary.LIB.torchLt(self.getHandle(), other.getHandle()));
}
public static PtNDArray lte(PtNDArray self, PtNDArray other) {
return new PtNDArray(
self.getManager(),
PyTorchLibrary.LIB.torchLte(self.getHandle(), other.getHandle()));
}
public static PtNDArray neg(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNeg(ndArray.getHandle()));
}
public static void negi(PtNDArray ndArray) {
PyTorchLibrary.LIB.torchNegi(ndArray.getHandle());
}
public static PtNDArray isNaN(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchIsNaN(ndArray.getHandle()));
}
public static PtNDArray isInf(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchIsInf(ndArray.getHandle()));
}
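    /**
     * Draws random integers uniformly from {@code [low, high)}, as in
     * {@code torch.randint} ({@code high} is exclusive).
     */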
public static PtNDArray randint(
PtNDManager manager,
long low,
long high,
Shape size,
DataType dataType,
Device device) {
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchRandint(
low,
high,
size.getShape(),
dataType.ordinal(),
layoutMapper(SparseFormat.DENSE, device),
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray randperm(
PtNDManager manager, long n, DataType dataType, Device device) {
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchRandPerm(
n,
dataType.ordinal(),
layoutMapper(SparseFormat.DENSE, device),
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray normal(
PtNDManager manager,
double mean,
double std,
Shape size,
DataType dataType,
Device device) {
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchNormal(
mean,
std,
size.getShape(),
dataType.ordinal(),
layoutMapper(SparseFormat.DENSE, device),
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray uniform(
PtNDManager manager,
double low,
double high,
Shape size,
DataType dataType,
Device device) {
return new PtNDArray(
manager,
PyTorchLibrary.LIB.tensorUniform(
low,
high,
size.getShape(),
dataType.ordinal(),
layoutMapper(SparseFormat.DENSE, device),
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray eye(
PtNDManager manager, int n, int m, DataType dataType, Device device, SparseFormat fmt) {
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchEye(
n,
m,
dataType.ordinal(),
layoutMapper(fmt, device),
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
false));
}
public static PtNDArray hannWindow(
PtNDManager manager, long numPoints, boolean periodic, Device device) {
return new PtNDArray(
manager,
PyTorchLibrary.LIB.torchHannWindow(
numPoints,
periodic,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()}));
}
public static PtNDArray erfinv(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchErfinv(ndArray.getHandle()));
}
public static PtNDArray erf(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchErf(ndArray.getHandle()));
}
public static PtNDArray inverse(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchInverse(ndArray.getHandle()));
}
public static PtNDArray interpolate(
PtNDArray ndArray, long[] size, int mode, boolean alignCorners) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNInterpolate(
ndArray.getHandle(), size, mode, alignCorners));
}
public static PtNDArray linear(PtNDArray input, PtNDArray weight, PtNDArray bias) {
return new PtNDArray(
input.getManager(),
PyTorchLibrary.LIB.torchNNLinear(
input.getHandle(),
weight.getHandle(),
bias == null ? NULL_PTR : bias.getHandle()));
}
public static PtNDArray embedding(PtNDArray input, PtNDArray weight, boolean sparse) {
return new PtNDArray(
input.getManager(),
PyTorchLibrary.LIB.torchNNEmbedding(input.getHandle(), weight.getHandle(), sparse));
}
public static PtNDArray relu(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNNRelu(ndArray.getHandle()));
}
public static PtNDArray softPlus(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNNSoftPlus(ndArray.getHandle()));
}
public static PtNDArray softSign(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNNSoftSign(ndArray.getHandle()));
}
public static PtNDArray leakyRelu(PtNDArray ndArray, double negativeSlope) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNLeakyRelu(ndArray.getHandle(), negativeSlope));
}
public static PtNDArray elu(PtNDArray ndArray, double alpha) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNNElu(ndArray.getHandle(), alpha));
}
public static PtNDArray selu(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNNSelu(ndArray.getHandle()));
}
public static PtNDArray gelu(PtNDArray ndArray) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNNGelu(ndArray.getHandle()));
}
public static PtNDArray convolution(
PtNDArray ndArray,
PtNDArray weight,
PtNDArray bias,
Shape stride,
Shape padding,
Shape dilation,
int groups) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNConvNd(
ndArray.getHandle(),
weight.getHandle(),
(bias != null) ? bias.getHandle() : NULL_PTR,
stride.getShape(),
padding.getShape(),
dilation.getShape(),
groups));
}
public static PtNDArray batchNorm(
PtNDArray ndArray,
PtNDArray gamma,
PtNDArray beta,
PtNDArray runningMean,
PtNDArray runningVar,
boolean isTraining,
double momentum,
double eps) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNBatchNorm(
ndArray.getHandle(),
gamma.getHandle(),
beta.getHandle(),
runningMean.getHandle(),
runningVar.getHandle(),
isTraining,
momentum,
eps));
}
public static PtNDArray layerNorm(
PtNDArray ndArray, Shape normalizedShape, PtNDArray gamma, PtNDArray beta, double eps) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNLayerNorm(
ndArray.getHandle(),
normalizedShape.getShape(),
gamma.getHandle(),
beta.getHandle(),
eps));
}
public static PtNDArray normalize(PtNDArray ndArray, double p, long dim, double eps) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNNormalize(ndArray.getHandle(), p, dim, eps));
}
public static PtNDArray dropout(PtNDArray ndArray, double prob, boolean training) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNDropout(ndArray.getHandle(), prob, training));
}
public static NDList rnn(
PtNDArray input,
PtNDArray hx,
NDList params,
boolean hasBiases,
int numLayers,
RNN.Activation activation,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
PtNDManager manager = input.getManager();
long[] paramHandles =
params.stream().mapToLong(array -> ((PtNDArray) array).getHandle()).toArray();
long[] outputs =
PyTorchLibrary.LIB.torchNNRnn(
input.getHandle(),
hx.getHandle(),
paramHandles,
hasBiases,
numLayers,
activation.ordinal(),
dropRate,
training,
bidirectional,
batchFirst);
NDList res = new NDList();
for (long output : outputs) {
res.add(new PtNDArray(manager, output));
}
return res;
}
public static NDList gru(
PtNDArray input,
PtNDArray hx,
NDList params,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
PtNDManager manager = input.getManager();
long[] paramHandles =
params.stream().mapToLong(array -> ((PtNDArray) array).getHandle()).toArray();
long[] outputs =
PyTorchLibrary.LIB.torchNNGru(
input.getHandle(),
hx.getHandle(),
paramHandles,
hasBiases,
numLayers,
dropRate,
training,
bidirectional,
batchFirst);
NDList res = new NDList();
for (long output : outputs) {
res.add(new PtNDArray(manager, output));
}
return res;
}
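    /**
     * Runs a multi-layer LSTM natively.
     *
     * <p>{@code hx} carries the initial hidden and cell states; the outputs are
     * returned as an {@link NDList} in the order produced by the native call.
     */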
public static NDList lstm(
PtNDArray input,
NDList hx,
NDList params,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
PtNDManager manager = input.getManager();
long[] hxHandles =
hx.stream().mapToLong(array -> ((PtNDArray) array).getHandle()).toArray();
long[] paramHandles =
params.stream().mapToLong(array -> ((PtNDArray) array).getHandle()).toArray();
long[] outputs =
PyTorchLibrary.LIB.torchNNLstm(
input.getHandle(),
hxHandles,
paramHandles,
hasBiases,
numLayers,
dropRate,
training,
bidirectional,
batchFirst);
NDList res = new NDList();
for (long output : outputs) {
res.add(new PtNDArray(manager, output));
}
return res;
}
public static PtNDArray avgPool(
PtNDArray ndArray,
Shape kernelSize,
Shape stride,
Shape padding,
boolean ceilMode,
boolean countIncludePad) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNAvgPool(
ndArray.getHandle(),
kernelSize.getShape(),
stride.getShape(),
padding.getShape(),
ceilMode,
countIncludePad));
}
public static PtNDArray maxPool(
PtNDArray ndArray, Shape kernelSize, Shape stride, Shape padding, boolean ceilMode) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNMaxPool(
ndArray.getHandle(),
kernelSize.getShape(),
stride.getShape(),
padding.getShape(),
ceilMode));
}
public static PtNDArray adaptiveMaxPool(PtNDArray ndArray, Shape outputSize) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNAdaptiveMaxPool(
ndArray.getHandle(), outputSize.getShape()));
}
public static PtNDArray adaptiveAvgPool(PtNDArray ndArray, Shape outputSize) {
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNAdaptiveAvgPool(
ndArray.getHandle(), outputSize.getShape()));
}
public static PtNDArray lpPool(
PtNDArray ndArray, double normType, Shape kernelSize, Shape stride, boolean ceilMode) {
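        // LpPool is only implemented natively for 1-D and 2-D pooling; an input
        // with 3 spatial dimensions (a 5-D tensor) cannot be handled.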
if (ndArray.getShape().dimension() - 2 == 3) {
throw new UnsupportedOperationException("3D lpPool is not supported in PyTorch engine");
}
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNNLpPool(
ndArray.getHandle(),
normType,
kernelSize.getShape(),
stride.getShape(),
ceilMode));
}
public static DataType getDataType(PtNDArray ndArray) {
int dataType = PyTorchLibrary.LIB.torchDType(ndArray.getHandle());
return DataType.values()[dataType];
}
public static Device getDevice(PtNDArray ndArray) {
int[] device = PyTorchLibrary.LIB.torchDevice(ndArray.getHandle());
String deviceType = PtDeviceType.fromDeviceType(device[0]);
return Device.of(deviceType, device[1]);
}
public static SparseFormat getSparseFormat(PtNDArray ndArray) {
int layout = PyTorchLibrary.LIB.torchLayout(ndArray.getHandle());
if (layout == 0) {
return SparseFormat.DENSE;
} else if (layout == 1) {
return SparseFormat.COO;
} else if (layout == 2) {
logger.debug("MKLDNN layout is used!");
return SparseFormat.DENSE;
}
throw new UnsupportedOperationException("Unsupported data format");
}
public static Shape getShape(PtNDArray ndArray) {
return new Shape(PyTorchLibrary.LIB.torchSizes(ndArray.getHandle()));
}
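    /**
     * Returns the tensor contents as a {@link ByteBuffer} in native byte order.
     *
     * <p>With {@code tryDirect} the buffer is a direct view of the (contiguous)
     * native storage, so it remains valid only as long as the tensor does;
     * otherwise the data is copied into a heap buffer.
     */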
public static ByteBuffer getByteBuffer(PtNDArray ndArray, boolean tryDirect) {
// Operation is CPU only
if (!ndArray.getDevice().equals(Device.cpu())) {
ndArray = ndArray.toDevice(Device.cpu(), false);
}
if (tryDirect) {
if (ndArray.isSparse()
|| getLayout(ndArray) == 2
|| !PyTorchLibrary.LIB.torchIsContiguous(ndArray.getHandle())) {
                // keep the same lifecycle as the original NDArray
ndArray =
new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchToContiguous(ndArray.getHandle()));
}
return PyTorchLibrary.LIB
.torchDirectByteBuffer(ndArray.getHandle())
.order(ByteOrder.nativeOrder());
}
return ByteBuffer.wrap(PyTorchLibrary.LIB.torchDataPtr(ndArray.getHandle()))
.order(ByteOrder.nativeOrder());
}
public static void deleteNDArray(long handle) {
PyTorchLibrary.LIB.torchDeleteTensor(handle);
}
public static boolean requiresGrad(PtNDArray ndArray) {
return PyTorchLibrary.LIB.torchRequiresGrad(ndArray.getHandle());
}
public static String getGradientFunctionNames(PtNDArray ndArray) {
return PyTorchLibrary.LIB.torchGradFnName(ndArray.getHandle());
}
public static void attachGradient(PtNDArray ndArray, boolean requiresGrad) {
PyTorchLibrary.LIB.torchAttachGrad(ndArray.getHandle(), requiresGrad);
}
public static PtNDArray detachGradient(PtNDArray ndArray) {
        // TODO: a detached ndarray may not use the same manager as the attached one
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchDetachGrad(ndArray.getHandle()));
}
public static PtNDArray getGradient(PtNDArray ndArray) {
long pointer = PyTorchLibrary.LIB.torchGrad(ndArray.getHandle());
if (pointer == NULL_PTR) {
return null;
}
return new PtNDArray(ndArray.getManager(), pointer);
}
public static void backward(
PtNDArray ndArray, PtNDArray gradNd, boolean keepGraph, boolean createGraph) {
PyTorchLibrary.LIB.torchBackward(
ndArray.getHandle(), gradNd.getHandle(), keepGraph, createGraph);
}
public static void deleteModule(long pointer) {
PyTorchLibrary.LIB.torchDeleteModule(pointer);
}
public static void setGraphExecutorOptimize(boolean enabled) {
PyTorchLibrary.LIB.setGraphExecutorOptimize(enabled);
}
public static PtSymbolBlock loadModule(
PtNDManager manager,
Path path,
boolean mapLocation,
String[] extraFileKeys,
String[] extraFileValues,
boolean trainParam) {
Device device = manager.getDevice();
// MPS doesn't support mapLocation
if ("mps".equals(device.getDeviceType())) {
mapLocation = false;
}
logger.debug("mapLocation: {}", mapLocation);
logger.debug("extraFileKeys: {}", Arrays.toString(extraFileKeys));
long handle =
PyTorchLibrary.LIB.moduleLoad(
path.toString(),
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
mapLocation,
extraFileKeys,
extraFileValues,
trainParam);
return new PtSymbolBlock(manager, handle);
}
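    /**
     * Loads a TorchScript module from an {@link InputStream}.
     *
     * <p>An illustrative call (the path and {@code manager} are placeholders,
     * not part of this API):
     *
     * <pre>{@code
     * try (InputStream is = Files.newInputStream(Paths.get("model.pt"))) {
     *     PtSymbolBlock block = JniUtils.loadModule(manager, is, true, false);
     * }
     * }</pre>
     */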
public static PtSymbolBlock loadModule(
PtNDManager manager, InputStream is, boolean mapLocation, boolean hasSize)
throws IOException {
long handle = loadModuleHandle(is, manager.getDevice(), mapLocation, hasSize);
return new PtSymbolBlock(manager, handle);
}
public static long loadModuleHandle(
InputStream is, Device device, boolean mapLocation, boolean hasSize)
throws IOException {
byte[] buf = new byte[BYTE_LENGTH];
long size = -1;
if (hasSize) {
size = new DataInputStream(is).readLong();
}
// MPS doesn't support mapLocation
if ("mps".equals(device.getDeviceType())) {
mapLocation = false;
}
logger.debug("mapLocation: {}", mapLocation);
return PyTorchLibrary.LIB.moduleLoad(
is,
new int[] {PtDeviceType.toDeviceType(device), device.getDeviceId()},
mapLocation,
buf,
size);
}
public static void writeModule(PtSymbolBlock block, OutputStream os, boolean writeSize) {
byte[] buf = new byte[BYTE_LENGTH];
PyTorchLibrary.LIB.moduleWrite(block.getHandle(), os, buf, writeSize);
}
public static NDList moduleGetParams(PtSymbolBlock block, PtNDManager manager) {
long[] handles = PyTorchLibrary.LIB.moduleGetParams(block.getHandle());
String[] names = PyTorchLibrary.LIB.moduleGetParamNames(block.getHandle());
NDList list = new NDList(handles.length);
for (int i = 0; i < handles.length; i++) {
PtNDArray array = new PtNDArray(manager, handles[i]);
array.setName(names[i]);
list.add(array);
}
return list;
}
public static String[] getMethodNames(PtSymbolBlock block) {
return PyTorchLibrary.LIB.moduleGetMethodNames(block.getHandle());
}
public static void enableInferenceMode(PtSymbolBlock block) {
PyTorchLibrary.LIB.moduleEval(block.getHandle());
}
public static void enableTrainingMode(PtSymbolBlock block) {
PyTorchLibrary.LIB.moduleTrain(block.getHandle());
}
public static void zeroGrad(PtNDArray weight) {
PyTorchLibrary.LIB.zeroGrad(weight.getHandle());
}
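    /**
     * Applies a fused Adam update to {@code weight} in native code; when
     * {@code adamw} is true the decoupled-weight-decay (AdamW) variant is used.
     */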
public static void adamUpdate(
PtNDArray weight,
PtNDArray grad,
PtNDArray mean,
PtNDArray variance,
float lr,
float learningRateBiasCorrection,
float wd,
float rescaleGrad,
float clipGrad,
float beta1,
float beta2,
float eps,
boolean adamw) {
PyTorchLibrary.LIB.adamUpdate(
weight.getHandle(),
grad.getHandle(),
mean.getHandle(),
variance.getHandle(),
lr,
learningRateBiasCorrection,
wd,
rescaleGrad,
clipGrad,
beta1,
beta2,
eps,
adamw);
}
public static void sgdUpdate(
PtNDArray weight,
PtNDArray grad,
PtNDArray state,
float lr,
float wd,
float rescaleGrad,
float clipGrad,
float momentum) {
PyTorchLibrary.LIB.sgdUpdate(
weight.getHandle(),
grad.getHandle(),
(state == null) ? NULL_PTR : state.getHandle(),
lr,
wd,
rescaleGrad,
clipGrad,
momentum);
}
// Internal use only
public static int getLayout(PtNDArray array) {
return PyTorchLibrary.LIB.torchLayout(array.getHandle());
}
public static PtNDArray norm(PtNDArray ndArray, int ord, int[] axes, boolean keepDims) {
long[] longAxes = Arrays.stream(axes).mapToLong(i -> i).toArray();
return new PtNDArray(
ndArray.getManager(),
PyTorchLibrary.LIB.torchNorm(ndArray.getHandle(), ord, longAxes, keepDims));
}
public static PtNDArray nonZeros(PtNDArray ndArray) {
if (ndArray.isScalar()) {
ndArray = (PtNDArray) ndArray.reshape(-1);
}
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchNonZeros(ndArray.getHandle()));
}
public static PtNDArray diff(PtNDArray ndArray, int n, int dim) {
return new PtNDArray(
ndArray.getManager(), PyTorchLibrary.LIB.torchDiff(ndArray.getHandle(), n, dim));
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/jni/LibUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import ai.djl.engine.EngineException;
import ai.djl.repository.Version;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import ai.djl.util.cuda.CudaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
/**
* Utilities for finding the PyTorch Engine binary on the System.
*
* <p>The Engine will be searched for in a variety of locations in the following order:
*
* <ol>
* <li>In the path specified by the PYTORCH_LIBRARY_PATH environment variable
* <li>In a jar file location in the classpath. These jars can be created with the pytorch-native
* module.
* </ol>
*/
@SuppressWarnings("MissingJavadocMethod")
public final class LibUtils {
private static final Logger logger = LoggerFactory.getLogger(LibUtils.class);
private static final String NATIVE_LIB_NAME = System.mapLibraryName("torch");
private static final String JNI_LIB_NAME = System.mapLibraryName("djl_torch");
private static final Pattern VERSION_PATTERN =
Pattern.compile("(\\d+\\.\\d+\\.\\d+(-[a-z]+)?)(-SNAPSHOT)?(-\\d+)?");
private static final Pattern LIB_PATTERN = Pattern.compile("(.*\\.(so(\\.\\d+)*|dll|dylib))");
private static LibTorch libTorch;
private LibUtils() {}
public static synchronized void loadLibrary() {
        // TODO: workaround to make this work in Android Studio;
        // ideally we would search several locations for the native library
if ("http://www.android.com/".equals(System.getProperty("java.vendor.url"))) {
System.loadLibrary("djl_torch"); // NOPMD
return;
}
libTorch = getLibTorch();
loadLibTorch(libTorch);
Path path = findJniLibrary(libTorch).toAbsolutePath();
loadNativeLibrary(path.toString());
}
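    // Resolves libtorch: an explicit PYTORCH_LIBRARY_PATH override wins over the
    // bundled or downloaded native library.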
private static LibTorch getLibTorch() {
LibTorch lib = findOverrideLibrary();
if (lib != null) {
return lib;
}
return findNativeLibrary();
}
public static String getVersion() {
Matcher m = VERSION_PATTERN.matcher(libTorch.version);
if (m.matches()) {
return m.group(1);
}
return libTorch.version;
}
public static String getLibtorchPath() {
return libTorch.dir.toString();
}
private static void loadLibTorch(LibTorch libTorch) {
Path libDir = libTorch.dir.toAbsolutePath();
if (Files.exists(libDir.resolve("libstdc++.so.6"))) {
String libstd = Utils.getEnvOrSystemProperty("LIBSTDCXX_LIBRARY_PATH");
if (libstd != null) {
try {
logger.info("Loading libstdc++.so.6 from: {}", libstd);
System.load(libstd);
} catch (UnsatisfiedLinkError e) {
logger.warn("Failed Loading libstdc++.so.6 from: {}", libstd);
}
}
}
String libExclusion = Utils.getEnvOrSystemProperty("PYTORCH_LIBRARY_EXCLUSION", "");
Set<String> exclusion = new HashSet<>(Arrays.asList(libExclusion.split(",")));
boolean isCuda = libTorch.flavor.contains("cu");
List<String> deferred =
Arrays.asList(
System.mapLibraryName("fbgemm"),
System.mapLibraryName("caffe2_nvrtc"),
System.mapLibraryName("torch_cpu"),
System.mapLibraryName("c10_cuda"),
System.mapLibraryName("torch_cuda_cpp"),
System.mapLibraryName("torch_cuda_cu"),
System.mapLibraryName("torch_cuda"),
System.mapLibraryName("nvfuser_codegen"),
System.mapLibraryName("torch"));
Set<String> loadLater = new HashSet<>(deferred);
try (Stream<Path> paths = Files.walk(libDir)) {
Map<Path, Integer> rank = new ConcurrentHashMap<>();
paths.filter(
path -> {
String name = path.getFileName().toString();
if (!LIB_PATTERN.matcher(name).matches()
|| exclusion.contains(name)) {
return false;
                            } else if (!isCuda
                                    && (name.contains("nvrtc")
                                            || name.contains("cudart")
                                            || name.contains("nvTools"))) {
return false;
} else if (name.startsWith("libarm_compute-")
|| name.startsWith("libopenblasp")) {
rank.put(path, 2);
return true;
} else if (name.startsWith("libarm_compute_")) {
rank.put(path, 3);
return true;
} else if (!loadLater.contains(name)
&& Files.isRegularFile(path)
&& !name.endsWith(JNI_LIB_NAME)
&& !name.contains("torch_")
&& !name.contains("caffe2_")
&& !name.startsWith("cudnn")) {
rank.put(path, 1);
return true;
}
return false;
})
.sorted(Comparator.comparingInt(rank::get))
.map(Path::toString)
.forEach(LibUtils::loadNativeLibrary);
            if (Files.exists(libDir.resolve("cudnn64_8.dll"))) {
loadNativeLibrary(libDir.resolve("cudnn64_8.dll").toString());
loadNativeLibrary(libDir.resolve("cudnn_ops_infer64_8.dll").toString());
loadNativeLibrary(libDir.resolve("cudnn_ops_train64_8.dll").toString());
loadNativeLibrary(libDir.resolve("cudnn_cnn_infer64_8.dll").toString());
loadNativeLibrary(libDir.resolve("cudnn_cnn_train64_8.dll").toString());
loadNativeLibrary(libDir.resolve("cudnn_adv_infer64_8.dll").toString());
loadNativeLibrary(libDir.resolve("cudnn_adv_train64_8.dll").toString());
            } else if (Files.exists(libDir.resolve("cudnn64_7.dll"))) {
loadNativeLibrary(libDir.resolve("cudnn64_7.dll").toString());
}
if (!isCuda) {
deferred =
Arrays.asList(
System.mapLibraryName("fbgemm"),
System.mapLibraryName("torch_cpu"),
System.mapLibraryName("torch"));
}
for (String dep : deferred) {
Path path = libDir.resolve(dep);
if (Files.exists(path)) {
loadNativeLibrary(path.toString());
}
}
} catch (IOException e) {
throw new EngineException("Folder not exist! " + libDir, e);
}
}
private static LibTorch findOverrideLibrary() {
String libPath = Utils.getEnvOrSystemProperty("PYTORCH_LIBRARY_PATH");
if (libPath != null) {
return findLibraryInPath(libPath);
}
return null;
}
private static LibTorch findLibraryInPath(String libPath) {
String[] paths = libPath.split(File.pathSeparator);
for (String path : paths) {
File p = new File(path);
if (!p.exists()) {
continue;
}
if (p.isFile() && NATIVE_LIB_NAME.equals(p.getName())) {
return new LibTorch(p.getParentFile().toPath().toAbsolutePath());
}
File file = new File(path, NATIVE_LIB_NAME);
if (file.exists() && file.isFile()) {
return new LibTorch(p.toPath().toAbsolutePath());
}
}
return null;
}
private static Path findJniLibrary(LibTorch libTorch) {
String classifier = libTorch.classifier;
String version = libTorch.version;
String djlVersion = libTorch.apiVersion;
String flavor = libTorch.flavor;
        // Look for the JNI library in libTorch.dir first
Path libDir = libTorch.dir.toAbsolutePath();
Path path = libDir.resolve(djlVersion + '-' + JNI_LIB_NAME);
if (Files.exists(path)) {
return path;
}
path = libDir.resolve(JNI_LIB_NAME);
if (Files.exists(path)) {
return path;
}
        // always use the cache dir; it may differ from libTorch.dir
Path cacheDir = Utils.getEngineCacheDir("pytorch");
Path dir = cacheDir.resolve(version + '-' + flavor + '-' + classifier);
path = dir.resolve(djlVersion + '-' + JNI_LIB_NAME);
if (Files.exists(path)) {
return path;
}
Matcher matcher = VERSION_PATTERN.matcher(version);
if (!matcher.matches()) {
throw new EngineException("Unexpected version: " + version);
}
version = matcher.group(1);
try {
URL url = ClassLoaderUtils.getResource("jnilib/pytorch.properties");
String jniVersion = null;
if (url != null) {
Properties prop = new Properties();
try (InputStream is = Utils.openUrl(url)) {
prop.load(is);
}
jniVersion = prop.getProperty("jni_version");
if (jniVersion == null) {
throw new AssertionError("No PyTorch jni version found.");
}
}
if (jniVersion == null) {
downloadJniLib(dir, path, djlVersion, version, classifier, flavor);
return path;
} else if (!jniVersion.startsWith(version + '-' + djlVersion)) {
logger.warn("Found mismatch PyTorch jni: {}", jniVersion);
downloadJniLib(dir, path, djlVersion, version, classifier, flavor);
return path;
}
} catch (IOException e) {
throw new AssertionError("Failed to read PyTorch jni properties file.", e);
}
Path tmp = null;
String libPath = "jnilib/" + classifier + '/' + flavor + '/' + JNI_LIB_NAME;
logger.info("Extracting {} to cache ...", libPath);
try (InputStream is = ClassLoaderUtils.getResourceAsStream(libPath)) {
Files.createDirectories(dir);
tmp = Files.createTempFile(dir, "jni", "tmp");
Files.copy(is, tmp, StandardCopyOption.REPLACE_EXISTING);
Utils.moveQuietly(tmp, path);
return path;
} catch (IOException e) {
throw new EngineException("Cannot copy jni files", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static LibTorch findNativeLibrary() {
Platform platform = Platform.detectPlatform("pytorch");
String overrideVersion = Utils.getEnvOrSystemProperty("PYTORCH_VERSION");
if (overrideVersion != null
&& !overrideVersion.isEmpty()
&& !platform.getVersion().startsWith(overrideVersion)) {
// platform.version can be 1.8.1-20210421
logger.warn("Override PyTorch version: {}.", overrideVersion);
platform = Platform.detectPlatform("pytorch", overrideVersion);
return downloadPyTorch(platform);
}
if (platform.isPlaceholder()) {
return downloadPyTorch(platform);
}
return copyNativeLibraryFromClasspath(platform);
}
private static LibTorch copyNativeLibraryFromClasspath(Platform platform) {
logger.debug("Found bundled PyTorch package: {}.", platform);
String version = platform.getVersion();
String flavor = platform.getFlavor();
if (!flavor.endsWith("-precxx11")
&& Arrays.asList(platform.getLibraries()).contains("libstdc++.so.6")) {
// for PyTorch 1.9.1 and older
flavor += "-precxx11"; // NOPMD
}
String classifier = platform.getClassifier();
Path tmp = null;
try {
Path cacheDir = Utils.getEngineCacheDir("pytorch");
logger.debug("Using cache dir: {}", cacheDir);
Path dir = cacheDir.resolve(version + '-' + flavor + '-' + classifier);
Path path = dir.resolve(NATIVE_LIB_NAME);
if (Files.exists(path)) {
return new LibTorch(dir.toAbsolutePath(), platform, flavor);
}
Utils.deleteQuietly(dir);
Matcher m = VERSION_PATTERN.matcher(version);
if (!m.matches()) {
throw new AssertionError("Unexpected version: " + version);
}
String pathPrefix = "pytorch/" + flavor + '/' + classifier;
Files.createDirectories(cacheDir);
tmp = Files.createTempDirectory(cacheDir, "tmp");
for (String file : platform.getLibraries()) {
String libPath = pathPrefix + '/' + file;
logger.info("Extracting {} to cache ...", libPath);
try (InputStream is = ClassLoaderUtils.getResourceAsStream(libPath)) {
Files.copy(is, tmp.resolve(file), StandardCopyOption.REPLACE_EXISTING);
}
}
Utils.moveQuietly(tmp, dir);
return new LibTorch(dir.toAbsolutePath(), platform, flavor);
} catch (IOException e) {
throw new EngineException("Failed to extract PyTorch native library", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static void loadNativeLibrary(String path) {
logger.debug("Loading native library: {}", path);
String nativeHelper = System.getProperty("ai.djl.pytorch.native_helper");
if (nativeHelper != null && !nativeHelper.isEmpty()) {
ClassLoaderUtils.nativeLoad(nativeHelper, path);
} else {
System.load(path); // NOPMD
}
}
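    // Downloads a matching libtorch build from publish.djl.ai, picking the highest
    // CUDA flavor that does not exceed the detected one and falling back to CPU.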
private static LibTorch downloadPyTorch(Platform platform) {
String version = platform.getVersion();
String classifier = platform.getClassifier();
String precxx11;
String flavor = Utils.getEnvOrSystemProperty("PYTORCH_FLAVOR");
boolean override;
if (flavor == null || flavor.isEmpty()) {
flavor = platform.getFlavor();
if (System.getProperty("os.name").startsWith("Linux")
&& (Boolean.parseBoolean(Utils.getEnvOrSystemProperty("PYTORCH_PRECXX11"))
|| ("aarch64".equals(platform.getOsArch())
&& new Version(version).compareTo(new Version("2.7.1")) < 0))) {
precxx11 = "-precxx11";
} else {
precxx11 = "";
}
flavor += precxx11;
override = false;
} else {
logger.info("Uses override PYTORCH_FLAVOR: {}", flavor);
precxx11 = flavor.endsWith("-precxx11") ? "-precxx11" : "";
override = true;
}
Path cacheDir = Utils.getEngineCacheDir("pytorch");
Path dir = cacheDir.resolve(version + '-' + flavor + '-' + classifier);
Path path = dir.resolve(NATIVE_LIB_NAME);
if (Files.exists(path)) {
logger.debug("Using cache dir: {}", dir);
return new LibTorch(dir.toAbsolutePath(), platform, flavor);
}
Matcher matcher = VERSION_PATTERN.matcher(version);
if (!matcher.matches()) {
throw new AssertionError("Unexpected version: " + version);
}
String link = "https://publish.djl.ai/pytorch/" + matcher.group(1);
Path tmp = null;
Path indexFile = cacheDir.resolve(version + ".txt");
if (Files.notExists(indexFile)) {
Path tempFile = cacheDir.resolve(version + ".tmp");
try (InputStream is = Utils.openUrl(link + "/files.txt")) {
Files.createDirectories(cacheDir);
Files.copy(is, tempFile, StandardCopyOption.REPLACE_EXISTING);
Utils.moveQuietly(tempFile, indexFile);
} catch (IOException e) {
throw new EngineException("Failed to save pytorch index file", e);
} finally {
Utils.deleteQuietly(tempFile);
}
}
try (InputStream is = Files.newInputStream(indexFile)) {
            // read the list of downloadable files from the index
Files.createDirectories(cacheDir);
List<String> lines = Utils.readLines(is);
if (flavor.startsWith("cu")) {
int cudaVersion = Integer.parseInt(flavor.substring(2, 5));
Pattern pattern =
Pattern.compile(
"cu(\\d\\d\\d)"
+ precxx11
+ '/'
+ classifier
+ "/native/lib/"
+ NATIVE_LIB_NAME
+ ".gz");
List<Integer> cudaVersions = new ArrayList<>();
boolean match = false;
for (String line : lines) {
Matcher m = pattern.matcher(line);
if (m.matches()) {
cudaVersions.add(Integer.parseInt(m.group(1)));
}
}
// find highest matching CUDA version
cudaVersions.sort(Collections.reverseOrder());
for (int cuda : cudaVersions) {
if (override && cuda == cudaVersion) {
match = true;
break;
} else if (cuda <= cudaVersion) {
flavor = "cu" + cuda + precxx11;
match = true;
break;
}
}
if (!match) {
logger.warn("No matching cuda flavor for {} found: {}.", classifier, flavor);
// fallback to CPU
flavor = "cpu" + precxx11;
}
            // check the cache again with the resolved flavor
dir = cacheDir.resolve(version + '-' + flavor + '-' + classifier);
path = dir.resolve(NATIVE_LIB_NAME);
if (Files.exists(path)) {
return new LibTorch(dir.toAbsolutePath(), platform, flavor);
}
}
logger.debug("Using cache dir: {}", dir);
tmp = Files.createTempDirectory(cacheDir, "tmp");
boolean found = false;
for (String line : lines) {
if (line.startsWith(flavor + '/' + classifier + '/')) {
found = true;
URL url = new URL(link + '/' + line);
String fileName = line.substring(line.lastIndexOf('/') + 1, line.length() - 3);
fileName = URLDecoder.decode(fileName, "UTF-8");
logger.info("Downloading {} ...", url);
try (InputStream fis = new GZIPInputStream(Utils.openUrl(url))) {
Files.copy(fis, tmp.resolve(fileName), StandardCopyOption.REPLACE_EXISTING);
}
}
}
if (!found) {
throw new EngineException(
"No PyTorch native library matches your operating system: " + platform);
}
Utils.moveQuietly(tmp, dir);
return new LibTorch(dir.toAbsolutePath(), platform, flavor);
} catch (IOException e) {
throw new EngineException("Failed to download PyTorch native library", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static void downloadJniLib(
Path cacheDir,
Path path,
String djlVersion,
String version,
String classifier,
String flavor) {
String url =
"https://publish.djl.ai/pytorch/"
+ version
+ "/jnilib/"
+ djlVersion
+ '/'
+ classifier
+ '/'
+ flavor
+ '/'
+ JNI_LIB_NAME;
logger.info("Downloading jni {} to cache ...", url);
Path tmp = null;
try (InputStream is = Utils.openUrl(url)) {
Files.createDirectories(cacheDir);
tmp = Files.createTempFile(cacheDir, "jni", "tmp");
Files.copy(is, tmp, StandardCopyOption.REPLACE_EXISTING);
Utils.moveQuietly(tmp, path);
} catch (IOException e) {
throw new EngineException("Cannot download jni files: " + url, e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static final class LibTorch {
Path dir;
String version;
String apiVersion;
String flavor;
String classifier;
LibTorch(Path dir) {
Platform platform = Platform.detectPlatform("pytorch");
this.dir = dir;
this.apiVersion = platform.getApiVersion();
this.classifier = platform.getClassifier();
version = Utils.getEnvOrSystemProperty("PYTORCH_VERSION");
if (version == null || version.isEmpty()) {
version = platform.getVersion();
}
flavor = Utils.getEnvOrSystemProperty("PYTORCH_FLAVOR");
if (flavor == null || flavor.isEmpty()) {
if (CudaUtils.getGpuCount() > 0) {
flavor = "cu" + CudaUtils.getCudaVersionString() + "-precxx11";
} else if ("linux".equals(platform.getOsPrefix())) {
flavor = "cpu-precxx11";
} else {
flavor = "cpu";
}
}
}
LibTorch(Path dir, Platform platform, String flavor) {
this.dir = dir;
this.version = platform.getVersion();
this.apiVersion = platform.getApiVersion();
this.classifier = platform.getClassifier();
this.flavor = flavor;
}
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/jni/PyTorchLibrary.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Set;
/** A class containing utilities to interact with the PyTorch Engine's JNI layer. */
final class PyTorchLibrary {
static final PyTorchLibrary LIB = new PyTorchLibrary();
private PyTorchLibrary() {}
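    // All long-typed parameters and return values below are opaque native handles
    // (tensors, modules, or IValues) managed by the JNI layer.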
native boolean torchIsGradMode();
native void torchSetGradMode(boolean enable);
native int torchGetNumInteropThreads();
native int torchGetNumThreads();
native void torchSetNumInteropThreads(int threads);
native void torchSetNumThreads(int threads);
native void torchSetBenchmarkCuDNN(boolean enable);
native void torchManualSeed(long seed);
native void torchShowConfig(Set<String> set);
native void torchStartProfile(boolean useCuda, boolean recordShape, boolean profileMemory);
native void torchStopProfile(String outputFile);
native long[] torchSizes(long handle);
native byte[] torchDataPtr(long handle);
native ByteBuffer torchDirectByteBuffer(long handle);
native boolean torchIsContiguous(long handle);
native long torchToContiguous(long handle);
native int torchDType(long handle);
native int[] torchDevice(long handle);
native int torchLayout(long handle);
native long torchTo(long handle, int dType, int[] device);
native long torchGetItem(long handle, long index);
native long torchGetItem(long handle, long[] indices);
native long torchToSparse(long handle);
native long torchToDense(long handle);
native long tensorClone(long handle);
native void torchCudaEmptyCache();
native long torchEmpty(long[] shape, int dType, int layout, int[] device, boolean requiredGrad);
native long torchZeros(long[] shape, int dType, int layout, int[] device, boolean requiredGrad);
native long torchOnes(long[] shape, int dType, int layout, int[] device, boolean requiredGrad);
native long torchFull(
long[] shape,
double fillValue,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native long torchZerosLike(
long handle, int dType, int layout, int[] device, boolean requiredGrad);
native long torchOnesLike(
long handle, int dType, int layout, int[] device, boolean requiredGrad);
native long torchSparseCoo(
long[] shape, long indicesHandle, long valueHandle, boolean requiredGrad);
native long torchArange(
float start,
float end,
float step,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native long torchLinspace(
float start,
float end,
int step,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native long torchAdd(long self, long other);
native void torchAddi(long self, long other);
native long torchExpand(long self, long[] shape);
native long torchSub(long self, long other);
native void torchSubi(long self, long other);
native long torchMul(long self, long other);
native void torchMuli(long self, long other);
native long torchTrueDivide(long self, long other);
native void torchTrueDividei(long self, long other);
native long torchRemainder(long self, long other);
native void torchRemainderi(long self, long other);
native long torchRot90(long self, long k, long[] axes);
native long torchPow(long self, long exponent);
native void torchPowi(long self, long exponent);
native long torchSign(long self);
native void torchSigni(long self);
native long torchMatmul(long self, long other);
native long torchBmm(long self, long other);
native long torchXLogY(long self, long other);
native long torchDot(long self, long other);
native long torchLogicalAnd(long self, long other);
native long torchLogicalOr(long self, long other);
native long torchLogicalXor(long self, long other);
native long torchLogicalNot(long handle);
native long torchPad(long handle, long[] shape, double value);
native long torchReshape(long handle, long[] shape);
native long torchSoftmax(long handle, long dim, int dType);
native long torchLogSoftmax(long handle, long dim, int dType);
native long torchArgMax(long handle);
native long torchArgMax(long handle, long dim, boolean keepDim);
native long[] torchTopK(long handle, long k, long axis, boolean largest, boolean sorted);
native long torchArgMin(long handle);
native long torchArgMin(long handle, long dim, boolean keepDim);
native long torchArgSort(long handle, long dim, boolean keepDim);
native long torchSort(long handle, long dim, boolean descending);
native long torchPermute(long handle, long[] dims);
native long torchFlip(long handle, long[] dims);
native long torchTranspose(long handle, long axis1, long axis2);
native boolean contentEqual(long handle1, long handle2);
native long torchFromBlob(
ByteBuffer data,
long[] shape,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native long torchIndex(long handle, long[] minIndices, long[] maxIndices, long[] stepIndices);
native void torchIndexPut(
long handle,
long valueHandle,
long[] minIndices,
long[] maxIndices,
long[] stepIndices);
native void torchIndexAdvPut(long handle, long torchIndexHandle, long data);
native void torchSet(long handle, ByteBuffer data);
native long torchSlice(long handle, long dim, long start, long end, long step);
native long torchGather(long handle, long index, long dim, boolean sparseGrad);
native long torchTake(long handle, long index);
native long torchPut(long handle, long index, long value);
native long torchScatter(long handle, long index, long value, int axis);
native long torchMaskedSelect(long handle, long maskHandle);
native void torchMaskedPut(long handle, long valueHandle, long maskHandle);
native void torchDeleteTensor(long handle);
native void torchDeleteIndex(long handle);
native void torchDeleteModule(long handle);
native void torchDeleteIValue(long handle);
native long torchMaximum(long self, long other);
native long torchMax(long handle);
native long torchMax(long handle, long dim, boolean keepDim);
native long torchMinimum(long self, long other);
native long[] torchMedian(long self, long dim, boolean keepDim);
native long torchMin(long handle);
native long torchMin(long handle, long dim, boolean keepDim);
native long torchMean(long handle);
native long torchMean(long handle, long dim, boolean keepDim);
native long torchSum(long handle);
native long torchSum(long handle, long[] dim, boolean keepDim);
native long torchCumProd(long handle, long dim, int dtype);
native long torchProd(long handle);
native long torchProd(long handle, long dim, boolean keepDim);
native long torchCumSum(long handle, long dim);
native long torchDiagonal(long handle, long offset, long axis1, long axis2);
native long torchFlatten(long handle, long startDim, long endDim);
native long torchFft(long handle, long length, long axis);
native long torchIfft(long handle, long length, long axis);
native long torchRfft(long handle, long length, long axis);
native long torchIrfft(long handle, long length, long axis);
native long torchStft(
long handle,
long nFft,
long hopLength,
long windowHandle,
boolean center,
boolean normalize,
boolean returnComplex);
native long torchFft2(long handle, long[] sizes, long[] axes);
native long torchIfft2(long handle, long[] sizes, long[] axes);
native long torchViewAsReal(long handle);
native long torchViewAsComplex(long handle);
native long conj(long handle);
native long[] torchSplit(long handle, long size, long dim);
native long[] torchSplit(long handle, long[] indices, long dim);
native long torchUnsqueeze(long handle, long dim);
native long torchSqueeze(long handle);
native long torchSqueeze(long handle, long axis);
native long[] torchUnique(
long handle, long dim, boolean sorted, boolean returnInverse, boolean returnCounts);
native long torchStack(long[] handles, long dim);
native long torchCat(long[] handles, long dim);
native long torchRepeat(long handle, long[] repeats);
native long torchRepeatInterleave(long handle, long repeat, long axis);
native long torchAbs(long handle);
native long torchSquare(long self);
native long torchFloor(long handle);
native long torchCeil(long handle);
native long torchClamp(long handle, long min, long max);
native long torchRound(long handle);
native long torchTrunc(long handle);
native long torchExp(long handle);
native long torchLgamma(long handle);
native long torchLog(long handle);
native long torchLog10(long handle);
native long torchLog2(long handle);
native long torchSin(long handle);
native long torchCos(long handle);
native long torchTan(long handle);
native long torchASin(long handle);
native long torchAcos(long handle);
native long torchAtan(long handle);
native long torchAtan2(long self, long other);
native long torchSqrt(long handle);
native long torchSinh(long handle);
native long torchCosh(long handle);
native long torchTanh(long handle);
native long torchSigmoid(long handle);
native long torchWhere(long handle, long x, long y);
native long torchAll(long self);
native long torchAny(long self);
native long torchNone(long self);
native long torchEq(long self, long other);
native long torchNeq(long self, long other);
native long torchGt(long self, long other);
native long torchGte(long self, long other);
native long torchLt(long self, long other);
native long torchLte(long self, long other);
native long torchNeg(long self);
native void torchNegi(long self);
native long torchIsNaN(long self);
native long torchIsInf(long self);
native long torchRandint(
long low,
long high,
long[] sizes,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native long torchRandPerm(long n, int dType, int layout, int[] device, boolean requiredGrad);
native long torchNormal(
double mean,
double std,
long[] sizes,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native long tensorUniform(
double from,
double to,
long[] sizes,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native long torchEye(int n, int m, int dType, int layout, int[] device, boolean requiredGrad);
native long torchHannWindow(long nfft, boolean periodic, int[] device);
native long torchErfinv(long handle);
native long torchErf(long handle);
native long torchInverse(long self);
native long torchNNInterpolate(long handle, long[] size, int mode, boolean alignCorners);
native long torchNNLinear(long handle, long weightHandle, long biasHandle);
native long torchNNEmbedding(long handle, long weightHandle, boolean sparse);
native long torchNNRelu(long handle);
native long torchNNSoftPlus(long handle);
native long torchNNSoftSign(long handle);
native long torchNNLeakyRelu(long handle, double negativeSlope);
native long torchNNElu(long handle, double alpha);
native long torchNNSelu(long handle);
native long torchNNGelu(long handle);
native long torchNNConvNd(
long inputHandle,
long weightHandle,
long biasHandle,
long[] stride,
long[] padding,
long[] dilation,
int groups);
native long torchNNDropout(long inputHandle, double probability, boolean isTrain);
native long torchNNNormalize(long inputHandle, double p, long dim, double eps);
native long torchNNLayerNorm(
long inputHandle,
long[] normalizedShape,
long weightHandle,
long biasHandle,
double eps);
native long torchNNBatchNorm(
long inputHandle,
long runningMeanHandle,
long runningVarHandle,
long weightHandle,
long biasHandle,
boolean training,
double momentum,
double eps);
native long[] torchNNRnn(
long inputHandle,
long hxHandle,
long[] paramHandles,
boolean hasBiases,
int numLayers,
int activation,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst);
native long[] torchNNGru(
long inputHandle,
long hxHandle,
long[] paramHandles,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst);
native long[] torchNNLstm(
long inputHandle,
long[] hxHandles,
long[] paramHandles,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst);
native long torchNNAvgPool(
long inputHandle,
long[] kernel,
long[] stride,
long[] pad,
boolean useCeil,
boolean countIncludePad);
native long torchNNMaxPool(
long inputHandle, long[] kernelSize, long[] stride, long[] padding, boolean ceilMode);
native long torchNNAdaptiveAvgPool(long inputHandle, long[] outputSize);
native long torchNNAdaptiveMaxPool(long inputHandle, long[] outputSize);
native long torchNNLpPool(
long inputHandle, double normType, long[] kernelSize, long[] stride, boolean ceilMode);
native long torchNNOneHot(long inputHandle, int depth);
native boolean torchRequiresGrad(long inputHandle);
native String torchGradFnName(long inputHandle);
native void torchAttachGrad(long inputHandle, boolean requiresGrad);
native long torchGrad(long inputHandle);
native long torchDetachGrad(long inputHandle);
native void torchBackward(
long inputHandle, long gradHandle, boolean keepGraph, boolean createGraph);
native long moduleLoad(
String path,
int[] device,
boolean mapLocation,
String[] extraFileNames,
String[] extraFileValues,
boolean trainParam);
native long moduleLoad(
InputStream is, int[] device, boolean mapLocation, byte[] buffer, long size);
native void moduleEval(long handle);
native void moduleTrain(long handle);
native long moduleRunMethod(
long moduleHandle,
String methodName,
long[] iValueHandles,
boolean isTrain,
boolean separateCudaStream);
native void setGraphExecutorOptimize(boolean enabled);
native void moduleWrite(long moduleHandle, OutputStream os, byte[] buffer, boolean writeSize);
native long[] moduleGetParams(long moduleHandle);
native String[] moduleGetParamNames(long moduleHandle);
native String[] moduleGetMethodNames(long moduleHandle);
native long iValueFromTensor(long tensorHandle);
native long iValueFromBool(boolean value);
native long iValueFromLong(long value);
native long iValueFromDouble(double value);
native long iValueFromString(String value);
native long iValueFromBoolList(boolean... value);
native long iValueFromLongList(long... value);
native long iValueFromDoubleList(double... value);
native long iValueFromTensorList(long[] tensorHandles);
native long iValueFromList(long[] ivalueHandles);
native long iValueFromTuple(long[] ivalueHandles);
native long iValueFromStringMap(String[] keys, long[] tensorHandles);
native long iValueFromStringIValueMap(String[] keys, long[] tensorHandles);
native long iValueToTensor(long iValueHandle);
native boolean iValueToBool(long iValueHandle);
native long iValueToLong(long iValueHandle);
native double iValueToDouble(long iValueHandle);
native String iValueToString(long iValueHandle);
native boolean[] iValueToBoolList(long iValueHandle);
native long[] iValueToLongList(long iValueHandle);
native double[] iValueToDoubleList(long iValueHandle);
native long[] iValueToTensorList(long iValueHandle);
native long[] iValueToIValueList(long iValueHandle);
native long[] iValueToIValueTuple(long iValueHandle);
native long[] iValueToMap(long iValueHandle);
native String iValueGetType(long iValueHandle);
native boolean iValueIsTensor(long iValueHandle);
native boolean iValueIsBool(long iValueHandle);
native boolean iValueIsLong(long iValueHandle);
native boolean iValueIsDouble(long iValueHandle);
native boolean iValueIsString(long iValueHandle);
native boolean iValueIsBoolList(long iValueHandle);
native boolean iValueIsLongList(long iValueHandle);
native boolean iValueIsDoubleList(long iValueHandle);
native boolean iValueIsTensorList(long iValueHandle);
native boolean iValueIsList(long iValueHandle);
native boolean iValueIsTuple(long iValueHandle);
native boolean iValueIsMap(long iValueHandle);
native void zeroGrad(long handle);
native void adamUpdate(
long weight,
long grad,
long mean,
long variance,
float lr,
float learningRateBiasCorrection,
float wd,
float rescaleGrad,
float clipGrad,
float beta1,
float beta2,
float eps,
boolean adamw);
native void sgdUpdate(
long weight,
long grad,
long state,
float lr,
float wd,
float rescaleGrad,
float clipGrad,
float momentum);
native long torchNorm(long handle, int ord, long[] axis, boolean keepDims);
native long torchNonZeros(long handle);
native long torchIndexInit(int size);
native long torchIndexAdvGet(long handle, long torchIndexHandle);
native void torchIndexAppendNoneEllipsis(long torchIndexHandle, boolean isEllipsis);
native void torchIndexAppendSlice(
long torchIndexHandle, long min, long max, long step, int nullSliceBinary);
native void torchIndexAppendFixed(long torchIndexHandle, long idx);
native void torchIndexAppendArray(long torchIndexHandle, long arrayHandle);
native long torchDiff(long self, int n, int dim);
}
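/*
 * Usage note (illustrative, not part of the original source): every long
 * returned by these natives is an opaque handle to native memory, and the
 * caller must release it with the matching delete call. A minimal sketch,
 * assuming an instance `lib` of this class and integer codes dType, layout
 * and device as used throughout (see torchDType/torchLayout/torchDevice;
 * the concrete values are engine-specific and are assumptions here):
 *
 *   long a = lib.torchOnes(new long[] {2, 2}, dType, layout, device, false);
 *   long b = lib.torchOnes(new long[] {2, 2}, dType, layout, device, false);
 *   long c = lib.torchAdd(a, b);   // returns a new tensor handle
 *   lib.torchDeleteTensor(c);      // free every handle exactly once
 *   lib.torchDeleteTensor(b);
 *   lib.torchDeleteTensor(a);
 */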
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine/0.34.0/ai/djl/pytorch/jni/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying PyTorch Engine. */
package ai.djl.pytorch.jni;
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtDeviceType.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.Device;
import ai.djl.DeviceType;
/** DeviceType is the PyTorch equivalent of the types in {@link Device}. */
public final class PtDeviceType implements DeviceType {
private PtDeviceType() {}
/**
* Converts a {@link Device} to the corresponding PyTorch device number.
*
* @param device the java {@link Device}
* @return the PyTorch device number
*/
public static int toDeviceType(Device device) {
String deviceType = device.getDeviceType();
if (Device.Type.CPU.equals(deviceType)) {
return 0;
} else if (Device.Type.GPU.equals(deviceType)) {
return 1;
} else {
throw new IllegalArgumentException("Unsupported device: " + device.toString());
}
}
/**
* Converts from a PyTorch device number to the corresponding {@link Device}.
*
* @param deviceType the PyTorch device number
* @return the corresponding {@link Device}
*/
public static String fromDeviceType(int deviceType) {
switch (deviceType) {
case 0:
return Device.Type.CPU;
case 1:
return Device.Type.GPU;
default:
throw new IllegalArgumentException("Unsupported deviceType: " + deviceType);
}
}
}
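/*
 * Illustrative round trip (not part of the original source), assuming the
 * standard ai.djl.Device factory methods:
 *
 *   int cpu = PtDeviceType.toDeviceType(Device.cpu());   // 0
 *   int gpu = PtDeviceType.toDeviceType(Device.gpu());   // 1
 *   String type = PtDeviceType.fromDeviceType(1);        // Device.Type.GPU
 *
 * Any other device or device number throws IllegalArgumentException.
 */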
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtEngine.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.ndarray.NDManager;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.pytorch.jni.LibUtils;
import ai.djl.training.GradientCollector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The {@code PtEngine} is an implementation of the {@link Engine} based on the <a
* href="https://pytorch.org/">PyTorch Deep Learning Framework</a>.
*
* <p>To get an instance of the {@code PtEngine} when it is not the default Engine, call {@link
* Engine#getEngine(String)} with the Engine name "PyTorch".
*/
public final class PtEngine extends Engine {
private static final Logger logger = LoggerFactory.getLogger(PtEngine.class);
public static final String ENGINE_NAME = "PyTorch";
private PtEngine() {}
static Engine newInstance() {
try {
LibUtils.loadLibrary();
Integer interOpThreads = Integer.getInteger("ai.djl.pytorch.num_interop_threads");
if (interOpThreads != null) {
JniUtils.setNumInteropThreads(interOpThreads);
}
Integer intraOpThreads = Integer.getInteger("ai.djl.pytorch.num_threads");
if (intraOpThreads != null) {
JniUtils.setNumThreads(intraOpThreads);
}
logger.info("Number of inter-op threads is {}", JniUtils.getNumInteropThreads());
logger.info("Number of intra-op threads is {}", JniUtils.getNumThreads());
return new PtEngine();
} catch (Throwable t) {
logger.warn("Failed to load PyTorch native library", t);
}
return null;
}
/** {@inheritDoc} */
@Override
public String getEngineName() {
return ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public String getVersion() {
return "1.6.0";
}
/** {@inheritDoc} */
@Override
public boolean hasCapability(String capability) {
return JniUtils.getFeatures().contains(capability);
}
/** {@inheritDoc} */
@Override
public Model newModel(String name, Device device) {
return new PtModel(name, device);
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager() {
return PtNDManager.getSystemManager().newSubManager();
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager(Device device) {
return PtNDManager.getSystemManager().newSubManager(device);
}
/** {@inheritDoc} */
@Override
public GradientCollector newGradientCollector() {
return new PtGradientCollector();
}
/** {@inheritDoc} */
@Override
public void setRandomSeed(int seed) {
JniUtils.setSeed(seed);
}
}
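/*
 * A minimal sketch of obtaining this engine when it is not the default
 * (per the class javadoc); the two system properties are the ones read in
 * newInstance() and must be set before the engine is first created:
 *
 *   System.setProperty("ai.djl.pytorch.num_threads", "4");
 *   System.setProperty("ai.djl.pytorch.num_interop_threads", "2");
 *   Engine engine = Engine.getEngine("PyTorch");
 *   NDManager manager = engine.newBaseManager();
 */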
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtEngineProvider.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code PtEngineProvider} is the PyTorch implementation of {@link EngineProvider}. */
public class PtEngineProvider implements EngineProvider {
private static Engine engine;
/** {@inheritDoc} */
@Override
public synchronized Engine getEngine() {
if (engine == null) {
engine = PtEngine.newInstance();
}
return engine;
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtGradientCollector.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.ndarray.NDArray;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.training.GradientCollector;
/** {@code PtGradientCollector} is the PyTorch implementation of {@link GradientCollector}. */
public class PtGradientCollector implements GradientCollector {
/** {@inheritDoc} */
@Override
public void backward(NDArray target) {
// TODO manager should create the new NDArray on the same device
NDArray grad =
target.getManager()
.ones(target.getShape(), target.getDataType())
.toDevice(target.getDevice(), false);
backward(target, grad, false, false);
}
/**
* Computes the gradients of the NDArray w.r.t variables.
*
* @param target the target/head array to run backward on
* @param grad The “vector” in the Jacobian-vector product, usually gradients w.r.t. each
* element of corresponding tensors
* @param keepGraph whether to retain the computation graph for another backward pass on the
* same graph. By default the computation history is cleared.
* @param createGraph if true, the graph of the derivative will be constructed, allowing
*     higher-order derivative products to be computed. Defaults to false.
*/
private void backward(NDArray target, NDArray grad, boolean keepGraph, boolean createGraph) {
JniUtils.backward((PtNDArray) target, (PtNDArray) grad, keepGraph, createGraph);
}
/** {@inheritDoc} */
@Override
public void close() {
// TODO: do some clean up if necessary
}
}
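/*
 * A minimal autograd sketch (illustrative, not part of the original source),
 * using only APIs visible in this package: attachGradient() marks an array
 * as requiring gradients, backward() runs the backward pass with an implicit
 * ones gradient, and getGradient() reads the accumulated result.
 *
 *   try (NDManager manager = PtNDManager.getSystemManager().newSubManager();
 *           GradientCollector collector = new PtGradientCollector()) {
 *       NDArray x = manager.create(new float[] {1f, 2f, 3f});
 *       x.attachGradient();
 *       NDArray y = x.mul(x).sum();      // y = sum(x^2)
 *       collector.backward(y);
 *       NDArray grad = x.getGradient();  // dy/dx = 2x = {2, 4, 6}
 *   }
 */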
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtModel.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.BaseModel;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.inference.Predictor;
import ai.djl.ndarray.types.DataType;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.training.initializer.Initializer;
import ai.djl.translate.Translator;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* {@code PtModel} is the PyTorch implementation of {@link Model}.
*
* <p>PtModel contains all the methods in Model to load and process a model. In addition, it
* provides PyTorch-specific functionality.
*/
public class PtModel extends BaseModel {
/**
* Constructs a new Model on a given device.
*
* @param name the model name
* @param device the device the model should be located on
*/
PtModel(String name, Device device) {
super(name);
device = Device.defaultIfNull(device);
manager = PtNDManager.getSystemManager().newSubManager(device);
dataType = DataType.FLOAT32;
}
/** {@inheritDoc} */
@Override
public void load(Path modelPath, String prefix, Map<String, Object> options)
throws IOException, MalformedModelException {
modelDir = modelPath.toAbsolutePath();
if (prefix == null) {
prefix = modelName;
}
if (block == null) {
Path modelFile = findModelFile(prefix);
if (modelFile == null) {
modelFile = findModelFile(modelDir.toFile().getName());
if (modelFile == null) {
throw new FileNotFoundException(".pt file not found in: " + modelDir);
}
}
block = JniUtils.loadModule((PtNDManager) manager, modelFile, manager.getDevice());
} else {
Path paramFile = paramPathResolver(prefix, options);
if (paramFile == null) {
throw new IOException(
"Parameter file not found in: "
+ modelDir
+ ". If you only specified model path, make sure path name match"
+ "your saved model file name.");
}
readParameters(paramFile, options);
}
}
private Path findModelFile(String prefix) {
Path modelFile = modelDir.resolve(prefix);
if (Files.notExists(modelFile) || !Files.isRegularFile(modelFile)) {
if (prefix.endsWith(".pt")) {
return null;
}
modelFile = modelDir.resolve(prefix + ".pt");
if (Files.notExists(modelFile) || !Files.isRegularFile(modelFile)) {
return null;
}
}
return modelFile;
}
/** {@inheritDoc} */
@Override
public Trainer newTrainer(TrainingConfig trainingConfig) {
Initializer initializer = trainingConfig.getInitializer();
if (block == null) {
throw new IllegalStateException(
"You must set a block for the model before creating a new trainer");
}
block.setInitializer(initializer);
return new Trainer(this, trainingConfig);
}
/** {@inheritDoc} */
@Override
public <I, O> Predictor<I, O> newPredictor(Translator<I, O> translator) {
// TODO: modify copy
return new Predictor<>(this, translator, false);
}
/** {@inheritDoc} */
@Override
public String[] getArtifactNames() {
try {
List<Path> files =
Files.walk(modelDir).filter(Files::isRegularFile).collect(Collectors.toList());
List<String> ret = new ArrayList<>(files.size());
for (Path path : files) {
String fileName = path.toFile().getName();
if (fileName.endsWith(".pt")) {
// ignore model files.
continue;
}
Path relative = modelDir.relativize(path);
ret.add(relative.toString());
}
return ret.toArray(new String[0]);
} catch (IOException e) {
throw new AssertionError("Failed list files", e);
}
}
/** {@inheritDoc} */
@Override
public void cast(DataType dataType) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void close() {
manager.close();
}
}
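/*
 * A minimal loading sketch (illustrative; the model name and path are
 * hypothetical): load() resolves a TorchScript ".pt" file under the model
 * directory using the given prefix, or the model name when the prefix is
 * null, as implemented in findModelFile() above.
 *
 *   Engine engine = Engine.getEngine("PyTorch");
 *   Model model = engine.newModel("resnet18", null);
 *   model.load(Paths.get("/path/to/model/dir"), "resnet18", null);
 *   // ... create a predictor, run inference ...
 *   model.close();
 */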
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtNDArray.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.Device;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.internal.NDFormat;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.pytorch.jni.NativeResource;
import ai.djl.pytorch.jni.Pointer;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/** {@code PtNDArray} is the PyTorch implementation of {@link NDArray}. */
public class PtNDArray extends NativeResource implements NDArray {
private static final int MAX_SIZE = 100;
private static final int MAX_DEPTH = 10;
private static final int MAX_ROWS = 10;
private static final int MAX_COLUMNS = 20;
private String name;
private Device device;
private DataType dataType;
private Shape shape;
private SparseFormat sparseFormat;
// use Boolean object to maintain three status: null, false, true
private Boolean hasGradient;
private PtNDManager manager;
private PtNDArrayEx ptNDArrayEx;
/**
* Constructs a PtNDArray from a native handle (internal; use {@link NDManager} instead).
*
* @param manager the manager to attach the new array to
* @param handle the pointer to the native PyTorch memory
*/
PtNDArray(PtNDManager manager, Pointer handle) {
super(handle);
this.manager = manager;
this.ptNDArrayEx = new PtNDArrayEx(this);
manager.attach(getUid(), this);
}
/** {@inheritDoc} */
@Override
public PtNDManager getManager() {
return manager;
}
/** {@inheritDoc} */
@Override
public String getName() {
return name;
}
/** {@inheritDoc} */
@Override
public void setName(String name) {
this.name = name;
}
/** {@inheritDoc} */
@Override
public DataType getDataType() {
if (dataType == null) {
dataType = JniUtils.getDataType(this);
}
return dataType;
}
/** {@inheritDoc} */
@Override
public Device getDevice() {
if (device == null) {
device = JniUtils.getDevice(this);
}
return device;
}
/** {@inheritDoc} */
@Override
public Shape getShape() {
if (shape == null) {
shape = JniUtils.getShape(this);
}
return shape;
}
/** {@inheritDoc} */
@Override
public SparseFormat getSparseFormat() {
if (sparseFormat == null) {
sparseFormat = JniUtils.getSparseFormat(this);
}
return sparseFormat;
}
/** {@inheritDoc} */
@Override
public PtNDArray toDevice(Device device, boolean copy) {
return JniUtils.to(this, getDataType(), device, copy);
}
/** {@inheritDoc} */
@Override
public PtNDArray toType(DataType dataType, boolean copy) {
return JniUtils.to(this, dataType, getDevice(), copy);
}
/** {@inheritDoc} */
@Override
public void attachGradient() {
attachGradient(null);
}
/** {@inheritDoc} */
@Override
public void attachGradient(SparseFormat sparseFormat) {
if (sparseFormat != null && !sparseFormat.equals(SparseFormat.DENSE)) {
throw new UnsupportedOperationException(
"Sparse NDArray gradient atttach not supported");
}
JniUtils.attachGradient(this);
hasGradient = true;
}
/** {@inheritDoc} */
@Override
public PtNDArray getGradient() {
if (!hasGradient()) {
throw new IllegalStateException(
"No gradient attached to this NDArray, please call array.requiredGradient()"
+ "on your NDArray or block.setInitializer() on your Block");
}
PtNDArray res = JniUtils.getGradient(this);
// If you call getGradient() before you run the backward,
// you will get nothing in PyTorch engine.
// To align with MXNet's behavior, we will create a zeros NDArray.
// TODO should we access the grad NDArray after we close the parameter NDArray?
if (res == null) {
res = (PtNDArray) getManager().zeros(getShape());
}
return res;
}
/** {@inheritDoc} */
@Override
public boolean hasGradient() {
if (hasGradient == null) {
hasGradient = JniUtils.requiresGrad(this);
}
return hasGradient;
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer() {
return JniUtils.getByteBuffer(this);
}
/** {@inheritDoc} */
@Override
public void set(Buffer data) {
PtNDArray other = getManager().create(data, getShape(), getDataType());
JniUtils.set(this, other);
other.close();
}
/** {@inheritDoc} */
@Override
public void copyTo(NDArray array) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDManager attach(NDManager manager) {
detach();
NDManager original = this.manager;
this.manager = (PtNDManager) manager;
manager.attach(getUid(), this);
return original;
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detach(getUid());
manager = PtNDManager.getSystemManager();
}
/** {@inheritDoc} */
@Override
public NDArray duplicate() {
return JniUtils.clone(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray booleanMask(NDArray index, int axis) {
Shape indexShape = index.getShape();
if (indexShape.equals(getShape())) {
// Result is flattened since shape is undetermined
return JniUtils.booleanMask(this, (PtNDArray) index);
} else if (indexShape.equals(getShape().slice(axis))) {
// index will be broadcasted by default
try (PtNDArray flattedResult = JniUtils.booleanMask(this, (PtNDArray) index)) {
// Shape recovery
Shape remainder = getShape().slice(0, axis);
long selectedSize = flattedResult.getShape().size() / remainder.size();
return flattedResult.reshape(remainder.addAll(new Shape(selectedSize)));
}
} else {
throw new UnsupportedOperationException(
"Not supported for shape not broadcastable "
+ indexShape.toString()
+ " vs "
+ getShape().toString());
}
}
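// Worked example (illustrative, not part of the original source): masking a
// (2, 3) array with a (3,) boolean index along axis 1 that selects k columns
// yields a flattened (2 * k,) result, which the branch above reshapes back to
// (2, k) via remainder = (2) and selectedSize = k.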
/** {@inheritDoc} */
@Override
public NDArray sequenceMask(NDArray sequenceLength, float value) {
throw new UnsupportedOperationException("Not implemented yet");
}
/** {@inheritDoc} */
@Override
public NDArray sequenceMask(NDArray sequenceLength) {
throw new UnsupportedOperationException("Not implemented yet");
}
/** {@inheritDoc} */
@Override
public PtNDArray zerosLike() {
return JniUtils.zerosLike(this, getDataType(), getDevice(), SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public PtNDArray onesLike() {
return JniUtils.onesLike(this, getDataType(), getDevice(), SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public boolean contentEquals(Number number) {
return JniUtils.contentEqual(this, (PtNDArray) manager.create(number));
}
/** {@inheritDoc} */
@Override
public boolean contentEquals(NDArray other) {
if (other == null || (!shapeEquals(other))) {
return false;
}
if (getDataType() != other.getDataType()) {
return false;
}
return JniUtils.contentEqual(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray eq(Number n) {
try (NDArray number = manager.create(n)) {
return eq(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray eq(NDArray other) {
return JniUtils.eq(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray neq(Number n) {
try (NDArray number = manager.create(n)) {
return neq(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray neq(NDArray other) {
return JniUtils.neq(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray gt(Number n) {
try (NDArray number = manager.create(n)) {
return gt(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray gt(NDArray other) {
return JniUtils.gt(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray gte(Number n) {
try (NDArray number = manager.create(n)) {
return gte(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray gte(NDArray other) {
return JniUtils.gte(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray lt(Number n) {
try (NDArray number = manager.create(n)) {
return lt(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray lt(NDArray other) {
return JniUtils.lt(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray lte(Number n) {
try (NDArray number = manager.create(n)) {
return lte(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray lte(NDArray other) {
return JniUtils.lte(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray add(Number n) {
try (NDArray number = manager.create(n)) {
return add(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray add(NDArray other) {
return JniUtils.add(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray sub(Number n) {
try (NDArray number = manager.create(n)) {
return sub(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray sub(NDArray other) {
return JniUtils.sub(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray mul(Number n) {
try (NDArray number = manager.create(n)) {
return mul(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray mul(NDArray other) {
return JniUtils.mul(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray div(Number n) {
try (NDArray number = manager.create(n)) {
return div(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray div(NDArray other) {
return JniUtils.div(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray mod(Number n) {
try (NDArray number = manager.create(n)) {
return mod(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray mod(NDArray other) {
return JniUtils.remainder(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray pow(Number n) {
try (NDArray number = manager.create(n)) {
return pow(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray pow(NDArray other) {
return JniUtils.pow(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray addi(Number n) {
try (NDArray number = manager.create(n)) {
return addi(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray addi(NDArray other) {
JniUtils.addi(this, (PtNDArray) other);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray subi(Number n) {
try (NDArray number = manager.create(n)) {
return subi(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray subi(NDArray other) {
JniUtils.subi(this, (PtNDArray) other);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray muli(Number n) {
try (NDArray number = manager.create(n)) {
return muli(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray muli(NDArray other) {
JniUtils.muli(this, (PtNDArray) other);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray divi(Number n) {
try (NDArray number = manager.create(n)) {
return divi(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray divi(NDArray other) {
JniUtils.divi(this, (PtNDArray) other);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray modi(Number n) {
try (NDArray number = manager.create(n)) {
return modi(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray modi(NDArray other) {
JniUtils.remainderi(this, (PtNDArray) other);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray powi(Number n) {
try (NDArray number = manager.create(n)) {
return powi(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray powi(NDArray other) {
JniUtils.powi(this, (PtNDArray) other);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray sign() {
return JniUtils.sign(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray signi() {
JniUtils.signi(this);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray maximum(Number n) {
try (NDArray number = manager.create(n)) {
return maximum(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray maximum(NDArray other) {
return JniUtils.max(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray minimum(Number n) {
try (NDArray number = manager.create(n)) {
return minimum(number);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray minimum(NDArray other) {
return JniUtils.min(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray all() {
try (PtNDArray bool = toType(DataType.BOOLEAN, true)) {
return JniUtils.all(bool);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray any() {
try (PtNDArray bool = toType(DataType.BOOLEAN, true)) {
return JniUtils.any(bool);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray none() {
try (PtNDArray bool = toType(DataType.BOOLEAN, true)) {
return JniUtils.none(bool);
}
}
/** {@inheritDoc} */
@Override
public PtNDArray neg() {
return JniUtils.neg(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray negi() {
JniUtils.negi(this);
return this;
}
/** {@inheritDoc} */
@Override
public PtNDArray abs() {
return JniUtils.abs(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray square() {
return JniUtils.square(this);
}
/** {@inheritDoc} */
@Override
public NDArray sqrt() {
return JniUtils.sqrt(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray cbrt() {
return JniUtils.pow(this, (PtNDArray) getManager().create(1.0 / 3));
}
/** {@inheritDoc} */
@Override
public PtNDArray floor() {
return JniUtils.floor(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray ceil() {
return JniUtils.ceil(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray round() {
return JniUtils.round(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray trunc() {
return JniUtils.trunc(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray exp() {
return JniUtils.exp(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray log() {
return JniUtils.log(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray log10() {
return JniUtils.log10(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray log2() {
return JniUtils.log2(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray sin() {
return JniUtils.sin(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray cos() {
return JniUtils.cos(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray tan() {
return JniUtils.tan(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray asin() {
return JniUtils.asin(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray acos() {
return JniUtils.acos(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray atan() {
return JniUtils.atan(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray sinh() {
return JniUtils.sinh(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray cosh() {
return JniUtils.cosh(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray tanh() {
return JniUtils.tanh(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray asinh() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray acosh() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray atanh() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray toDegrees() {
return mul(180.0).div(Math.PI);
}
/** {@inheritDoc} */
@Override
public PtNDArray toRadians() {
return mul(Math.PI).div(180.0);
}
/** {@inheritDoc} */
@Override
public PtNDArray max() {
return JniUtils.max(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray max(int[] axes, boolean keepDims) {
if (axes.length > 1) {
// TODO fix this
throw new UnsupportedOperationException("Only 1 axis is support!");
}
return JniUtils.max(this, axes[0], keepDims);
}
/** {@inheritDoc} */
@Override
public PtNDArray min() {
return JniUtils.min(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray min(int[] axes, boolean keepDims) {
if (axes.length > 1) {
// TODO fix this
throw new UnsupportedOperationException("Only 1 axis is support!");
}
return JniUtils.min(this, axes[0], keepDims);
}
/** {@inheritDoc} */
@Override
public PtNDArray sum() {
return JniUtils.sum(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray sum(int[] axes, boolean keepDims) {
return JniUtils.sum(this, Arrays.stream(axes).mapToLong(i -> i).toArray(), keepDims);
}
/** {@inheritDoc} */
@Override
public PtNDArray prod() {
return JniUtils.prod(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray prod(int[] axes, boolean keepDims) {
if (axes.length > 1) {
throw new UnsupportedOperationException("Only 1 axis is support!");
}
return JniUtils.prod(this, axes[0], keepDims);
}
/** {@inheritDoc} */
@Override
public PtNDArray mean() {
return JniUtils.mean(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray mean(int[] axes, boolean keepDims) {
if (axes.length > 1) {
// TODO fix this
throw new UnsupportedOperationException("Only 1 axis is support!");
}
return JniUtils.mean(this, axes[0], keepDims);
}
/** {@inheritDoc} */
@Override
public PtNDArray trace(int offset, int axis1, int axis2) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList split(long sections, int axis) {
long size = getShape().get(axis) / sections;
return JniUtils.split(this, size, axis);
}
/** {@inheritDoc} */
@Override
public NDList split(long[] indices, int axis) {
if (indices.length == 0) {
return new NDList(this);
}
List<Long> ptIndex = new ArrayList<>();
ptIndex.add(indices[0]);
for (int i = 1; i < indices.length; i++) {
ptIndex.add(indices[i] - indices[i - 1]);
}
ptIndex.add(size(axis) - indices[indices.length - 1]);
return JniUtils.split(this, ptIndex.stream().mapToLong(i -> i).toArray(), axis);
}
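// Worked example (illustrative): splitting an axis of length 8 at indices
// {2, 5} converts the cut points into section lengths {2, 5 - 2, 8 - 5}
// = {2, 3, 3}, which is the per-chunk form the native split expects.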
/** {@inheritDoc} */
@Override
public PtNDArray flatten() {
return JniUtils.flatten(this, 0, -1);
}
/** {@inheritDoc} */
@Override
public PtNDArray reshape(Shape shape) {
return JniUtils.reshape(this, shape.getShape());
}
/** {@inheritDoc} */
@Override
public PtNDArray expandDims(int axis) {
return JniUtils.unsqueeze(this, axis);
}
/** {@inheritDoc} */
@Override
public PtNDArray squeeze() {
return JniUtils.squeeze(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray squeeze(int axis) {
return JniUtils.squeeze(this, axis);
}
/** {@inheritDoc} */
@Override
public PtNDArray squeeze(int[] axes) {
if (isScalar()) {
if (axes.length > 1 || axes[0] != 0) {
throw new IllegalArgumentException(
"axis " + axes[0] + "is out of bounds for array of dimension 0");
}
return (PtNDArray) duplicate();
}
long[] shapeArr = getShape().getShape();
List<Long> newShape = new ArrayList<>();
Set<Integer> set =
IntStream.of(axes).boxed().collect(Collectors.toCollection(HashSet::new));
// check input
for (int axis : axes) {
if (shapeArr[axis] != 1) {
throw new IllegalArgumentException(
"cannot select an axis to squeeze out which has size not equal to one");
}
}
for (int i = 0; i < shapeArr.length; i++) {
if (!set.contains(i)) {
newShape.add(shapeArr[i]);
}
}
return (PtNDArray) reshape(newShape.stream().mapToLong(i -> i).toArray());
}
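// Worked example (illustrative): squeezing axes {0, 2} of a (1, 3, 1) array
// keeps only dimension 1, yielding shape (3); selecting an axis whose size
// is not 1 throws IllegalArgumentException above.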
/** {@inheritDoc} */
@Override
public PtNDArray logicalAnd(NDArray other) {
return JniUtils.logicalAnd(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray logicalOr(NDArray other) {
return JniUtils.logicalOr(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray logicalXor(NDArray other) {
return JniUtils.logicalXor(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray logicalNot() {
return JniUtils.logicalNot(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray argSort(int axis, boolean ascending) {
if (!ascending) {
throw new UnsupportedOperationException("Only support ascending!");
}
return JniUtils.argSort(this, axis, false);
}
/** {@inheritDoc} */
@Override
public PtNDArray sort() {
return sort(-1);
}
/** {@inheritDoc} */
@Override
public PtNDArray sort(int axis) {
return JniUtils.sort(this, axis, false);
}
/** {@inheritDoc} */
@Override
public PtNDArray softmax(int axis) {
return JniUtils.softmax(this, axis, getDataType());
}
/** {@inheritDoc} */
@Override
public PtNDArray logSoftmax(int axis) {
return JniUtils.logSoftmax(this, axis, getDataType());
}
/** {@inheritDoc} */
@Override
public PtNDArray cumSum() {
// TODO: change default behavior on cumSum
if (isScalar()) {
return (PtNDArray) reshape(1);
}
if (isEmpty()) {
return (PtNDArray) reshape(0);
}
return cumSum(0);
}
/** {@inheritDoc} */
@Override
public PtNDArray cumSum(int axis) {
return JniUtils.cumSum(this, axis);
}
/** {@inheritDoc} */
@Override
public PtNDArray isInfinite() {
return JniUtils.isInf(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray isNaN() {
return JniUtils.isNaN(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray tile(long repeats) {
// zero-dim
if (isEmpty()) {
return (PtNDArray) duplicate();
}
// scalar
int dim = (isScalar()) ? 1 : getShape().dimension();
long[] repeatsArray = new long[dim];
Arrays.fill(repeatsArray, repeats);
return tile(repeatsArray);
}
/** {@inheritDoc} */
@Override
public PtNDArray tile(int axis, long repeats) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray tile(long[] repeats) {
return JniUtils.tile(this, repeats);
}
/** {@inheritDoc} */
@Override
public PtNDArray tile(Shape desiredShape) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray repeat(long repeats) {
// zero-dim
if (isEmpty()) {
return (PtNDArray) duplicate();
}
// scalar
int dim = (isScalar()) ? 1 : getShape().dimension();
long[] repeatsArray = new long[dim];
Arrays.fill(repeatsArray, repeats);
return repeat(repeatsArray);
}
/** {@inheritDoc} */
@Override
public PtNDArray repeat(int axis, long repeats) {
return JniUtils.repeat(this, repeats, axis);
}
/** {@inheritDoc} */
@Override
public PtNDArray repeat(long[] repeats) {
PtNDArray result = this;
for (int dim = 0; dim < repeats.length; dim++) {
PtNDArray temp = result;
result = JniUtils.repeat(result, repeats[dim], dim);
if (temp != this) {
temp.close();
}
}
return result;
}
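// Worked example (illustrative): repeats {2, 1} applied to a (2, 3) array
// repeat-interleaves each dimension in turn, producing shape (4, 3);
// intermediate arrays other than `this` are closed as the loop advances.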
/** {@inheritDoc} */
@Override
public PtNDArray repeat(Shape desiredShape) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray dot(NDArray other) {
int selfDim = this.getShape().dimension();
int otherDim = other.getShape().dimension();
if (selfDim != otherDim || selfDim > 2) {
throw new UnsupportedOperationException(
"Dimension mismatch or high dimensional dot operation is not supported. Please use .matMul instead.");
}
return JniUtils.dot(this, (PtNDArray) other);
}
@Override
public NDArray matMul(NDArray other) {
if (isScalar() || other.isScalar()) {
throw new IllegalArgumentException("scalar is not allowed for matMul()");
}
return JniUtils.matmul(this, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray clip(Number min, Number max) {
return JniUtils.clip(this, min, max);
}
/** {@inheritDoc} */
@Override
public PtNDArray swapAxes(int axis1, int axis2) {
return JniUtils.transpose(this, axis1, axis2);
}
@Override
public NDArray flip(int... axes) {
return JniUtils.flip(this, Arrays.stream(axes).mapToLong(ele -> (long) ele).toArray());
}
/** {@inheritDoc} */
@Override
public PtNDArray transpose() {
int dim = getShape().dimension();
int[] reversedShape = IntStream.range(0, dim).map(i -> dim - i - 1).toArray();
return transpose(reversedShape);
}
/** {@inheritDoc} */
@Override
public PtNDArray transpose(int... axes) {
if (isScalar() && axes.length > 0) {
throw new IllegalArgumentException("axes don't match NDArray");
}
return JniUtils.permute(this, Arrays.stream(axes).mapToLong(i -> i).toArray());
}
/** {@inheritDoc} */
@Override
public PtNDArray broadcast(Shape shape) {
return JniUtils.broadcast(this, shape);
}
/** {@inheritDoc} */
@Override
public PtNDArray argMax() {
if (isEmpty()) {
throw new IllegalArgumentException("attempt to get argMax of an empty NDArray");
}
return JniUtils.argMax(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray argMax(int axis) {
// TODO pytorch bug: https://github.com/pytorch/pytorch/issues/37084
if (isScalar()) {
return (PtNDArray) manager.create(0L);
}
return JniUtils.argMax(this, axis, false);
}
/** {@inheritDoc} */
@Override
public PtNDArray argMin() {
if (isEmpty()) {
throw new IllegalArgumentException("attempt to get argMin of an empty NDArray");
}
return JniUtils.argMin(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray argMin(int axis) {
// TODO pytorch bug: https://github.com/pytorch/pytorch/issues/37084
if (isScalar()) {
return (PtNDArray) manager.create(0L);
}
return JniUtils.argMin(this, axis, false);
}
/** {@inheritDoc} */
@Override
public PtNDArray percentile(Number percentile) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray percentile(Number percentile, int[] axes) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray median() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray median(int[] axes) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray toDense() {
if (!isSparse() && JniUtils.getLayout(this) != 2) {
return (PtNDArray) duplicate();
}
return JniUtils.toDense(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray toSparse(SparseFormat fmt) {
if (fmt == SparseFormat.DENSE) {
throw new IllegalArgumentException("Default type is not allowed");
}
if (fmt != SparseFormat.COO) {
throw new UnsupportedOperationException("Only COO sparse type supported for PyTorch");
}
if (fmt == getSparseFormat()) {
return (PtNDArray) duplicate();
}
return JniUtils.toSparse(this);
}
/** {@inheritDoc} */
@Override
public PtNDArray nonzero() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArrayEx getNDArrayInternal() {
return ptNDArrayEx;
}
/** {@inheritDoc} */
@Override
public String toString() {
if (isReleased()) {
return "This array is already closed";
}
// index operator in toDebugString is not supported for MKLDNN & Sparse layout
if (JniUtils.getLayout(this) != 0) {
try (NDArray tmp = toDense()) {
return NDFormat.format(tmp, MAX_SIZE, MAX_DEPTH, MAX_ROWS, MAX_COLUMNS);
}
}
return toDebugString(MAX_SIZE, MAX_DEPTH, MAX_ROWS, MAX_COLUMNS);
}
/** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
if (obj instanceof PtNDArray) {
return contentEquals((PtNDArray) obj);
}
return false;
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return 0;
}
/** {@inheritDoc} */
@Override
public void close() {
Pointer pointer = handle.getAndSet(null);
if (pointer != null) {
JniUtils.deleteNdArray(pointer);
manager.detach(getUid());
manager = null;
}
}
}
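/*
 * A minimal usage sketch (illustrative, not part of the original source):
 * arrays are created through a PtNDManager, operators return new instances,
 * and closing the manager releases every attached native tensor.
 *
 *   try (NDManager manager = PtNDManager.getSystemManager().newSubManager()) {
 *       NDArray a = manager.ones(new Shape(2, 2));
 *       NDArray b = a.mul(2);   // element-wise, each entry becomes 2
 *       NDArray sum = b.sum();  // scalar containing 8
 *   } // closing the sub-manager releases a, b and sum
 */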
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtNDArrayEx.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.NDUtils;
import ai.djl.ndarray.index.NDArrayIndexer;
import ai.djl.ndarray.internal.NDArrayEx;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.util.PairList;
import java.util.List;
/** {@code PtNDArrayEx} is the PyTorch implementation of the {@link NDArrayEx}. */
public class PtNDArrayEx implements NDArrayEx {
private static final NDArrayIndexer INDEXER = new PtNDArrayIndexer();
private PtNDArray array;
/**
* Constructs a {@code PtNDArrayEx} given a {@link NDArray}.
*
* @param parent the {@link NDArray} to extend
*/
PtNDArrayEx(PtNDArray parent) {
this.array = parent;
}
/** {@inheritDoc} */
@Override
public PtNDArray rdiv(Number n) {
return rdiv(array.getManager().create(n));
}
/** {@inheritDoc} */
@Override
public PtNDArray rdiv(NDArray b) {
return (PtNDArray) b.div(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray rdivi(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rdivi(NDArray b) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rsub(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rsub(NDArray b) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rsubi(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rsubi(NDArray b) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rmod(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rmod(NDArray b) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rmodi(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rmodi(NDArray b) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rpow(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray rpowi(Number n) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray relu() {
return JniUtils.relu(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray sigmoid() {
return JniUtils.sigmoid(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray tanh() {
return JniUtils.tanh(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray softPlus() {
return JniUtils.softPlus(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray softSign() {
return JniUtils.softSign(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray leakyRelu(float alpha) {
return JniUtils.leakyRelu(array, alpha);
}
/** {@inheritDoc} */
@Override
public PtNDArray elu(float alpha) {
return JniUtils.elu(array, alpha);
}
/** {@inheritDoc} */
@Override
public PtNDArray selu() {
return JniUtils.selu(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray gelu() {
return JniUtils.gelu(array);
}
/** {@inheritDoc} */
@Override
public PtNDArray maxPool(Shape kernelShape, Shape stride, Shape padding, boolean ceilMode) {
return JniUtils.maxPool(array, kernelShape, stride, padding, ceilMode);
}
/** {@inheritDoc} */
@Override
public PtNDArray globalMaxPool() {
Shape shape = getPoolShape(array);
try (NDArray temp = JniUtils.adaptiveMaxPool(array, shape)) {
return (PtNDArray) temp.reshape(array.getShape().slice(0, 2));
}
}
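    // Sketch of the shape flow above, assuming NCHW input: adaptive pooling reduces
    // (N, C, H, W) to (N, C, 1, 1), and the reshape to shape.slice(0, 2) yields (N, C).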
/** {@inheritDoc} */
@Override
public PtNDArray avgPool(
Shape kernelShape,
Shape stride,
Shape padding,
boolean ceilMode,
boolean countIncludePad) {
return JniUtils.avgPool(array, kernelShape, stride, padding, ceilMode, countIncludePad);
}
/** {@inheritDoc} */
@Override
public PtNDArray globalAvgPool() {
Shape shape = getPoolShape(array);
try (NDArray temp = JniUtils.adaptiveAvgPool(array, shape)) {
return (PtNDArray) temp.reshape(array.getShape().slice(0, 2));
}
}
/** {@inheritDoc} */
@Override
public PtNDArray lpPool(
float normType, Shape kernelShape, Shape stride, Shape padding, boolean ceilMode) {
if (padding.size() != 0) {
            throw new IllegalArgumentException("padding is not supported by the PyTorch engine");
}
return JniUtils.lpPool(array, normType, kernelShape, stride, ceilMode);
}
/** {@inheritDoc} */
@Override
public PtNDArray globalLpPool(float normType) {
try (NDArray temp =
JniUtils.lpPool(
array, normType, array.getShape().slice(2), getPoolShape(array), false)) {
return (PtNDArray) temp.reshape(array.getShape().slice(0, 2));
}
}
/** {@inheritDoc} */
@Override
public void adadeltaUpdate(
NDList inputs,
NDList weights,
float weightDecay,
float rescaleGrad,
float clipGrad,
float rho,
float epsilon) {
        throw new UnsupportedOperationException(
                "AdaDelta optimizer is not supported by the PyTorch engine!");
}
/** {@inheritDoc} */
@Override
public void adagradUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float epsilon) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void adamUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float beta1,
float beta2,
float epsilon,
boolean lazyUpdate) {
// TODO: Lazy update not used
JniUtils.adamUpdate(
(PtNDArray) inputs.get(0),
(PtNDArray) inputs.get(1),
(PtNDArray) inputs.get(2),
(PtNDArray) inputs.get(3),
learningRate,
weightDecay,
rescaleGrad,
clipGrad,
beta1,
beta2,
epsilon);
        // clear the accumulated gradient on the weight after the update
JniUtils.zeroGrad((PtNDArray) weights.singletonOrThrow());
}
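    // Note (assumption): following the DJL optimizer convention, "inputs" is expected to
    // hold the weight, its gradient, and the two Adam moment buffers at indices 0-3.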
/** {@inheritDoc} */
@Override
public void nagUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float momentum) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void rmspropUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float rho,
float momentum,
float epsilon,
boolean centered) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public void sgdUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float momentum,
boolean lazyUpdate) {
// TODO: Lazy update not used
JniUtils.sgdUpdate(
(PtNDArray) inputs.get(0),
(PtNDArray) inputs.get(1),
(momentum == 0f) ? null : (PtNDArray) inputs.get(2),
learningRate,
weightDecay,
rescaleGrad,
clipGrad,
momentum);
        // clear the accumulated gradient on the weight after the update
JniUtils.zeroGrad((PtNDArray) weights.singletonOrThrow());
}
/** {@inheritDoc} */
@Override
public NDList convolution(
NDArray input,
NDArray weight,
NDArray bias,
Shape stride,
Shape padding,
Shape dilation,
int groups) {
return new NDList(
JniUtils.convolution(
(PtNDArray) input,
(PtNDArray) weight,
(PtNDArray) bias,
stride,
padding,
dilation,
groups));
}
/** {@inheritDoc} */
@Override
public NDList linear(NDArray input, NDArray weight, NDArray bias) {
return new NDList(JniUtils.linear((PtNDArray) input, (PtNDArray) weight, (PtNDArray) bias));
}
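    // A rough sketch of the expected shapes (assumed, matching the usual linear layer):
    //   input (batch, inFeatures) x weight (outFeatures, inFeatures) + bias (outFeatures)
    //   -> output (batch, outFeatures)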
/** {@inheritDoc} */
@Override
public NDList embedding(
NDList inputs,
int numItems,
int embeddingSize,
boolean sparseGrad,
DataType dataType,
PairList<String, Object> additional) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList prelu(NDArray input, NDArray alpha) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList dropout(NDArray input, float rate, boolean training) {
return new NDList(JniUtils.dropout((PtNDArray) input, rate, training));
}
/** {@inheritDoc} */
@Override
public NDList batchNorm(
NDArray input,
NDArray runningMean,
NDArray runningVar,
NDArray gamma,
NDArray beta,
int axis,
float momentum,
float eps,
boolean training) {
// TODO PyTorch will support axis argument
// https://github.com/pytorch/pytorch/issues/21856
if (axis == -1) {
return new NDList(
JniUtils.batchNorm(
(PtNDArray) input,
(PtNDArray) runningMean,
(PtNDArray) runningVar,
(PtNDArray) gamma,
(PtNDArray) beta,
training,
// momentum is defined differently in PyTorch
1f - momentum,
eps));
}
        // swapAxes moves the target axis to dim 1 to simulate BatchNorm with a custom axis
try (NDManager subManager = input.getManager().newSubManager()) {
input.attach(subManager);
NDArray result = input;
result = result.swapAxes(1, axis);
result =
JniUtils.batchNorm(
(PtNDArray) result,
(PtNDArray) runningMean,
(PtNDArray) runningVar,
(PtNDArray) gamma,
(PtNDArray) beta,
training,
// momentum is defined differently in PyTorch
1f - momentum,
eps);
result = result.swapAxes(1, axis);
input.attach(subManager.getParentManager());
result.attach(subManager.getParentManager());
return new NDList(result);
}
}
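    // Sketch of the axis workaround above: for an NHWC input with axis = 3, the first
    // swapAxes(1, 3) moves channels to dim 1 as PyTorch expects, batch norm runs, and the
    // second swapAxes(1, 3) restores the original layout.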
/** {@inheritDoc} */
@Override
public NDList rnn(
NDList inputs,
String mode,
long stateSize,
float dropRate,
int numStackedLayers,
boolean useSequenceLength,
boolean useBidirectional,
boolean stateOutputs,
PairList<String, Object> additional) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList lstm(
NDList inputs,
long stateSize,
float dropRate,
int numStackedLayers,
boolean useSequenceLength,
boolean useBidirectional,
boolean stateOutputs,
double lstmStateClipMin,
double lstmStateClipMax,
PairList<String, Object> additional) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray resize(int width, int height, int interpolation) {
        // create a subManager so intermediate NDArrays are closed automatically
try (NDManager subManager = array.getManager().newSubManager()) {
array.attach(subManager);
NDArray result = array;
if (result.isEmpty()) {
throw new IllegalArgumentException("attempt to resize of an empty NDArray");
}
if (result.getDataType() != DataType.FLOAT32) {
result = result.toType(DataType.FLOAT32, true);
}
int dim = result.getShape().dimension();
if (dim == 3) {
result = result.expandDims(0);
}
result = result.transpose(0, 3, 1, 2);
result =
JniUtils.interpolate(
(PtNDArray) result,
new long[] {height, width},
getInterpolationMode(interpolation),
false)
.transpose(0, 2, 3, 1);
if (dim == 3) {
result = result.squeeze(0);
}
array.attach(subManager.getParentManager());
result.attach(subManager.getParentManager());
return (PtNDArray) result;
}
}
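    // A minimal usage sketch (hypothetical values; expects an HWC or NHWC image tensor):
    //   NDArray img = manager.ones(new Shape(224, 224, 3));
    //   img.getNDArrayInternal().resize(112, 112, 1); // bilinear resize to 112x112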
@Override
public NDArray randomFlipLeftRight() {
throw new UnsupportedOperationException("Not implemented");
}
@Override
public NDArray randomFlipTopBottom() {
throw new UnsupportedOperationException("Not implemented");
}
@Override
public NDArray randomBrightness(float brightness) {
throw new UnsupportedOperationException("Not implemented");
}
@Override
public NDArray randomHue(float hue) {
throw new UnsupportedOperationException("Not implemented");
}
@Override
public NDArray randomColorJitter(
float brightness, float contrast, float saturation, float hue) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArrayIndexer getIndexer() {
return INDEXER;
}
/** {@inheritDoc} */
@Override
public PtNDArray where(NDArray condition, NDArray other) {
        // broadcasting the condition is not supported; its shape must match this array's
if (!condition.getShape().equals(array.getShape())) {
throw new UnsupportedOperationException(
"condition and self shape mismatch, broadcast is not supported");
}
return JniUtils.where((PtNDArray) condition, array, (PtNDArray) other);
}
/** {@inheritDoc} */
@Override
public PtNDArray stack(NDList arrays, int axis) {
NDArray[] srcArray = new NDArray[arrays.size() + 1];
srcArray[0] = array;
System.arraycopy(arrays.toArray(new NDArray[0]), 0, srcArray, 1, arrays.size());
return JniUtils.stack(srcArray, axis);
}
/** {@inheritDoc} */
@Override
public PtNDArray concat(NDList list, int axis) {
NDUtils.checkConcatInput(list);
NDArray[] srcArray = new NDArray[list.size() + 1];
srcArray[0] = array;
System.arraycopy(list.toArray(new NDArray[0]), 0, srcArray, 1, list.size());
return JniUtils.cat(srcArray, axis);
}
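    // Both stack and concat prepend this array; a sketch with hypothetical shapes:
    //   self (2, 3) stacked with one (2, 3) array on axis 0     -> (2, 2, 3)
    //   self (2, 3) concatenated with one (2, 3) array on axis 0 -> (4, 3)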
/** {@inheritDoc} */
@Override
public NDList multiBoxTarget(
NDList inputs,
float iouThreshold,
float ignoreLabel,
float negativeMiningRatio,
float negativeMiningThreshold,
int minNegativeSamples) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList multiBoxPrior(
List<Float> sizes,
List<Float> ratios,
List<Float> steps,
List<Float> offsets,
boolean clip) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList multiBoxDetection(
NDList inputs,
boolean clip,
float threshold,
int backgroundId,
float nmsThreshold,
boolean forceSuppress,
int nmsTopK) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDArray getArray() {
return array;
}
    private Shape getPoolShape(NDArray array) {
        // the number of spatial dimensions is the total dimension minus batch and channel
        switch (array.getShape().dimension() - 2) {
            case 1: // 1-D pooling: (N, C, W)
                return new Shape(1);
            case 2: // 2-D pooling: (N, C, H, W)
                return new Shape(1, 1);
            case 3: // 3-D pooling: (N, C, D, H, W)
                return new Shape(1, 1, 1);
            default:
                throw new IllegalArgumentException("the input dimension should be in [3, 5]");
        }
    }
    // Here is the list of PyTorch C++ interpolation mapping: kNearest, kLinear, kBilinear,
    // kBicubic, kTrilinear, kArea
    private int getInterpolationMode(int interpolation) {
        switch (interpolation) {
            case 0:
                return 0; // nearest -> kNearest
            case 1:
                return 2; // bilinear -> kBilinear
            case 2:
                return 5; // area -> kArea
            case 3:
                return 3; // bicubic -> kBicubic
            default:
                throw new UnsupportedOperationException(
                        "The kind of interpolation is not supported.");
        }
    }
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtNDArrayIndexer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.index.NDArrayIndexer;
import ai.djl.ndarray.index.dim.NDIndexBooleans;
import ai.djl.ndarray.index.full.NDIndexFullPick;
import ai.djl.ndarray.index.full.NDIndexFullSlice;
import ai.djl.ndarray.types.Shape;
import ai.djl.pytorch.jni.JniUtils;
import java.util.Stack;
/** The {@link NDArrayIndexer} used by the {@link PtNDArray}. */
public class PtNDArrayIndexer extends NDArrayIndexer {
/** {@inheritDoc} */
@Override
public NDArray get(NDArray array, NDIndexFullPick fullPick) {
return JniUtils.pick(
(PtNDArray) array, (PtNDArray) fullPick.getIndices(), fullPick.getAxis());
}
/** {@inheritDoc} */
@Override
public NDArray get(NDArray array, NDIndexFullSlice fullSlice) {
long[] min = fullSlice.getMin();
long[] max = fullSlice.getMax();
long[] step = fullSlice.getStep();
try (PtNDArray res = JniUtils.index((PtNDArray) array, min, max, step)) {
return res.squeeze(fullSlice.getToSqueeze());
}
}
/** {@inheritDoc} */
@Override
public void set(NDArray array, NDIndexFullSlice fullSlice, NDArray value) {
Stack<NDArray> prepareValue = new Stack<>();
prepareValue.add(value);
prepareValue.add(prepareValue.peek().toDevice(array.getDevice(), false));
        // Align the value with the slice target, e.g. target (1, 10, 1) and original (10):
        // trim leading dims until the element counts match, then reshape and broadcast
Shape targetShape = fullSlice.getShape();
while (targetShape.size() > value.size()) {
targetShape = targetShape.slice(1);
}
prepareValue.add(prepareValue.peek().reshape(targetShape));
prepareValue.add(prepareValue.peek().broadcast(fullSlice.getShape()));
JniUtils.indexSet(
(PtNDArray) array,
(PtNDArray) prepareValue.peek(),
fullSlice.getMin(),
fullSlice.getMax(),
fullSlice.getStep());
for (NDArray toClean : prepareValue) {
if (toClean != value) {
toClean.close();
}
}
}
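    // Sketch of the reshape/broadcast chain above (hypothetical shapes): for a slice
    // target (2, 10) and a value of shape (10), the target is trimmed to (10), the value
    // reshaped, and then broadcast back to (2, 10) before the native index-put call.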
/** {@inheritDoc} */
@Override
public void set(NDArray array, NDIndexBooleans indices, NDArray value) {
try (NDArray mask = indices.getIndex()) {
JniUtils.booleanMaskSet((PtNDArray) array, (PtNDArray) value, (PtNDArray) mask);
}
}
/** {@inheritDoc} */
@Override
public void set(NDArray array, NDIndexFullSlice fullSlice, Number value) {
set(array, fullSlice, array.getManager().create(value));
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtNDManager.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.pytorch.jni.Pointer;
import ai.djl.util.PairList;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.LongBuffer;
import java.nio.file.Path;
/** {@code PtNDManager} is the PyTorch implementation of {@link NDManager}. */
public class PtNDManager extends BaseNDManager {
private static final PtNDManager SYSTEM_MANAGER = new SystemManager();
private PtNDManager(NDManager parent, Device device) {
super(parent, device);
}
static PtNDManager getSystemManager() {
return SYSTEM_MANAGER;
}
/** {@inheritDoc} */
@Override
public ByteBuffer allocateDirect(int capacity) {
return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
}
    /**
     * Creates a {@link PtNDArray} from the given native memory pointer and attaches it to this
     * manager.
     *
     * @param handle the array's native memory pointer
     * @return the created array
     */
public PtNDArray create(Pointer handle) {
return new PtNDArray(this, handle);
}
/** {@inheritDoc} */
@Override
public PtNDArray create(Shape shape, DataType dataType) {
return JniUtils.createEmptyNdArray(this, shape, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public PtNDArray create(Buffer data, Shape shape, DataType dataType) {
if (data.isDirect() && data instanceof ByteBuffer) {
return JniUtils.createNdFromByteBuffer(
this, (ByteBuffer) data, shape, dataType, SparseFormat.DENSE, device);
}
int size = data.remaining();
        // int8, uint8 and boolean all use ByteBuffer, so the DataType must be given explicitly
DataType inputType = DataType.fromBuffer(data);
int numOfBytes = inputType.getNumOfBytes();
ByteBuffer buf = allocateDirect(size * numOfBytes);
switch (inputType) {
case FLOAT32:
buf.asFloatBuffer().put((FloatBuffer) data);
break;
case FLOAT64:
buf.asDoubleBuffer().put((DoubleBuffer) data);
break;
case UINT8:
case INT8:
case BOOLEAN:
buf.put((ByteBuffer) data);
break;
case INT32:
buf.asIntBuffer().put((IntBuffer) data);
break;
case INT64:
buf.asLongBuffer().put((LongBuffer) data);
break;
case FLOAT16:
default:
throw new AssertionError("Show never happen");
}
return JniUtils.createNdFromByteBuffer(
this, buf, shape, dataType, SparseFormat.DENSE, device);
}
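    // A minimal usage sketch (hypothetical values):
    //   FloatBuffer buf = FloatBuffer.wrap(new float[] {1f, 2f, 3f, 4f});
    //   manager.create(buf, new Shape(2, 2), DataType.FLOAT32); // 2x2 float32 tensor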
/** {@inheritDoc} */
@Override
public NDArray createCSR(Buffer data, long[] indptr, long[] indices, Shape shape) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray createRowSparse(Buffer data, Shape dataShape, long[] indices, Shape shape) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList load(Path path) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray zeros(Shape shape, DataType dataType) {
return JniUtils.createZerosNdArray(this, shape, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray ones(Shape shape, DataType dataType) {
return JniUtils.createOnesNdArray(this, shape, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray full(Shape shape, float value, DataType dataType) {
return JniUtils.full(this, shape, value, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray arange(int start, int stop, int step, DataType dataType) {
return arange((float) start, (float) stop, (float) step, dataType, device);
}
/** {@inheritDoc} */
@Override
public NDArray arange(float start, float stop, float step, DataType dataType) {
if (Math.signum(stop - start) != Math.signum(step)) {
return create(new Shape(0), dataType, device);
}
return JniUtils.arange(this, start, stop, step, dataType, device, SparseFormat.DENSE);
}
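    // Sketch: when the sign of (stop - start) disagrees with the step, the range is empty,
    // e.g. arange(0f, 10f, -1f, DataType.FLOAT32) -> an array of shape (0).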
/** {@inheritDoc} */
@Override
public NDArray eye(int rows, int cols, int k, DataType dataType) {
if (k != 0) {
            throw new UnsupportedOperationException(
                    "a nonzero diagonal index is not supported in PyTorch");
}
return JniUtils.eye(this, rows, cols, dataType, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray linspace(float start, float stop, int num, boolean endpoint) {
if (!endpoint) {
throw new UnsupportedOperationException("endpoint only support true");
}
return JniUtils.linspace(
this, start, stop, num, DataType.FLOAT32, device, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray randomUniform(float low, float high, Shape shape, DataType dataType) {
return JniUtils.uniform(this, low, high, shape, dataType, device);
}
/** {@inheritDoc} */
@Override
public NDArray randomNormal(float loc, float scale, Shape shape, DataType dataType) {
return JniUtils.normal(this, loc, scale, shape, dataType, device);
}
/** {@inheritDoc} */
@Override
public NDArray randomMultinomial(int n, NDArray pValues) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray randomMultinomial(int n, NDArray pValues, Shape shape) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public PtNDManager newSubManager() {
return newSubManager(device);
}
/** {@inheritDoc} */
@Override
public PtNDManager newSubManager(Device device) {
PtNDManager manager = new PtNDManager(this, device);
attach(manager.uid, manager);
return manager;
}
/** {@inheritDoc} */
@Override
public void invoke(
String operation, NDArray[] src, NDArray[] dest, PairList<String, ?> params) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDList invoke(String operation, NDList src, PairList<String, ?> params) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return Engine.getEngine(PtEngine.ENGINE_NAME);
}
/** The SystemManager is the root {@link PtNDManager} of which all others are children. */
private static final class SystemManager extends PtNDManager {
SystemManager() {
super(null, Device.defaultDevice());
}
/** {@inheritDoc} */
@Override
public void attach(String resourceId, AutoCloseable resource) {}
/** {@inheritDoc} */
@Override
public void detach(String resourceId) {}
/** {@inheritDoc} */
@Override
public void close() {}
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/engine/PtSymbolBlock.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.engine;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.BlockList;
import ai.djl.nn.ParameterList;
import ai.djl.nn.SymbolBlock;
import ai.djl.pytorch.jni.IValueUtils;
import ai.djl.pytorch.jni.JniUtils;
import ai.djl.pytorch.jni.NativeResource;
import ai.djl.pytorch.jni.Pointer;
import ai.djl.training.ParameterStore;
import ai.djl.training.initializer.Initializer;
import ai.djl.util.PairList;
import java.io.DataInputStream;
import java.io.DataOutputStream;
/**
* {@code PtSymbolBlock} is the PyTorch implementation of {@link SymbolBlock}.
*
* <p>You can create a {@code PtSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*/
// TODO: Memory handling
public class PtSymbolBlock extends NativeResource implements SymbolBlock {
private PtNDManager manager;
private boolean isTrain;
/**
* Constructs a {@code PtSymbolBlock}.
*
* <p>You can create a {@code PtSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*
* @param manager the manager to use for the block
* @param handle the module handle
*/
public PtSymbolBlock(PtNDManager manager, Pointer handle) {
super(handle);
this.manager = manager;
manager.attach(getUid(), this);
// training mode is on by default
isTrain = true;
}
/** {@inheritDoc} */
@Override
public void close() {
Pointer pointer = handle.getAndSet(null);
if (pointer != null) {
JniUtils.deleteModule(pointer);
manager.detach(getUid());
manager = null;
}
}
/** {@inheritDoc} */
@Override
public void removeLastBlock() {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
/** {@inheritDoc} */
@Override
public NDList forward(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
// TODO refactor the forward to not take ParameterStore
if (isTrain != training) {
isTrain = training;
if (isTrain) {
JniUtils.enableTrainingMode(this);
} else {
JniUtils.enableInferenceMode(this);
}
}
return IValueUtils.forward(this, inputs, training);
}
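    // A rough usage sketch (assumes a TorchScript module is already loaded into this block
    // and "input" is a hypothetical NDArray):
    //   NDList out = block.forward(null, new NDList(input), false, null);
    // The ParameterStore argument is not used by this implementation (see the TODO above).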
/** {@inheritDoc} */
@Override
public void setInitializer(Initializer initializer) {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
/** {@inheritDoc} */
@Override
public void setInitializer(Initializer initializer, String paramName) {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
/** {@inheritDoc} */
@Override
public Shape[] initialize(NDManager manager, DataType dataType, Shape... inputShapes) {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
/** {@inheritDoc} */
@Override
public boolean isInitialized() {
return true;
}
/** {@inheritDoc} */
@Override
public void cast(DataType dataType) {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
/** {@inheritDoc} */
@Override
public void clear() {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeInput() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public BlockList getChildren() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public ParameterList getDirectParameters() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public ParameterList getParameters() {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public Shape getParameterShape(String name, Shape[] inputShapes) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(NDManager manager, Shape[] inputShapes) {
return new Shape[0];
}
/** {@inheritDoc} */
@Override
public void saveParameters(DataOutputStream os) {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
/** {@inheritDoc} */
@Override
public void loadParameters(NDManager manager, DataInputStream is) {
throw new UnsupportedOperationException("Not supported for PyTorch");
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/jni/IValueUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import ai.djl.ndarray.NDList;
import ai.djl.pytorch.engine.PtNDArray;
import ai.djl.pytorch.engine.PtNDManager;
import ai.djl.pytorch.engine.PtSymbolBlock;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** {@code IValueUtils} is a utility class for dealing with IValues in PyTorch. */
public final class IValueUtils {
private IValueUtils() {}
    /**
     * Creates an IValue pointer from an {@link NDArray}.
     *
     * @param array the {@link PtNDArray}
     * @return the IValue {@link Pointer}
     */
public static Pointer toIValuePointer(PtNDArray array) {
return PyTorchLibrary.LIB.iValueCreateFromTensor(array.getHandle());
}
    /**
     * Checks whether the IValue holds a {@link PtNDArray}.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the result
     */
public static boolean isNDArray(Pointer iValueHandle) {
return PyTorchLibrary.LIB.iValueIsTensor(iValueHandle);
}
    /**
     * Checks whether the IValue holds a tensor list ({@link NDList}).
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the result
     */
public static boolean isNDList(Pointer iValueHandle) {
return PyTorchLibrary.LIB.iValueIsTensorList(iValueHandle);
}
    /**
     * Checks whether the IValue holds a list of IValues.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the result
     */
public static boolean isList(Pointer iValueHandle) {
return PyTorchLibrary.LIB.iValueIsList(iValueHandle);
}
    /**
     * Checks whether the IValue holds an IValue tuple.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the result
     */
public static boolean isTuple(Pointer iValueHandle) {
return PyTorchLibrary.LIB.iValueIsTuple(iValueHandle);
}
    /**
     * Checks whether the IValue holds an IValue map.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the result
     */
public static boolean isMap(Pointer iValueHandle) {
return PyTorchLibrary.LIB.iValueIsMap(iValueHandle);
}
    /**
     * Checks whether the IValue holds a String.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the result
     */
public static boolean isString(Pointer iValueHandle) {
return PyTorchLibrary.LIB.iValueIsString(iValueHandle);
}
    /**
     * Extracts a {@link PtNDArray} from the IValue.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @param manager the {@link PtNDManager} that creates the {@link PtNDArray}
     * @return the {@link ai.djl.ndarray.NDArray}
     */
public static PtNDArray toNDArray(Pointer iValueHandle, PtNDManager manager) {
Pointer ndHandle = PyTorchLibrary.LIB.iValueToTensor(iValueHandle);
return manager.create(ndHandle);
}
    /**
     * Extracts an {@link NDList} from the IValue.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @param manager the {@link PtNDManager} that creates each {@link PtNDArray}
     * @return the {@link NDList}
     */
public static NDList toNDList(Pointer iValueHandle, PtNDManager manager) {
Pointer[] ndHandles = PyTorchLibrary.LIB.iValueToTensorList(iValueHandle);
NDList list = new NDList();
for (Pointer handle : ndHandles) {
list.add(manager.create(handle));
}
return list;
}
    /**
     * Extracts a String from the IValue.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the String
     */
public static String toString(Pointer iValueHandle) {
return PyTorchLibrary.LIB.iValueToString(iValueHandle);
}
    /**
     * Extracts an array of IValues from the IValue (list or tuple).
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the IValue array
     */
public static Pointer[] toIValueArray(Pointer iValueHandle) {
if (isTuple(iValueHandle)) {
return PyTorchLibrary.LIB.iValueToListFromTuple(iValueHandle);
}
return PyTorchLibrary.LIB.iValueToList(iValueHandle);
}
    /**
     * Extracts a Map from the IValue.
     *
     * @param iValueHandle the IValue {@link Pointer}
     * @return the IValue Map
     */
public static Map<Pointer, Pointer> toIValueMap(Pointer iValueHandle) {
Pointer[] iValueHandles = PyTorchLibrary.LIB.iValueToMap(iValueHandle);
Map<Pointer, Pointer> map = new ConcurrentHashMap<>();
for (int i = 0; i < iValueHandles.length; i += 2) {
map.put(iValueHandles[i], iValueHandles[i + 1]);
}
return map;
}
private static NDList forwardHelper(Pointer iValueHandle, PtNDManager manager) {
NDList list = new NDList();
if (isNDArray(iValueHandle)) {
list.add(toNDArray(iValueHandle, manager));
} else if (isNDList(iValueHandle)) {
list.addAll(toNDList(iValueHandle, manager));
} else if (isList(iValueHandle) || isTuple(iValueHandle)) {
for (Pointer handle : toIValueArray(iValueHandle)) {
list.addAll(forwardHelper(handle, manager));
}
} else if (isMap(iValueHandle)) {
            // only maps of type <String, NDArray> are supported
Map<Pointer, Pointer> map = toIValueMap(iValueHandle);
for (Map.Entry<Pointer, Pointer> entry : map.entrySet()) {
String name = toString(entry.getKey());
// free the IValue handle
PyTorchLibrary.LIB.torchDeleteIValue(entry.getKey());
PtNDArray value = toNDArray(entry.getValue(), manager);
// free the IValue handle
PyTorchLibrary.LIB.torchDeleteIValue(entry.getValue());
value.setName(name);
list.add(value);
}
} else {
// free the IValue handle
PyTorchLibrary.LIB.torchDeleteIValue(iValueHandle);
throw new UnsupportedOperationException("Unsupported IValue type");
}
// free the IValue handle
PyTorchLibrary.LIB.torchDeleteIValue(iValueHandle);
return list;
}
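    // Sketch of the recursion above: a tuple of (tensor, [tensor, tensor]) flattens into an
    // NDList of three arrays; map entries additionally carry their string keys as array names.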
    /**
     * Runs the forward pass of the PyTorch module.
     *
     * @param block the block that contains the PyTorch module
     * @param inputs the input {@link NDList}
     * @param isTrain whether to run in training mode
     * @return the result {@link NDList}
     */
public static NDList forward(PtSymbolBlock block, NDList inputs, boolean isTrain) {
Pointer[] arrayHandles =
inputs.stream()
.map(input -> ((PtNDArray) input).getHandle())
.toArray(Pointer[]::new);
Pointer result = PyTorchLibrary.LIB.moduleForward(block.getHandle(), arrayHandles, isTrain);
PtNDManager manager = (PtNDManager) inputs.get(0).getManager();
return forwardHelper(result, manager);
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/jni/JniUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import ai.djl.Device;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.pytorch.engine.PtDeviceType;
import ai.djl.pytorch.engine.PtNDArray;
import ai.djl.pytorch.engine.PtNDManager;
import ai.djl.pytorch.engine.PtSymbolBlock;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A class containing utilities to interact with the PyTorch Engine's Java Native Interface (JNI)
* layer.
*/
@SuppressWarnings("MissingJavadocMethod")
public final class JniUtils {
private static final Logger logger = LoggerFactory.getLogger(JniUtils.class);
private static Set<String> configs;
private JniUtils() {}
private static int layoutMapper(SparseFormat fmt, Device device) {
if (fmt == SparseFormat.DENSE) {
            // MKLDNN can be enabled with the ai.djl.pytorch.use_mkldnn system property
            // using MKLDNN on GPU would throw an exception in libtorch
if (Boolean.getBoolean("ai.djl.pytorch.use_mkldnn") && !device.equals(Device.gpu())) {
return 2;
}
return 0;
} else if (fmt == SparseFormat.COO) {
return 1;
} else {
            throw new IllegalArgumentException(
                    "PyTorch currently only supports SparseFormat.DENSE and SparseFormat.COO");
}
}
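    // The layout codes above are assumed to mirror libtorch: 0 = strided (dense),
    // 1 = sparse (COO), 2 = MKLDNN. A sketch of opting into MKLDNN on CPU:
    //   System.setProperty("ai.djl.pytorch.use_mkldnn", "true");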
public static int getNumInteropThreads() {
return PyTorchLibrary.LIB.torchGetNumInteropThreads();
}
public static int getNumThreads() {
return PyTorchLibrary.LIB.torchGetNumThreads();
}
public static void setNumInteropThreads(int threads) {
PyTorchLibrary.LIB.torchSetNumInteropThreads(threads);
}
public static void setNumThreads(int threads) {
PyTorchLibrary.LIB.torchSetNumThreads(threads);
}
public static Set<String> getFeatures() {
if (configs != null) {
return configs;
}
Set<String> features = new HashSet<>();
PyTorchLibrary.LIB.torchShowConfig(features);
configs = features;
return configs;
}
public static void setSeed(long seed) {
PyTorchLibrary.LIB.torchManualSeed(seed);
}
    // TODO: unchecked DataType and device mapping
public static PtNDArray createNdFromByteBuffer(
PtNDManager manager,
ByteBuffer data,
Shape shape,
DataType dType,
SparseFormat fmt,
Device device) {
int layoutVal = layoutMapper(fmt, device);
return manager.create(
PyTorchLibrary.LIB.torchFromBlob(
data,
shape.getShape(),
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray createEmptyNdArray(
PtNDManager manager, Shape shape, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return manager.create(
PyTorchLibrary.LIB.torchEmpty(
shape.getShape(),
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray createZerosNdArray(
PtNDManager manager, Shape shape, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return manager.create(
PyTorchLibrary.LIB.torchZeros(
shape.getShape(),
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray createOnesNdArray(
PtNDManager manager, Shape shape, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return manager.create(
PyTorchLibrary.LIB.torchOnes(
shape.getShape(),
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray full(
PtNDManager manager,
Shape shape,
double fillValue,
DataType dType,
Device device,
SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return manager.create(
PyTorchLibrary.LIB.torchFull(
shape.getShape(),
fillValue,
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray zerosLike(
PtNDArray array, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return array.getManager()
.create(
PyTorchLibrary.LIB.torchZerosLike(
array.getHandle(),
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray onesLike(
PtNDArray array, DataType dType, Device device, SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return array.getManager()
.create(
PyTorchLibrary.LIB.torchOnesLike(
array.getHandle(),
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray arange(
PtNDManager manager,
float start,
float stop,
float step,
DataType dType,
Device device,
SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return manager.create(
PyTorchLibrary.LIB.torchArange(
start,
stop,
step,
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray linspace(
PtNDManager manager,
float start,
float stop,
int step,
DataType dType,
Device device,
SparseFormat fmt) {
int layoutVal = layoutMapper(fmt, device);
return manager.create(
PyTorchLibrary.LIB.torchLinspace(
start,
stop,
step,
dType.ordinal(),
layoutVal,
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray to(PtNDArray ndArray, DataType dataType, Device device, boolean copy) {
PtNDManager manager = ndArray.getManager();
        // the manager's device should always match the device of the NDArray it is
        // attached to, so create a sub-manager when the target device differs
if (!device.equals(manager.getDevice())) {
manager = manager.newSubManager(device);
}
return manager.create(
PyTorchLibrary.LIB.torchTo(
ndArray.getHandle(),
dataType.ordinal(),
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
copy));
}
public static PtNDArray toSparse(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchToSparse(ndArray.getHandle()));
}
public static PtNDArray toDense(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchToDense(ndArray.getHandle()));
}
public static PtNDArray broadcast(PtNDArray ndArray, Shape shape) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchExpand(ndArray.getHandle(), shape.getShape()));
}
public static PtNDArray slice(PtNDArray ndArray, long dim, long start, long stop, long step) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchSlice(ndArray.getHandle(), dim, start, stop, step));
}
public static PtNDArray index(
PtNDArray ndArray, long[] minIndices, long[] maxIndices, long[] stepIndices) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchIndex(
ndArray.getHandle(), minIndices, maxIndices, stepIndices));
}
public static void indexSet(
PtNDArray ndArray,
PtNDArray value,
long[] minIndices,
long[] maxIndices,
long[] stepIndices) {
PyTorchLibrary.LIB.torchIndexPut(
ndArray.getHandle(), value.getHandle(), minIndices, maxIndices, stepIndices);
}
public static void set(PtNDArray self, PtNDArray other) {
PyTorchLibrary.LIB.torchSet(self.getHandle(), other.getHandle());
}
public static PtNDArray pick(PtNDArray ndArray, PtNDArray index, long dim) {
Shape indexShape = index.getShape();
Shape ndShape = ndArray.getShape();
int shapeDims = indexShape.dimension();
int ndDims = ndShape.dimension();
if (shapeDims != ndDims) {
for (int i = 0; i < ndDims - shapeDims; ++i) {
if (indexShape.equals(ndShape.slice(i, shapeDims))) {
long[] shapes = indexShape.getShape();
long[] newShape = new long[ndDims];
Arrays.fill(newShape, 0, i, 1L);
Arrays.fill(newShape, i, i + shapes.length, shapes[i]);
Arrays.fill(newShape, i + shapes.length, ndDims, 1L);
indexShape = new Shape(newShape);
break;
}
}
if (indexShape.equals(index.getShape())) {
throw new IllegalArgumentException(
"expand shape failed! Cannot expand from " + indexShape + "to " + ndShape);
}
index = index.reshape(indexShape);
}
if (index.getDataType() != DataType.INT64) {
index = index.toType(DataType.INT64, true);
}
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchGather(
ndArray.getHandle(), index.getHandle(), dim, false));
}
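    // Sketch of the index expansion above (hypothetical shapes): picking from a (3, 2)
    // array with an index of shape (3) reshapes the index to (3, 1), casts it to int64 if
    // needed, and then delegates to the native gather along the requested dim.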
public static PtNDArray where(PtNDArray condition, PtNDArray self, PtNDArray other) {
return self.getManager()
.create(
PyTorchLibrary.LIB.torchWhere(
condition.getHandle(), self.getHandle(), other.getHandle()));
}
public static PtNDArray booleanMask(PtNDArray ndArray, PtNDArray indicesNd) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchMaskedSelect(
ndArray.getHandle(), indicesNd.getHandle()));
}
public static void booleanMaskSet(PtNDArray ndArray, PtNDArray value, PtNDArray indicesNd) {
PyTorchLibrary.LIB.torchMaskedPut(
ndArray.getHandle(), value.getHandle(), indicesNd.getHandle());
}
public static PtNDArray clone(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.tensorClone(ndArray.getHandle()));
}
public static PtNDArray reshape(PtNDArray ndArray, long[] shape) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchReshape(ndArray.getHandle(), shape));
}
public static PtNDArray stack(NDArray[] arrays, int dim) {
Pointer[] pointers =
Arrays.stream(arrays)
.map(array -> ((PtNDArray) array).getHandle())
.toArray(Pointer[]::new);
return ((PtNDManager) arrays[0].getManager())
.create(PyTorchLibrary.LIB.torchStack(pointers, dim));
}
public static PtNDArray cat(NDArray[] arrays, long dim) {
Pointer[] pointers =
Arrays.stream(arrays)
.map(array -> ((PtNDArray) array).getHandle())
.toArray(Pointer[]::new);
return ((PtNDManager) arrays[0].getManager())
.create(PyTorchLibrary.LIB.torchCat(pointers, dim));
}
public static PtNDArray tile(PtNDArray ndArray, long[] repeats) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchRepeat(ndArray.getHandle(), repeats));
}
public static PtNDArray repeat(PtNDArray ndArray, long repeat, long dim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchRepeatInterleave(ndArray.getHandle(), repeat, dim));
}
    public static PtNDArray softmax(PtNDArray ndArray, long dim, DataType dType) {
        return ndArray.getManager()
                .create(PyTorchLibrary.LIB.torchSoftmax(ndArray.getHandle(), dim, dType.ordinal()));
    }
    public static PtNDArray logSoftmax(PtNDArray ndArray, long dim, DataType dType) {
        return ndArray.getManager()
                .create(
                        PyTorchLibrary.LIB.torchLogSoftmax(
                                ndArray.getHandle(), dim, dType.ordinal()));
    }
public static PtNDArray argMax(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchArgMax(ndArray.getHandle()));
}
public static PtNDArray argMax(PtNDArray ndArray, long dim, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchArgMax(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray argMin(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchArgMin(ndArray.getHandle()));
}
public static PtNDArray argMin(PtNDArray ndArray, long dim, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchArgMin(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray argSort(PtNDArray ndArray, long dim, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchArgSort(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray sort(PtNDArray ndArray, long dim, boolean descending) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchSort(ndArray.getHandle(), dim, descending));
}
public static PtNDArray permute(PtNDArray ndArray, long[] dims) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchPermute(ndArray.getHandle(), dims));
}
public static PtNDArray flip(PtNDArray ndArray, long[] dims) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchFlip(ndArray.getHandle(), dims));
}
public static PtNDArray transpose(PtNDArray ndArray, long dim1, long dim2) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchTranspose(ndArray.getHandle(), dim1, dim2));
}
public static boolean contentEqual(PtNDArray ndArray1, PtNDArray ndArray2) {
return PyTorchLibrary.LIB.contentEqual(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray add(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchAdd(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void addi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchAddi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray sub(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchSub(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void subi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchSubi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray mul(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchMul(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void muli(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchMuli(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray div(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(
PyTorchLibrary.LIB.torchTrueDivide(
ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void divi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchTrueDividei(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray remainder(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(
PyTorchLibrary.LIB.torchRemainder(
ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void remainderi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchRemainderi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray pow(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchPow(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static void powi(PtNDArray ndArray1, PtNDArray ndArray2) {
PyTorchLibrary.LIB.torchPowi(ndArray1.getHandle(), ndArray2.getHandle());
}
public static PtNDArray sign(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSign(ndArray.getHandle()));
}
public static void signi(PtNDArray ndArray) {
PyTorchLibrary.LIB.torchSigni(ndArray.getHandle());
}
public static PtNDArray logicalAnd(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(
PyTorchLibrary.LIB.torchLogicalAnd(
ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray logicalOr(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(
PyTorchLibrary.LIB.torchLogicalOr(
ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray logicalXor(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(
PyTorchLibrary.LIB.torchLogicalXor(
ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray logicalNot(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchLogicalNot(ndArray.getHandle()));
}
public static PtNDArray matmul(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchMatmul(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray dot(PtNDArray ndArray1, PtNDArray ndArray2) {
if (ndArray1.getShape().dimension() == 1) {
return ndArray1.getManager()
.create(
PyTorchLibrary.LIB.torchDot(
ndArray1.getHandle(), ndArray2.getHandle()));
}
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchMM(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray max(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchMax(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray max(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchMax(ndArray.getHandle()));
}
public static PtNDArray max(PtNDArray ndArray, long dim, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchMax(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray min(PtNDArray ndArray1, PtNDArray ndArray2) {
return ndArray1.getManager()
.create(PyTorchLibrary.LIB.torchMin(ndArray1.getHandle(), ndArray2.getHandle()));
}
public static PtNDArray min(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchMin(ndArray.getHandle()));
}
public static PtNDArray min(PtNDArray ndArray, long dim, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchMin(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray mean(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchMean(ndArray.getHandle()));
}
public static PtNDArray mean(PtNDArray ndArray, long dim, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchMean(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray sum(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSum(ndArray.getHandle()));
}
public static PtNDArray sum(PtNDArray ndArray, long[] dims, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchSum(ndArray.getHandle(), dims, keepDim));
}
public static PtNDArray prod(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchProd(ndArray.getHandle()));
}
public static PtNDArray prod(PtNDArray ndArray, long dim, boolean keepDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchProd(ndArray.getHandle(), dim, keepDim));
}
public static PtNDArray cumSum(PtNDArray ndArray, long dim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchCumSum(ndArray.getHandle(), dim));
}
public static NDList split(PtNDArray ndArray, long size, long axis) {
Pointer[] ndPtrs = PyTorchLibrary.LIB.torchSplit(ndArray.getHandle(), size, axis);
NDList list = new NDList();
for (Pointer ptr : ndPtrs) {
list.add(ndArray.getManager().create(ptr));
}
return list;
}
public static NDList split(PtNDArray ndArray, long[] indices, long axis) {
Pointer[] ndPtrs = PyTorchLibrary.LIB.torchSplit(ndArray.getHandle(), indices, axis);
NDList list = new NDList();
for (Pointer ptr : ndPtrs) {
list.add(ndArray.getManager().create(ptr));
}
return list;
}
public static PtNDArray squeeze(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSqueeze(ndArray.getHandle()));
}
public static PtNDArray squeeze(PtNDArray ndArray, long dim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchSqueeze(ndArray.getHandle(), dim));
}
public static PtNDArray unsqueeze(PtNDArray ndArray, long dim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchUnsqueeze(ndArray.getHandle(), dim));
}
public static PtNDArray flatten(PtNDArray ndArray, long startDim, long endDim) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchFlatten(ndArray.getHandle(), startDim, endDim));
}
public static PtNDArray abs(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchAbs(ndArray.getHandle()));
}
public static PtNDArray square(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSquare(ndArray.getHandle()));
}
public static PtNDArray floor(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchFloor(ndArray.getHandle()));
}
public static PtNDArray ceil(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchCeil(ndArray.getHandle()));
}
public static PtNDArray round(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchRound(ndArray.getHandle()));
}
public static PtNDArray trunc(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchTrunc(ndArray.getHandle()));
}
public static PtNDArray clip(PtNDArray ndArray, Number min, Number max) {
PtNDArray minNd = (PtNDArray) ndArray.getManager().create(min);
PtNDArray maxNd = (PtNDArray) ndArray.getManager().create(max);
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchClamp(
ndArray.getHandle(), minNd.getHandle(), maxNd.getHandle()));
}
public static PtNDArray exp(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchExp(ndArray.getHandle()));
}
public static PtNDArray log(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchLog(ndArray.getHandle()));
}
public static PtNDArray log10(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchLog10(ndArray.getHandle()));
}
public static PtNDArray log2(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchLog2(ndArray.getHandle()));
}
public static PtNDArray sin(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSin(ndArray.getHandle()));
}
public static PtNDArray cos(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchCos(ndArray.getHandle()));
}
public static PtNDArray tan(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchTan(ndArray.getHandle()));
}
public static PtNDArray asin(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchASin(ndArray.getHandle()));
}
public static PtNDArray acos(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchAcos(ndArray.getHandle()));
}
public static PtNDArray atan(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchAtan(ndArray.getHandle()));
}
public static PtNDArray sqrt(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSqrt(ndArray.getHandle()));
}
public static PtNDArray sinh(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSinh(ndArray.getHandle()));
}
public static PtNDArray cosh(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchCosh(ndArray.getHandle()));
}
public static PtNDArray tanh(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchTanh(ndArray.getHandle()));
}
public static PtNDArray sigmoid(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchSigmoid(ndArray.getHandle()));
}
public static PtNDArray all(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchAll(ndArray.getHandle()));
}
public static PtNDArray any(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchAny(ndArray.getHandle()));
}
public static PtNDArray none(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchNone(ndArray.getHandle()));
}
public static PtNDArray eq(PtNDArray self, PtNDArray other) {
return self.getManager()
.create(PyTorchLibrary.LIB.torchEq(self.getHandle(), other.getHandle()));
}
public static PtNDArray neq(PtNDArray self, PtNDArray other) {
return self.getManager()
.create(PyTorchLibrary.LIB.torchNeq(self.getHandle(), other.getHandle()));
}
public static PtNDArray gt(PtNDArray self, PtNDArray other) {
return self.getManager()
.create(PyTorchLibrary.LIB.torchGt(self.getHandle(), other.getHandle()));
}
public static PtNDArray gte(PtNDArray self, PtNDArray other) {
return self.getManager()
.create(PyTorchLibrary.LIB.torchGte(self.getHandle(), other.getHandle()));
}
public static PtNDArray lt(PtNDArray self, PtNDArray other) {
return self.getManager()
.create(PyTorchLibrary.LIB.torchLt(self.getHandle(), other.getHandle()));
}
public static PtNDArray lte(PtNDArray self, PtNDArray other) {
return self.getManager()
.create(PyTorchLibrary.LIB.torchLte(self.getHandle(), other.getHandle()));
}
public static PtNDArray neg(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchNeg(ndArray.getHandle()));
}
public static void negi(PtNDArray ndArray) {
PyTorchLibrary.LIB.torchNegi(ndArray.getHandle());
}
public static PtNDArray isNaN(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchIsNaN(ndArray.getHandle()));
}
public static PtNDArray isInf(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchIsInf(ndArray.getHandle()));
}
public static PtNDArray normal(
PtNDManager manager,
double mean,
double std,
Shape size,
DataType dataType,
Device device) {
return manager.create(
PyTorchLibrary.LIB.atNormal(
mean,
std,
size.getShape(),
dataType.ordinal(),
layoutMapper(SparseFormat.DENSE, device),
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray uniform(
PtNDManager manager,
double low,
double high,
Shape size,
DataType dataType,
Device device) {
return manager.create(
PyTorchLibrary.LIB.tensorUniform(
low,
high,
size.getShape(),
dataType.ordinal(),
layoutMapper(SparseFormat.DENSE, device),
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray eye(
PtNDManager manager, int n, int m, DataType dataType, Device device, SparseFormat fmt) {
return manager.create(
PyTorchLibrary.LIB.torchEye(
n,
m,
dataType.ordinal(),
layoutMapper(fmt, device),
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
},
false));
}
public static PtNDArray interpolate(
PtNDArray ndArray, long[] size, int mode, boolean alignCorners) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNInterpolate(
ndArray.getHandle(), size, mode, alignCorners));
}
public static PtNDArray linear(PtNDArray input, PtNDArray weight, PtNDArray bias) {
return input.getManager()
.create(
PyTorchLibrary.LIB.torchNNLinear(
input.getHandle(),
weight.getHandle(),
bias == null ? null : bias.getHandle()));
}
public static PtNDArray relu(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchNNRelu(ndArray.getHandle()));
}
public static PtNDArray softPlus(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchNNSoftPlus(ndArray.getHandle()));
}
public static PtNDArray softSign(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchNNSoftSign(ndArray.getHandle()));
}
public static PtNDArray leakyRelu(PtNDArray ndArray, double negativeSlope) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchNNLeakyRelu(ndArray.getHandle(), negativeSlope));
}
public static PtNDArray elu(PtNDArray ndArray, double alpha) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchNNElu(ndArray.getHandle(), alpha));
}
public static PtNDArray selu(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchNNSelu(ndArray.getHandle()));
}
public static PtNDArray gelu(PtNDArray ndArray) {
return ndArray.getManager().create(PyTorchLibrary.LIB.torchNNGelu(ndArray.getHandle()));
}
public static PtNDArray convolution(
PtNDArray ndArray,
PtNDArray weight,
PtNDArray bias,
Shape stride,
Shape padding,
Shape dilation,
int groups) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNConvNd(
ndArray.getHandle(),
weight.getHandle(),
(bias != null) ? bias.getHandle() : null,
stride.getShape(),
padding.getShape(),
dilation.getShape(),
groups));
}
public static PtNDArray batchNorm(
PtNDArray ndArray,
PtNDArray gamma,
PtNDArray beta,
PtNDArray runningMean,
PtNDArray runningVar,
boolean isTraining,
double momentum,
double eps) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNBatchNorm(
ndArray.getHandle(),
gamma.getHandle(),
beta.getHandle(),
runningMean.getHandle(),
runningVar.getHandle(),
isTraining,
momentum,
eps));
}
public static PtNDArray dropout(PtNDArray ndArray, double prob, boolean training) {
return ndArray.getManager()
.create(PyTorchLibrary.LIB.torchNNDropout(ndArray.getHandle(), prob, training));
}
public static PtNDArray avgPool(
PtNDArray ndArray,
Shape kernelSize,
Shape stride,
Shape padding,
boolean ceilMode,
boolean countIncludePad) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNAvgPool(
ndArray.getHandle(),
kernelSize.getShape(),
stride.getShape(),
padding.getShape(),
ceilMode,
countIncludePad));
}
public static PtNDArray maxPool(
PtNDArray ndArray, Shape kernelSize, Shape stride, Shape padding, boolean ceilMode) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNMaxPool(
ndArray.getHandle(),
kernelSize.getShape(),
stride.getShape(),
padding.getShape(),
ceilMode));
}
public static PtNDArray adaptiveMaxPool(PtNDArray ndArray, Shape outputSize) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNAdaptiveMaxPool(
ndArray.getHandle(), outputSize.getShape()));
}
public static PtNDArray adaptiveAvgPool(PtNDArray ndArray, Shape outputSize) {
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNAdaptiveAvgPool(
ndArray.getHandle(), outputSize.getShape()));
}
public static PtNDArray lpPool(
PtNDArray ndArray, double normType, Shape kernelSize, Shape stride, boolean ceilMode) {
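        // dimension() includes the batch and channel dims, so 3 spatial dims means a 5-D input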
if (ndArray.getShape().dimension() - 2 == 3) {
throw new UnsupportedOperationException("3D lpPool is not supported in PyTorch engine");
}
return ndArray.getManager()
.create(
PyTorchLibrary.LIB.torchNNLpPool(
ndArray.getHandle(),
normType,
kernelSize.getShape(),
stride.getShape(),
ceilMode));
}
public static DataType getDataType(PtNDArray ndArray) {
int dataType = PyTorchLibrary.LIB.torchDType(ndArray.getHandle());
return DataType.values()[dataType];
}
public static Device getDevice(PtNDArray ndArray) {
int[] device = PyTorchLibrary.LIB.torchDevice(ndArray.getHandle());
String deviceType = PtDeviceType.fromDeviceType(device[0]);
return Device.of(deviceType, device[1]);
}
public static SparseFormat getSparseFormat(PtNDArray ndArray) {
int layout = PyTorchLibrary.LIB.torchLayout(ndArray.getHandle());
if (layout == 0) {
return SparseFormat.DENSE;
} else if (layout == 1) {
return SparseFormat.COO;
} else if (layout == 2) {
logger.debug("MKLDNN layout is used!");
return SparseFormat.DENSE;
}
throw new UnsupportedOperationException("Unsupported data format");
}
public static Shape getShape(PtNDArray ndArray) {
return new Shape(PyTorchLibrary.LIB.torchSizes(ndArray.getHandle()));
}
public static ByteBuffer getByteBuffer(PtNDArray ndArray) {
// Operation is CPU only
if (!ndArray.getDevice().equals(Device.cpu())) {
ndArray = ndArray.toDevice(Device.cpu(), false);
}
return ByteBuffer.wrap(PyTorchLibrary.LIB.torchDataPtr(ndArray.getHandle()))
.order(ByteOrder.nativeOrder());
}
public static void deleteNdArray(Pointer handle) {
PyTorchLibrary.LIB.torchDeleteTensor(handle);
}
public static boolean requiresGrad(PtNDArray ndArray) {
return PyTorchLibrary.LIB.torchRequiresGrad(ndArray.getHandle());
}
public static String getGradientFunctionNames(PtNDArray ndArray) {
return PyTorchLibrary.LIB.torchGradFnName(ndArray.getHandle());
}
public static void attachGradient(PtNDArray ndArray) {
PyTorchLibrary.LIB.torchAttachGrad(ndArray.getHandle());
}
public static PtNDArray detachGradient(PtNDArray ndArray) {
        // TODO: the detached ndarray may not use the same manager as the one it was attached to
return ndArray.getManager().create(PyTorchLibrary.LIB.torchDetachGrad(ndArray.getHandle()));
}
public static PtNDArray getGradient(PtNDArray ndArray) {
Pointer pointer = PyTorchLibrary.LIB.torchGrad(ndArray.getHandle());
if (pointer == null) {
return null;
}
return ndArray.getManager().create(pointer);
}
public static void backward(
PtNDArray ndArray, PtNDArray gradNd, boolean keepGraph, boolean createGraph) {
PyTorchLibrary.LIB.torchBackward(
ndArray.getHandle(), gradNd.getHandle(), keepGraph, createGraph);
}
public static void deleteModule(Pointer pointer) {
PyTorchLibrary.LIB.torchDeleteModule(pointer);
}
public static PtSymbolBlock loadModule(PtNDManager manager, Path path, Device device) {
Pointer handle =
PyTorchLibrary.LIB.moduleLoad(
path.toString(),
new int[] {
PtDeviceType.toDeviceType(device),
device.equals(Device.cpu()) ? -1 : device.getDeviceId()
});
return new PtSymbolBlock(manager, handle);
}
public static void enableInferenceMode(PtSymbolBlock block) {
PyTorchLibrary.LIB.moduleEval(block.getHandle());
}
public static void enableTrainingMode(PtSymbolBlock block) {
PyTorchLibrary.LIB.moduleTrain(block.getHandle());
}
public static void zeroGrad(PtNDArray weight) {
PyTorchLibrary.LIB.zeroGrad(weight.getHandle());
}
public static void adamUpdate(
PtNDArray weight,
PtNDArray grad,
PtNDArray mean,
PtNDArray variance,
float lr,
float wd,
float rescaleGrad,
float clipGrad,
float beta1,
float beta2,
float eps) {
PyTorchLibrary.LIB.adamUpdate(
weight.getHandle(),
grad.getHandle(),
mean.getHandle(),
variance.getHandle(),
lr,
wd,
rescaleGrad,
clipGrad,
beta1,
beta2,
eps);
}
public static void sgdUpdate(
PtNDArray weight,
PtNDArray grad,
PtNDArray state,
float lr,
float wd,
float rescaleGrad,
float clipGrad,
float momentum) {
PyTorchLibrary.LIB.sgdUpdate(
weight.getHandle(),
grad.getHandle(),
(state == null) ? null : state.getHandle(),
lr,
wd,
rescaleGrad,
clipGrad,
momentum);
}
// Internal use only
public static int getLayout(PtNDArray array) {
return PyTorchLibrary.LIB.torchLayout(array.getHandle());
}
}
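// ---------------------------------------------------------------------------
// Hedged usage sketch (added for illustration; not part of the original
// source). It shows how the static wrappers above compose with the manager
// API. The engine name "PyTorch" and the cast to PtNDArray are assumptions:
// they hold only when the PyTorch engine backs the returned manager.
final class JniUtilsUsageExample {
    private JniUtilsUsageExample() {}
    static void demo() {
        try (ai.djl.ndarray.NDManager manager =
                ai.djl.ndarray.NDManager.newBaseManager(null, "PyTorch")) {
            PtNDArray array = (PtNDArray) manager.create(new float[] {-1f, 0f, 1f});
            try (PtNDArray activated = JniUtils.relu(array)) {
                // activated now holds [0, 0, 1]; the native tensors are freed
                // when the enclosing manager closes.
            }
        }
    }
}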
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/jni/LibUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utilities for finding the PyTorch Engine binary on the System.
*
* <p>The Engine will be searched for in a variety of locations in the following order:
*
* <ol>
* <li>In the path specified by the PYTORCH_LIBRARY_PATH environment variable
 *   <li>In a jar file located in the classpath. These jars can be created with the pytorch-native
* module.
* </ol>
*/
@SuppressWarnings("MissingJavadocMethod")
public final class LibUtils {
private static final Logger logger = LoggerFactory.getLogger(LibUtils.class);
private static final String LIB_NAME = "djl_torch";
private static final String NATIVE_LIB_NAME = "torch";
private static final Pattern VERSION_PATTERN =
Pattern.compile("(\\d+\\.\\d+\\.\\d+(-\\w)?)(-SNAPSHOT)?(-\\d+)?");
private LibUtils() {}
public static void loadLibrary() {
// TODO workaround to make it work on Android Studio
        // It should search several places to find the native library
if (System.getProperty("java.vendor.url").equals("http://www.android.com/")) {
System.loadLibrary(LIB_NAME); // NOPMD
return;
}
String libName = findOverrideLibrary();
if (libName == null) {
AtomicBoolean fallback = new AtomicBoolean(false);
String nativeLibDir = findNativeLibrary(fallback);
if (nativeLibDir != null) {
libName = copyJniLibraryFromClasspath(Paths.get(nativeLibDir), fallback.get());
} else {
throw new IllegalStateException("Native library not found");
}
}
logger.debug("Loading pytorch library from: {}", libName);
if (System.getProperty("os.name").startsWith("Win")) {
loadWinDependencies(libName);
}
System.load(libName); // NOPMD
}
private static void loadWinDependencies(String libName) {
Path libDir = Paths.get(libName).getParent();
if (libDir == null) {
throw new IllegalArgumentException("Invalid library path!");
}
try (Stream<Path> paths = Files.walk(libDir)) {
paths.filter(
path -> {
String name = path.getFileName().toString();
return !"c10_cuda.dll".equals(name)
&& !"torch.dll".equals(name)
&& !"torch_cpu.dll".equals(name)
&& !"torch_cuda.dll".equals(name)
&& !"fbgemm.dll".equals(name)
&& Files.isRegularFile(path)
&& !name.endsWith("djl_torch.dll");
})
.map(path -> path.toAbsolutePath().toString())
.forEach(System::load);
System.load(libDir.resolve("fbgemm.dll").toAbsolutePath().toString());
System.load(libDir.resolve("torch_cpu.dll").toAbsolutePath().toString());
if (Files.exists(libDir.resolve("c10_cuda.dll"))) {
// Windows System.load is global load
System.load(libDir.resolve("c10_cuda.dll").toAbsolutePath().toString());
System.load(libDir.resolve("torch_cuda.dll").toAbsolutePath().toString());
}
System.load(libDir.resolve("torch.dll").toAbsolutePath().toString());
} catch (IOException e) {
throw new IllegalArgumentException("Folder not exist! " + libDir, e);
}
}
private static String findOverrideLibrary() {
String libPath = System.getenv("PYTORCH_LIBRARY_PATH");
if (libPath != null) {
String libName = findLibraryInPath(libPath);
if (libName != null) {
return libName;
}
}
libPath = System.getProperty("java.library.path");
if (libPath != null) {
return findLibraryInPath(libPath);
}
return null;
}
private static String findLibraryInPath(String libPath) {
String[] paths = libPath.split(File.pathSeparator);
        List<String> mappedLibNames = Collections.singletonList(System.mapLibraryName(LIB_NAME));
for (String path : paths) {
File p = new File(path);
if (!p.exists()) {
continue;
}
for (String name : mappedLibNames) {
if (p.isFile() && p.getName().endsWith(name)) {
return p.getAbsolutePath();
}
File file = new File(path, name);
if (file.exists() && file.isFile()) {
return file.getAbsolutePath();
}
}
}
return null;
}
private static String copyJniLibraryFromClasspath(Path nativeDir, boolean fallback) {
String name = System.mapLibraryName(LIB_NAME);
Platform platform = Platform.fromSystem();
String classifier = platform.getClassifier();
String flavor = platform.getFlavor();
if (fallback || flavor.isEmpty()) {
flavor = "cpu";
}
Properties prop = new Properties();
        try (InputStream stream =
                LibUtils.class.getResourceAsStream(
                        "/jnilib/" + classifier + "/" + flavor + "/pytorch.properties")) {
            if (stream == null) {
                throw new IllegalStateException("Cannot find pytorch property file");
            }
            prop.load(stream);
        } catch (IOException e) {
            throw new IllegalStateException("Cannot find pytorch property file", e);
        }
String version = prop.getProperty("version");
Path path = nativeDir.resolve(version + flavor + name);
if (Files.exists(path)) {
return path.toAbsolutePath().toString();
}
Path tmp = null;
        try (InputStream stream =
                LibUtils.class.getResourceAsStream(
                        "/jnilib/" + classifier + "/" + flavor + "/" + name)) {
            if (stream == null) {
                throw new IllegalStateException("Cannot find jni library: " + name);
            }
tmp = Files.createTempFile(nativeDir, "jni", "tmp");
Files.copy(stream, tmp, StandardCopyOption.REPLACE_EXISTING);
Utils.moveQuietly(tmp, path);
return path.toAbsolutePath().toString();
} catch (IOException e) {
throw new IllegalStateException("Cannot copy jni files", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static synchronized String findNativeLibrary(AtomicBoolean fallback) {
Enumeration<URL> urls;
try {
urls =
Thread.currentThread()
.getContextClassLoader()
.getResources("native/lib/pytorch.properties");
} catch (IOException e) {
logger.warn("", e);
return null;
}
// No native jars
if (!urls.hasMoreElements()) {
return null;
}
Platform systemPlatform = Platform.fromSystem();
try {
Platform matching = null;
Platform placeholder = null;
while (urls.hasMoreElements()) {
URL url = urls.nextElement();
Platform platform = Platform.fromUrl(url);
if (platform.isPlaceholder()) {
placeholder = platform;
} else if (platform.matches(systemPlatform)) {
matching = platform;
break;
}
}
if (matching != null) {
return copyNativeLibraryFromClasspath(matching);
}
if (placeholder != null) {
try {
return downloadPyTorch(placeholder, fallback);
} catch (IOException e) {
throw new IllegalStateException("Failed to download PyTorch native library", e);
}
}
} catch (IOException e) {
throw new IllegalStateException(
"Failed to read PyTorch native library jar properties", e);
}
throw new IllegalStateException(
"Your PyTorch native library jar does not match your operating system. Make sure the Maven Dependency Classifier matches your system type.");
}
private static String copyNativeLibraryFromClasspath(Platform platform) {
Path tmp = null;
String version = platform.getVersion();
String flavor = platform.getFlavor();
String classifier = platform.getClassifier();
try {
String libName = System.mapLibraryName(NATIVE_LIB_NAME);
Path cacheDir = getCacheDir();
logger.debug("Using cache dir: {}", cacheDir);
Path dir = cacheDir.resolve(version + flavor + '-' + classifier);
Path path = dir.resolve(libName);
if (Files.exists(path)) {
return dir.toAbsolutePath().toString();
}
Files.createDirectories(cacheDir);
tmp = Files.createTempDirectory(cacheDir, "tmp");
for (String file : platform.getLibraries()) {
String libPath = "/native/lib/" + file;
try (InputStream is = LibUtils.class.getResourceAsStream(libPath)) {
Files.copy(is, tmp.resolve(file), StandardCopyOption.REPLACE_EXISTING);
}
}
Utils.moveQuietly(tmp, dir);
return dir.toAbsolutePath().toString();
} catch (IOException e) {
throw new IllegalStateException("Failed to extract PyTorch native library", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static String downloadPyTorch(Platform platform, AtomicBoolean fallback)
throws IOException {
String version = platform.getVersion();
String flavor = platform.getFlavor();
if (flavor.isEmpty()) {
flavor = "cpu";
}
String classifier = platform.getClassifier();
String os = platform.getOsPrefix();
String libName = System.mapLibraryName(NATIVE_LIB_NAME);
Path cacheDir = getCacheDir();
logger.debug("Using cache dir: {}", cacheDir);
Path dir = cacheDir.resolve(version + flavor + '-' + classifier);
Path path = dir.resolve(libName);
if (Files.exists(path)) {
return dir.toAbsolutePath().toString();
}
        // files not found in the cache, download them
Files.createDirectories(cacheDir);
Matcher matcher = VERSION_PATTERN.matcher(version);
if (!matcher.matches()) {
throw new IllegalArgumentException("Unexpected version: " + version);
}
String link = "https://djl-ai.s3.amazonaws.com/publish/pytorch-" + matcher.group(1);
Path tmp = null;
try (InputStream is = new URL(link + "/files.txt").openStream()) {
List<String> lines = Utils.readLines(is);
if (flavor.startsWith("cu")
&& !lines.contains(flavor + '/' + os + "/native/lib/" + libName + ".gz")) {
logger.warn("No matching cuda flavor for {} found: {}.", os, flavor);
// fallback to CPU
flavor = "cpu";
fallback.set(true);
// check again
dir = cacheDir.resolve(version + flavor + '-' + classifier);
path = dir.resolve(libName);
if (Files.exists(path)) {
return dir.toAbsolutePath().toString();
}
}
tmp = Files.createTempDirectory(cacheDir, "tmp");
for (String line : lines) {
if (line.startsWith(flavor + '/' + os + '/')) {
URL url = new URL(link + '/' + line);
String fileName = line.substring(line.lastIndexOf('/') + 1, line.length() - 3);
logger.info("Downloading {} ...", url);
try (InputStream fis = new GZIPInputStream(url.openStream())) {
Files.copy(fis, tmp.resolve(fileName), StandardCopyOption.REPLACE_EXISTING);
}
}
}
Utils.moveQuietly(tmp, dir);
return dir.toAbsolutePath().toString();
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static Path getCacheDir() {
String cacheDir = System.getProperty("ENGINE_CACHE_DIR");
if (cacheDir == null || cacheDir.isEmpty()) {
cacheDir = System.getenv("ENGINE_CACHE_DIR");
if (cacheDir == null || cacheDir.isEmpty()) {
cacheDir = System.getProperty("DJL_CACHE_DIR");
if (cacheDir == null || cacheDir.isEmpty()) {
cacheDir = System.getenv("DJL_CACHE_DIR");
if (cacheDir == null || cacheDir.isEmpty()) {
String userHome = System.getProperty("user.home");
return Paths.get(userHome, ".pytorch/cache");
}
}
return Paths.get(cacheDir, "pytorch");
}
}
return Paths.get(cacheDir, ".pytorch/cache");
}
}
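// ---------------------------------------------------------------------------
// Hedged usage sketch (added for illustration; not part of the original
// source): loadLibrary() is the only public entry point and walks the search
// order documented in the class javadoc above.
final class LibUtilsUsageExample {
    private LibUtilsUsageExample() {}
    static void demo() {
        // Searches PYTORCH_LIBRARY_PATH and java.library.path first, then
        // classpath jars, then downloads a matching native library into the
        // cache directory before loading it with System.load.
        LibUtils.loadLibrary();
    }
}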
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/jni/NativeResource.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import java.util.concurrent.atomic.AtomicReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@code NativeResource} is an internal class for {@link AutoCloseable} blocks of memory created in
* the PyTorch Engine.
*/
public abstract class NativeResource implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(NativeResource.class);
protected final AtomicReference<Pointer> handle;
private String uid;
private Exception exception;
protected NativeResource(Pointer pointer) {
this.handle = new AtomicReference<>(pointer);
uid = String.valueOf(pointer.getValue());
if (logger.isTraceEnabled()) {
exception = new Exception();
}
}
/**
* Gets the boolean that indicates whether this resource has been released.
*
* @return whether this resource has been released
*/
public boolean isReleased() {
return handle.get() == null;
}
/**
* Gets the {@link Pointer} to this resource.
*
* @return the {@link Pointer} to this resource
*/
protected Pointer getHandle() {
Pointer pointer = handle.get();
if (pointer == null) {
throw new IllegalStateException("Native resource has been release already.");
}
return pointer;
}
/**
* Gets the unique ID of this resource.
*
* @return the unique ID of this resource
*/
public final String getUid() {
return uid;
}
/** {@inheritDoc} */
@Override
public void close() {
throw new UnsupportedOperationException("Not implemented.");
}
/** {@inheritDoc} */
@SuppressWarnings("deprecation")
@Override
protected void finalize() throws Throwable {
if (handle.get() != null) {
if (exception != null) {
logger.warn(
"Resource ({}) was not closed explicitly: {}",
getUid(),
getClass().getSimpleName());
logger.warn("Resource was created:", exception);
}
}
close();
super.finalize();
}
}
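// ---------------------------------------------------------------------------
// Hedged sketch (added for illustration; not part of the original source): a
// minimal subclass showing the intended handle/close contract. The deletion
// call in the comment is a stand-in; a real subclass frees its own native
// memory before clearing the handle.
final class NativeResourceExample extends NativeResource {
    NativeResourceExample(Pointer pointer) {
        super(pointer);
    }
    @Override
    public void close() {
        Pointer pointer = handle.getAndSet(null);
        if (pointer != null) {
            // e.g. PyTorchLibrary.LIB.torchDeleteTensor(pointer) for a tensor
        }
    }
}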
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/jni/Pointer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
/**
* An abstraction for a native pointer data type. A Pointer instance represents, on the Java side, a
* native pointer. The native pointer could be any <em>type</em> of native pointer.
*/
public class Pointer {
private final long peer;
/**
* Creates an instance of {@link Pointer}.
*
* @param peer the native peer of the pointer
*/
public Pointer(long peer) {
this.peer = peer;
}
    /**
     * Returns the native peer address of this pointer.
     *
     * @return the native peer address of this pointer
     */
public long getValue() {
return peer;
}
}
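// ---------------------------------------------------------------------------
// Hedged sketch (added for illustration; not part of the original source):
// Pointer does not override equals/hashCode, so two instances wrapping the
// same native address compare equal only through getValue().
final class PointerExample {
    private PointerExample() {}
    static boolean sameAddress(Pointer a, Pointer b) {
        return a.getValue() == b.getValue();
    }
}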
|
0
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-engine-precxx11/0.7.0/ai/djl/pytorch/jni/PyTorchLibrary.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
import java.nio.ByteBuffer;
import java.util.Set;
/** A class containing utilities to interact with the PyTorch Engine's JNI layer. */
final class PyTorchLibrary {
static final PyTorchLibrary LIB = new PyTorchLibrary();
private PyTorchLibrary() {}
native int torchGetNumInteropThreads();
native int torchGetNumThreads();
native void torchSetNumInteropThreads(int threads);
native void torchSetNumThreads(int threads);
native void torchManualSeed(long seed);
native void torchShowConfig(Set<String> set);
native long[] torchSizes(Pointer handle);
native byte[] torchDataPtr(Pointer handle);
native int torchDType(Pointer handle);
native int[] torchDevice(Pointer handle);
native int torchLayout(Pointer handle);
native Pointer torchTo(Pointer handle, int dType, int[] device, boolean copy);
native Pointer torchToSparse(Pointer handle);
native Pointer torchToDense(Pointer handle);
native Pointer tensorClone(Pointer handle);
native Pointer torchEmpty(
long[] shape, int dType, int layout, int[] device, boolean requiredGrad);
native Pointer torchZeros(
long[] shape, int dType, int layout, int[] device, boolean requiredGrad);
native Pointer torchOnes(
long[] shape, int dType, int layout, int[] device, boolean requiredGrad);
native Pointer torchFull(
long[] shape,
double fillValue,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native Pointer torchZerosLike(
Pointer handle, int dType, int layout, int[] device, boolean requiredGrad);
native Pointer torchOnesLike(
Pointer handle, int dType, int layout, int[] device, boolean requiredGrad);
native Pointer torchArange(
float start,
float end,
float step,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native Pointer torchLinspace(
float start,
float end,
int step,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native Pointer torchAdd(Pointer self, Pointer other);
native void torchAddi(Pointer self, Pointer other);
native Pointer torchExpand(Pointer self, long[] shape);
native Pointer torchSub(Pointer self, Pointer other);
native void torchSubi(Pointer self, Pointer other);
native Pointer torchMul(Pointer self, Pointer other);
native void torchMuli(Pointer self, Pointer other);
native Pointer torchTrueDivide(Pointer self, Pointer other);
native void torchTrueDividei(Pointer self, Pointer other);
native Pointer torchRemainder(Pointer self, Pointer other);
native void torchRemainderi(Pointer self, Pointer other);
native Pointer torchPow(Pointer self, Pointer exponent);
native void torchPowi(Pointer self, Pointer exponent);
native Pointer torchSign(Pointer self);
native void torchSigni(Pointer self);
native Pointer torchMatmul(Pointer self, Pointer other);
native Pointer torchDot(Pointer self, Pointer other);
native Pointer torchMM(Pointer self, Pointer other);
native Pointer torchLogicalAnd(Pointer self, Pointer other);
native Pointer torchLogicalOr(Pointer self, Pointer other);
native Pointer torchLogicalXor(Pointer self, Pointer other);
native Pointer torchLogicalNot(Pointer handle);
native Pointer torchReshape(Pointer handle, long[] shape);
native Pointer torchSoftmax(Pointer handle, long dim, int dType);
native Pointer torchLogSoftmax(Pointer handle, long dim, int dType);
native Pointer torchArgMax(Pointer handle);
native Pointer torchArgMax(Pointer handle, long dim, boolean keepDim);
native Pointer torchArgMin(Pointer handle);
native Pointer torchArgMin(Pointer handle, long dim, boolean keepDim);
native Pointer torchArgSort(Pointer handle);
native Pointer torchArgSort(Pointer handle, long dim, boolean keepDim);
native Pointer torchSort(Pointer handle, long dim, boolean descending);
native Pointer torchPermute(Pointer handle, long[] dims);
native Pointer torchFlip(Pointer handle, long[] dims);
native Pointer torchTranspose(Pointer handle, long axis1, long axis2);
native boolean contentEqual(Pointer handle1, Pointer handle2);
native Pointer torchFromBlob(
ByteBuffer data,
long[] shape,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native Pointer torchIndex(
Pointer handle, long[] minIndices, long[] maxIndices, long[] stepIndices);
native void torchIndexPut(
Pointer handle,
Pointer valueHandle,
long[] minIndices,
long[] maxIndices,
long[] stepIndices);
native void torchSet(Pointer selfHandle, Pointer otherHandle);
native Pointer torchSlice(Pointer handle, long dim, long start, long end, long step);
native Pointer torchGather(Pointer handle, Pointer index, long dim, boolean sparseGrad);
native Pointer torchMaskedSelect(Pointer handle, Pointer maskHandle);
native void torchMaskedPut(Pointer handle, Pointer valueHandle, Pointer maskHandle);
native void torchDeleteTensor(Pointer handle);
native void torchDeleteModule(Pointer handle);
native void torchDeleteIValue(Pointer handle);
native Pointer torchMax(Pointer handle);
native Pointer torchMax(Pointer self, Pointer other);
native Pointer torchMax(Pointer handle, long dim, boolean keepDim);
native Pointer torchMin(Pointer handle);
native Pointer torchMin(Pointer self, Pointer other);
native Pointer torchMin(Pointer handle, long dim, boolean keepDim);
native Pointer torchMean(Pointer handle);
native Pointer torchMean(Pointer handle, long dim, boolean keepDim);
native Pointer torchSum(Pointer handle);
native Pointer torchSum(Pointer handle, long[] dim, boolean keepDim);
native Pointer torchProd(Pointer handle);
native Pointer torchProd(Pointer handle, long dim, boolean keepDim);
native Pointer torchCumSum(Pointer handle, long dim);
native Pointer torchFlatten(Pointer handle, long startDim, long endDim);
native Pointer[] torchSplit(Pointer handle, long size, long dim);
native Pointer[] torchSplit(Pointer handle, long[] indices, long dim);
native Pointer torchUnsqueeze(Pointer handle, long dim);
native Pointer torchSqueeze(Pointer handle);
native Pointer torchSqueeze(Pointer handle, long axis);
native Pointer torchStack(Pointer[] handles, long dim);
native Pointer torchCat(Pointer[] handles, long dim);
native Pointer torchRepeat(Pointer handle, long[] repeats);
native Pointer torchRepeatInterleave(Pointer handle, long repeat, long axis);
native Pointer torchAbs(Pointer handle);
native Pointer torchSquare(Pointer self);
native Pointer torchFloor(Pointer handle);
native Pointer torchCeil(Pointer handle);
native Pointer torchClamp(Pointer handle, Pointer min, Pointer max);
native Pointer torchRound(Pointer handle);
native Pointer torchTrunc(Pointer handle);
native Pointer torchExp(Pointer handle);
native Pointer torchLog(Pointer handle);
native Pointer torchLog10(Pointer handle);
native Pointer torchLog2(Pointer handle);
native Pointer torchSin(Pointer handle);
native Pointer torchCos(Pointer handle);
native Pointer torchTan(Pointer handle);
native Pointer torchASin(Pointer handle);
native Pointer torchAcos(Pointer handle);
native Pointer torchAtan(Pointer handle);
native Pointer torchSqrt(Pointer handle);
native Pointer torchSinh(Pointer handle);
native Pointer torchCosh(Pointer handle);
native Pointer torchTanh(Pointer handle);
native Pointer torchSigmoid(Pointer handle);
native Pointer torchWhere(Pointer handle, Pointer x, Pointer y);
native Pointer torchAll(Pointer self);
native Pointer torchAny(Pointer self);
native Pointer torchNone(Pointer self);
native Pointer torchEq(Pointer self, Pointer other);
native Pointer torchNeq(Pointer self, Pointer other);
native Pointer torchGt(Pointer self, Pointer other);
native Pointer torchGte(Pointer self, Pointer other);
native Pointer torchLt(Pointer self, Pointer other);
native Pointer torchLte(Pointer self, Pointer other);
native Pointer torchNeg(Pointer self);
native void torchNegi(Pointer self);
native Pointer torchIsNaN(Pointer self);
native Pointer torchIsInf(Pointer self);
native Pointer atNormal(
double mean,
double std,
long[] sizes,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native Pointer tensorUniform(
double from,
double to,
long[] sizes,
int dType,
int layout,
int[] device,
boolean requiredGrad);
native Pointer torchEye(
int n, int m, int dType, int layout, int[] device, boolean requiredGrad);
native Pointer torchNNInterpolate(Pointer handle, long[] size, int mode, boolean alignCorners);
native Pointer torchNNLinear(Pointer handle, Pointer weightHandle, Pointer biasHandle);
native Pointer torchNNRelu(Pointer handle);
native Pointer torchNNSoftPlus(Pointer handle);
native Pointer torchNNSoftSign(Pointer handle);
native Pointer torchNNLeakyRelu(Pointer handle, double negativeSlope);
native Pointer torchNNElu(Pointer handle, double alpha);
native Pointer torchNNSelu(Pointer handle);
native Pointer torchNNGelu(Pointer handle);
native Pointer torchNNConvNd(
Pointer inputHandle,
Pointer weightHandle,
Pointer biasHandle,
long[] stride,
long[] padding,
long[] dilation,
int groups);
native Pointer torchNNDropout(Pointer inputHandle, double probability, boolean isTrain);
native Pointer torchNNBatchNorm(
Pointer inputHandle,
Pointer runningMeanHandle,
Pointer runningVarHandle,
            Pointer weightHandle,
Pointer biasHandle,
boolean training,
double momentum,
double eps);
native Pointer torchNNAvgPool(
Pointer inputHandle,
long[] kernel,
long[] stride,
long[] pad,
boolean useCeil,
boolean countIncludePad);
native Pointer torchNNMaxPool(
Pointer inputHandle,
long[] kernelSize,
long[] stride,
long[] padding,
boolean ceilMode);
native Pointer torchNNAdaptiveAvgPool(Pointer inputHandle, long[] outputSize);
native Pointer torchNNAdaptiveMaxPool(Pointer inputHandle, long[] outputSize);
native Pointer torchNNLpPool(
Pointer inputHandle,
double normType,
long[] kernelSize,
long[] stride,
boolean ceilMode);
native boolean torchRequiresGrad(Pointer inputHandle);
native String torchGradFnName(Pointer inputHandle);
native void torchAttachGrad(Pointer inputHandle);
native Pointer torchGrad(Pointer inputHandle);
native Pointer torchDetachGrad(Pointer inputHandle);
native void torchBackward(
Pointer inputHandle, Pointer gradHandle, boolean keepGraph, boolean createGraph);
native Pointer moduleLoad(String path, int[] device);
native void moduleEval(Pointer handle);
native void moduleTrain(Pointer handle);
native Pointer moduleForward(Pointer moduleHandle, Pointer[] arrayHandles, boolean isTrain);
native Pointer iValueCreateFromTensor(Pointer tensorHandle);
native Pointer iValueToTensor(Pointer iValueHandle);
native Pointer[] iValueToTensorList(Pointer iValueHandle);
native Pointer[] iValueToList(Pointer iValueHandle);
native Pointer[] iValueToListFromTuple(Pointer iValueHandle);
native Pointer[] iValueToMap(Pointer iValueHandle);
native String iValueToString(Pointer iValueHandle);
native boolean iValueIsString(Pointer iValueHandle);
native boolean iValueIsTensor(Pointer iValueHandle);
native boolean iValueIsTensorList(Pointer iValueHandle);
native boolean iValueIsList(Pointer iValueHandle);
native boolean iValueIsMap(Pointer iValueHandle);
native boolean iValueIsTuple(Pointer iValueHandle);
native void zeroGrad(Pointer handle);
native void adamUpdate(
Pointer weight,
Pointer grad,
Pointer mean,
Pointer variance,
float lr,
float wd,
float rescaleGrad,
float clipGrad,
float beta1,
float beta2,
float eps);
native void sgdUpdate(
Pointer weight,
Pointer grad,
Pointer state,
float lr,
float wd,
float rescaleGrad,
float clipGrad,
float momentum);
}
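// ---------------------------------------------------------------------------
// Hedged sketch (added for illustration; not part of the original source):
// every native call goes through the LIB singleton; higher layers such as
// JniUtils wrap these raw calls. The handle is assumed to reference a live,
// not-yet-deleted tensor.
final class PyTorchLibraryExample {
    private PyTorchLibraryExample() {}
    static long[] shapeOf(Pointer tensorHandle) {
        // Returns the tensor's dimensions, e.g. {2, 3} for a 2x3 tensor.
        return PyTorchLibrary.LIB.torchSizes(tensorHandle);
    }
}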
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/PtModelZoo.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo;
import ai.djl.Application.CV;
import ai.djl.Application.NLP;
import ai.djl.Application.TimeSeries;
import ai.djl.pytorch.engine.PtEngine;
import ai.djl.repository.RemoteRepository;
import ai.djl.repository.Repository;
import ai.djl.repository.zoo.ModelZoo;
import java.util.Collections;
import java.util.Set;
/**
 * PtModelZoo is a repository that contains PyTorch models, loaded as {@link
 * ai.djl.pytorch.engine.PtSymbolBlock}, for DJL.
*/
public class PtModelZoo extends ModelZoo {
private static final Repository REPOSITORY = new RemoteRepository("PyTorch", DJL_REPO_URL);
public static final String GROUP_ID = "ai.djl.pytorch";
PtModelZoo() {
addModel(
REPOSITORY.model(
CV.ACTION_RECOGNITION,
GROUP_ID,
"Human-Action-Recognition-VIT-Base-patch16-224",
"0.0.1"));
addModel(REPOSITORY.model(CV.IMAGE_CLASSIFICATION, GROUP_ID, "resnet", "0.0.1"));
addModel(
REPOSITORY.model(CV.IMAGE_CLASSIFICATION, GROUP_ID, "resnet18_embedding", "0.0.1"));
addModel(REPOSITORY.model(CV.INSTANCE_SEGMENTATION, GROUP_ID, "yolo11n-seg", "0.0.1"));
addModel(REPOSITORY.model(CV.INSTANCE_SEGMENTATION, GROUP_ID, "yolov8n-seg", "0.0.1"));
addModel(REPOSITORY.model(CV.MASK_GENERATION, GROUP_ID, "sam2-hiera-tiny", "0.0.1"));
addModel(REPOSITORY.model(CV.MASK_GENERATION, GROUP_ID, "sam2-hiera-large", "0.0.1"));
addModel(REPOSITORY.model(CV.OBJECT_DETECTION, GROUP_ID, "ssd", "0.0.1"));
addModel(REPOSITORY.model(CV.OBJECT_DETECTION, GROUP_ID, "yolo11n", "0.0.1"));
addModel(REPOSITORY.model(CV.OBJECT_DETECTION, GROUP_ID, "yolov5s", "0.0.1"));
addModel(REPOSITORY.model(CV.OBJECT_DETECTION, GROUP_ID, "yolov8n", "0.0.1"));
addModel(REPOSITORY.model(CV.POSE_ESTIMATION, GROUP_ID, "yolo11n-pose", "0.0.1"));
addModel(REPOSITORY.model(CV.POSE_ESTIMATION, GROUP_ID, "yolov8n-pose", "0.0.1"));
addModel(
REPOSITORY.model(
CV.ZERO_SHOT_OBJECT_DETECTION, GROUP_ID, "yolov8s-worldv2", "0.0.1"));
addModel(REPOSITORY.model(NLP.QUESTION_ANSWER, GROUP_ID, "bertqa", "0.0.1"));
addModel(REPOSITORY.model(NLP.SENTIMENT_ANALYSIS, GROUP_ID, "distilbert", "0.0.1"));
addModel(REPOSITORY.model(CV.IMAGE_GENERATION, GROUP_ID, "biggan-deep", "0.0.1"));
addModel(REPOSITORY.model(CV.IMAGE_GENERATION, GROUP_ID, "cyclegan", "0.0.1"));
addModel(REPOSITORY.model(CV.SEMANTIC_SEGMENTATION, GROUP_ID, "deeplabv3", "0.0.1"));
addModel(REPOSITORY.model(TimeSeries.FORECASTING, GROUP_ID, "deepar", "0.0.1"));
}
/** {@inheritDoc} */
@Override
public String getGroupId() {
return GROUP_ID;
}
/** {@inheritDoc} */
@Override
public Set<String> getSupportedEngines() {
return Collections.singleton(PtEngine.ENGINE_NAME);
}
}
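// ---------------------------------------------------------------------------
// Hedged usage sketch (added for illustration; not part of the original
// source): loading one of the entries registered above through the standard
// Criteria API. The "ssd" artifact id comes from this file; the
// Image -> DetectedObjects typing is an assumption based on its application.
final class PtModelZooUsageExample {
    private PtModelZooUsageExample() {}
    static void demo()
            throws java.io.IOException, ai.djl.MalformedModelException,
                    ai.djl.repository.zoo.ModelNotFoundException {
        ai.djl.repository.zoo.Criteria<ai.djl.modality.cv.Image, ai.djl.modality.cv.output.DetectedObjects>
                criteria =
                        ai.djl.repository.zoo.Criteria.builder()
                                .setTypes(
                                        ai.djl.modality.cv.Image.class,
                                        ai.djl.modality.cv.output.DetectedObjects.class)
                                .optGroupId(PtModelZoo.GROUP_ID)
                                .optArtifactId("ssd")
                                .build();
        try (ai.djl.repository.zoo.ZooModel<ai.djl.modality.cv.Image, ai.djl.modality.cv.output.DetectedObjects>
                model = criteria.loadModel()) {
            // model.newPredictor() yields a Predictor<Image, DetectedObjects>.
        }
    }
}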
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/PtZooProvider.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooProvider;
/**
 * A PyTorch model zoo provider that implements the {@link ai.djl.repository.zoo.ZooProvider}
 * interface.
*/
public class PtZooProvider implements ZooProvider {
/** {@inheritDoc} */
@Override
public ModelZoo getModelZoo() {
return new PtModelZoo();
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the built-in {@link ai.djl.pytorch.zoo.PtModelZoo}. */
package ai.djl.pytorch.zoo;
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/cv
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/cv/objectdetection/PtSsdTranslator.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.cv.objectdetection;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.translator.ObjectDetectionTranslator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.TranslatorContext;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * A {@link PtSsdTranslator} that post-processes the {@link NDArray} into {@link DetectedObjects} with
* boundaries. Reference implementation: <a
* href="https://github.com/NVIDIA/DeepLearningExamples/tree/master/PyTorch/Detection/SSD">SSD</a>.
*/
public class PtSsdTranslator extends ObjectDetectionTranslator {
private NDArray boxRecover;
private int figSize;
private int[] featSize;
private int[] steps;
private int[] scale;
private int[][] aspectRatio;
/**
* Creates the SSD translator from the given builder.
*
* @param builder the builder for the translator
*/
protected PtSsdTranslator(Builder builder) {
super(builder);
this.figSize = builder.figSize;
this.featSize = builder.featSize;
this.steps = builder.steps;
this.scale = builder.scale;
this.aspectRatio = builder.aspectRatio;
}
/** {@inheritDoc} */
@Override
public void prepare(TranslatorContext ctx) throws Exception {
super.prepare(ctx);
NDManager manager = ctx.getPredictorManager();
boxRecover = boxRecover(manager, figSize, featSize, steps, scale, aspectRatio);
}
/** {@inheritDoc} */
@Override
public DetectedObjects processOutput(TranslatorContext ctx, NDList list) {
double scaleXY = 0.1;
double scaleWH = 0.2;
        // drop the background class prediction (index 0), it is not needed
NDArray prob = list.get(1).swapAxes(0, 1).softmax(1).get(":, 1:");
prob =
NDArrays.stack(
new NDList(
prob.argMax(1).toType(DataType.FLOAT32, false),
prob.max(new int[] {1})));
NDArray boundingBoxes = list.get(0).swapAxes(0, 1);
NDArray bbWH = boundingBoxes.get(":, 2:").mul(scaleWH).exp().mul(boxRecover.get(":, 2:"));
NDArray bbXY =
boundingBoxes
.get(":, :2")
.mul(scaleXY)
.mul(boxRecover.get(":, 2:"))
.add(boxRecover.get(":, :2"))
.sub(bbWH.mul(0.5f));
boundingBoxes = NDArrays.concat(new NDList(bbXY, bbWH), 1);
        // filter out results below the threshold
NDArray cutOff = prob.get(1).gte(threshold);
boundingBoxes = boundingBoxes.transpose().booleanMask(cutOff, 1).transpose();
prob = prob.booleanMask(cutOff, 1);
        // per-class greedy non-maximum suppression, highest probability first
long[] order = prob.get(1).argSort().toLongArray();
double desiredIoU = 0.45;
prob = prob.transpose();
List<String> retNames = new ArrayList<>();
List<Double> retProbs = new ArrayList<>();
List<BoundingBox> retBB = new ArrayList<>();
Map<Integer, List<BoundingBox>> recorder = new ConcurrentHashMap<>();
for (int i = order.length - 1; i >= 0; i--) {
long currMaxLoc = order[i];
float[] classProb = prob.get(currMaxLoc).toFloatArray();
int classId = (int) classProb[0];
double probability = classProb[1];
double[] boxArr = boundingBoxes.get(currMaxLoc).toDoubleArray();
Rectangle rect = new Rectangle(boxArr[0], boxArr[1], boxArr[2], boxArr[3]);
List<BoundingBox> boxes = recorder.getOrDefault(classId, new ArrayList<>());
boolean belowIoU = true;
for (BoundingBox box : boxes) {
if (box.getIoU(rect) > desiredIoU) {
belowIoU = false;
break;
}
}
if (belowIoU) {
boxes.add(rect);
recorder.put(classId, boxes);
String className = classes.get(classId);
retNames.add(className);
retProbs.add(probability);
retBB.add(rect);
}
}
return new DetectedObjects(retNames, retProbs, retBB);
}
NDArray boxRecover(
NDManager manager,
int figSize,
int[] featSize,
int[] steps,
int[] scale,
int[][] aspectRatio) {
double[] fk =
manager.create(steps)
.toType(DataType.FLOAT64, true)
.getNDArrayInternal()
.rdiv((double) figSize)
.toDoubleArray();
List<double[]> defaultBoxes = new ArrayList<>();
for (int idx = 0; idx < featSize.length; idx++) {
double sk1 = scale[idx] * 1.0 / figSize;
double sk2 = scale[idx + 1] * 1.0 / figSize;
double sk3 = Math.sqrt(sk1 * sk2);
List<double[]> array = new ArrayList<>();
array.add(new double[] {sk1, sk1});
array.add(new double[] {sk3, sk3});
for (int alpha : aspectRatio[idx]) {
double w = sk1 * Math.sqrt(alpha);
double h = sk1 / Math.sqrt(alpha);
array.add(new double[] {w, h});
array.add(new double[] {h, w});
}
for (double[] size : array) {
for (int i = 0; i < featSize[idx]; i++) {
for (int j = 0; j < featSize[idx]; j++) {
double cx = (j + 0.5) / fk[idx];
double cy = (i + 0.5) / fk[idx];
defaultBoxes.add(new double[] {cx, cy, size[0], size[1]});
}
}
}
}
double[][] boxes = new double[defaultBoxes.size()][defaultBoxes.get(0).length];
for (int i = 0; i < defaultBoxes.size(); i++) {
boxes[i] = defaultBoxes.get(i);
}
return manager.create(boxes).clip(0.0, 1.0);
}
/**
     * Creates a builder to build a {@code PtSsdTranslator}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/**
     * Creates a builder to build a {@code PtSsdTranslator} with specified arguments.
*
* @param arguments arguments to specify builder options
* @return a new builder
*/
public static Builder builder(Map<String, ?> arguments) {
Builder builder = new Builder();
builder.configPreProcess(arguments);
builder.configPostProcess(arguments);
return builder;
}
/** The builder for SSD translator. */
public static class Builder extends ObjectDetectionBuilder<Builder> {
private int figSize;
private int[] featSize;
private int[] steps;
private int[] scale;
private int[][] aspectRatio;
        /**
         * Sets the box parameters used to reconstruct the anchor boxes.
         *
         * @param figSize image size
         * @param featSize feature sizes
         * @param steps steps to create boxes
         * @param scale scales between the different levels of generated boxes
         * @param aspectRatio aspect ratios that go along with the scales
         * @return this builder
         */
public Builder setBoxes(
int figSize, int[] featSize, int[] steps, int[] scale, int[][] aspectRatio) {
this.figSize = figSize;
this.featSize = featSize;
this.steps = steps;
this.scale = scale;
this.aspectRatio = aspectRatio;
return this;
}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/** {@inheritDoc} */
@Override
protected void configPreProcess(Map<String, ?> arguments) {
super.configPreProcess(arguments);
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
protected void configPostProcess(Map<String, ?> arguments) {
super.configPostProcess(arguments);
threshold = ArgumentsUtil.floatValue(arguments, "threshold", 0.4f);
figSize = ArgumentsUtil.intValue(arguments, "size", 300);
List<Double> list = (List<Double>) arguments.get("featSize");
if (list == null) {
featSize = new int[] {38, 19, 10, 5, 3, 1};
} else {
featSize = list.stream().mapToInt(Double::intValue).toArray();
}
list = (List<Double>) arguments.get("steps");
if (list == null) {
steps = new int[] {8, 16, 32, 64, 100, 300};
} else {
steps = list.stream().mapToInt(Double::intValue).toArray();
}
list = (List<Double>) arguments.get("scale");
if (list == null) {
scale = new int[] {21, 45, 99, 153, 207, 261, 315};
} else {
scale = list.stream().mapToInt(Double::intValue).toArray();
}
List<List<Double>> ratio = (List<List<Double>>) arguments.get("aspectRatios");
if (ratio == null) {
aspectRatio = new int[][] {{2}, {2, 3}, {2, 3}, {2, 3}, {2}, {2}};
} else {
aspectRatio = new int[ratio.size()][];
for (int i = 0; i < aspectRatio.length; ++i) {
aspectRatio[i] = ratio.get(i).stream().mapToInt(Double::intValue).toArray();
}
}
}
/**
* Builds the translator.
*
* @return the new translator
*/
public PtSsdTranslator build() {
validate();
return new PtSsdTranslator(this);
}
}
}
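// ---------------------------------------------------------------------------
// Hedged usage sketch (added for illustration; not part of the original
// source): building the translator with the default SSD box configuration
// from configPostProcess above. The "classes.txt" artifact name and the 0.4f
// threshold mirror the defaults here but are assumptions for any real model.
final class PtSsdTranslatorUsageExample {
    private PtSsdTranslatorUsageExample() {}
    static PtSsdTranslator demo() {
        return PtSsdTranslator.builder()
                .setBoxes(
                        300,
                        new int[] {38, 19, 10, 5, 3, 1},
                        new int[] {8, 16, 32, 64, 100, 300},
                        new int[] {21, 45, 99, 153, 207, 261, 315},
                        new int[][] {{2}, {2, 3}, {2, 3}, {2, 3}, {2}, {2}})
                .optSynsetArtifactName("classes.txt")
                .optThreshold(0.4f)
                .build();
    }
}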
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/cv
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/cv/objectdetection/PtSsdTranslatorFactory.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.cv.objectdetection;
import ai.djl.Model;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.translator.ObjectDetectionTranslatorFactory;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import java.util.Map;
/** A {@link TranslatorFactory} that creates a {@link PtSsdTranslator} instance. */
public class PtSsdTranslatorFactory extends ObjectDetectionTranslatorFactory {
/** {@inheritDoc} */
@Override
protected Translator<Image, DetectedObjects> buildBaseTranslator(
Model model, Map<String, ?> arguments) {
return PtSsdTranslator.builder(arguments).build();
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/cv
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/cv/objectdetection/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the {@link ai.djl.Application.CV#OBJECT_DETECTION} models in the {@link
* ai.djl.pytorch.zoo.PtModelZoo}.
*/
package ai.djl.pytorch.zoo.cv.objectdetection;
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains supplemental classes for the {@link ai.djl.Application.NLP} models in the {@link
* ai.djl.pytorch.zoo.PtModelZoo}.
*/
package ai.djl.pytorch.zoo.nlp;
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/qa/PtBertQATranslator.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.nlp.qa;
import ai.djl.modality.nlp.DefaultVocabulary;
import ai.djl.modality.nlp.Vocabulary;
import ai.djl.modality.nlp.bert.BertFullTokenizer;
import ai.djl.modality.nlp.bert.BertToken;
import ai.djl.modality.nlp.bert.BertTokenizer;
import ai.djl.modality.nlp.qa.QAInput;
import ai.djl.modality.nlp.translator.QATranslator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/** The {@link ai.djl.translate.Translator} for the PyTorch question answering model. */
public class PtBertQATranslator extends QATranslator {
private List<String> tokens;
private Vocabulary vocabulary;
private BertTokenizer tokenizer;
PtBertQATranslator(Builder builder) {
super(builder);
}
/** {@inheritDoc} */
@Override
public void prepare(TranslatorContext ctx) throws IOException {
vocabulary =
DefaultVocabulary.builder()
.addFromTextFile(ctx.getModel().getArtifact(vocab))
.optUnknownToken("[UNK]")
.build();
if (tokenizerName == null) {
tokenizer = new BertTokenizer();
} else {
tokenizer = new BertFullTokenizer(vocabulary, true);
}
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, QAInput input) {
String question = input.getQuestion();
String paragraph = input.getParagraph();
if (toLowerCase) {
question = question.toLowerCase(locale);
paragraph = paragraph.toLowerCase(locale);
}
BertToken token;
if (padding) {
token = tokenizer.encode(question, paragraph, maxLength);
} else {
token = tokenizer.encode(question, paragraph);
}
tokens = token.getTokens();
NDManager manager = ctx.getNDManager();
long[] indices = tokens.stream().mapToLong(vocabulary::getIndex).toArray();
long[] attentionMask = token.getAttentionMask().stream().mapToLong(i -> i).toArray();
NDList ndList = new NDList(3);
ndList.add(manager.create(indices));
ndList.add(manager.create(attentionMask));
if (includeTokenTypes) {
long[] tokenTypes = token.getTokenTypes().stream().mapToLong(i -> i).toArray();
ndList.add(manager.create(tokenTypes));
}
return ndList;
}
/** {@inheritDoc} */
@Override
public String processOutput(TranslatorContext ctx, NDList list) {
NDArray startLogits = list.get(0);
NDArray endLogits = list.get(1);
int startIdx = (int) startLogits.argMax().getLong();
int endIdx = (int) endLogits.argMax().getLong();
if (startIdx >= endIdx) {
int tmp = startIdx;
startIdx = endIdx;
endIdx = tmp;
}
return tokenizer.buildSentence(tokens.subList(startIdx, endIdx + 1));
}
/**
* Creates a builder to build a {@code PtBertQATranslator}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/**
     * Creates a builder to build a {@code PtBertQATranslator} with specified arguments.
*
* @param arguments arguments to specify builder options
* @return a new builder
*/
public static Builder builder(Map<String, ?> arguments) {
Builder builder = new Builder();
builder.configure(arguments);
return builder;
}
/** The builder for Bert QA translator. */
public static class Builder extends BaseBuilder<Builder> {
/**
* Returns the builder.
*
* @return the builder
*/
@Override
protected Builder self() {
return this;
}
/**
* Builds the translator.
*
* @return the new translator
*/
protected PtBertQATranslator build() {
return new PtBertQATranslator(this);
}
}
}
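/*
 * Usage sketch (illustrative, not part of the original source). It assumes a compatible
 * BERT QA model artifact is resolvable by the Criteria lookup; the engine and factory
 * wiring below are assumptions to adjust for a real model:
 *
 *   Criteria<QAInput, String> criteria =
 *           Criteria.builder()
 *                   .setTypes(QAInput.class, String.class)
 *                   .optEngine("PyTorch")
 *                   .optTranslatorFactory(new PtBertQATranslatorFactory())
 *                   .build();
 *   try (ZooModel<QAInput, String> model = criteria.loadModel();
 *           Predictor<QAInput, String> predictor = model.newPredictor()) {
 *       QAInput input =
 *               new QAInput(
 *                       "When was BERT published?",
 *                       "BERT was published by researchers at Google in 2018.");
 *       String answer = predictor.predict(input);
 *   }
 */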
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/qa/PtBertQATranslatorFactory.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.nlp.qa;
import ai.djl.Model;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.qa.QAInput;
import ai.djl.modality.nlp.translator.QATranslator;
import ai.djl.modality.nlp.translator.QaServingTranslator;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.lang.reflect.Type;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link PtBertQATranslator} instance. */
public class PtBertQATranslatorFactory implements TranslatorFactory {
private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();
static {
SUPPORTED_TYPES.add(new Pair<>(QAInput.class, String.class));
SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
}
/** {@inheritDoc} */
@Override
public Set<Pair<Type, Type>> getSupportedTypes() {
return SUPPORTED_TYPES;
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public <I, O> Translator<I, O> newInstance(
Class<I> input, Class<O> output, Model model, Map<String, ?> arguments) {
if (!isSupported(input, output)) {
throw new IllegalArgumentException("Unsupported input/output types.");
}
QATranslator translator = PtBertQATranslator.builder(arguments).build();
if (input == Input.class && output == Output.class) {
return (Translator<I, O>) new QaServingTranslator(translator);
}
return (Translator<I, O>) translator;
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/qa/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the {@link ai.djl.Application.NLP#QUESTION_ANSWER} models in the {@link
* ai.djl.pytorch.zoo.PtModelZoo}.
*/
package ai.djl.pytorch.zoo.nlp.qa;
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/sentimentanalysis/PtDistilBertTranslator.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.nlp.sentimentanalysis;
import ai.djl.Model;
import ai.djl.modality.Classifications;
import ai.djl.modality.nlp.DefaultVocabulary;
import ai.djl.modality.nlp.Vocabulary;
import ai.djl.modality.nlp.bert.BertTokenizer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
/** The {@link ai.djl.translate.Translator} for the PyTorch Sentiment Analysis model. */
public class PtDistilBertTranslator implements Translator<String, Classifications> {
private Vocabulary vocabulary;
private BertTokenizer tokenizer;
/** {@inheritDoc} */
@Override
public void prepare(TranslatorContext ctx) throws IOException {
Model model = ctx.getModel();
URL url = model.getArtifact("distilbert-base-uncased-finetuned-sst-2-english-vocab.txt");
vocabulary =
DefaultVocabulary.builder().addFromTextFile(url).optUnknownToken("[UNK]").build();
tokenizer = new BertTokenizer();
}
/** {@inheritDoc} */
@Override
public Classifications processOutput(TranslatorContext ctx, NDList list) {
NDArray raw = list.singletonOrThrow();
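        // Manual softmax over the two logits: exp(x) / sum(exp(x)).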
NDArray computed = raw.exp().div(raw.exp().sum(new int[] {0}, true));
return new Classifications(Arrays.asList("Negative", "Positive"), computed);
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, String input) {
List<String> tokens = tokenizer.tokenize(input);
long[] indices = tokens.stream().mapToLong(vocabulary::getIndex).toArray();
long[] attentionMask = new long[tokens.size()];
Arrays.fill(attentionMask, 1);
NDManager manager = ctx.getNDManager();
NDArray indicesArray = manager.create(indices);
NDArray attentionMaskArray = manager.create(attentionMask);
return new NDList(indicesArray, attentionMaskArray);
}
}
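/*
 * Usage sketch (illustrative, not part of the original source). It assumes a DistilBert
 * SST-2 model artifact that bundles the vocabulary file named in prepare() above:
 *
 *   Criteria<String, Classifications> criteria =
 *           Criteria.builder()
 *                   .setTypes(String.class, Classifications.class)
 *                   .optEngine("PyTorch")
 *                   .optTranslator(new PtDistilBertTranslator())
 *                   .build();
 *   try (ZooModel<String, Classifications> model = criteria.loadModel();
 *           Predictor<String, Classifications> predictor = model.newPredictor()) {
 *       Classifications result = predictor.predict("I love DJL!");
 *   }
 */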
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/sentimentanalysis/PtDistilBertTranslatorFactory.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.nlp.sentimentanalysis;
import ai.djl.Model;
import ai.djl.modality.Classifications;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.modality.nlp.translator.TextClassificationServingTranslator;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.lang.reflect.Type;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link PtDistilBertTranslator} instance. */
public class PtDistilBertTranslatorFactory implements TranslatorFactory {
private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();
static {
SUPPORTED_TYPES.add(new Pair<>(String.class, Classifications.class));
SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
}
/** {@inheritDoc} */
@Override
public Set<Pair<Type, Type>> getSupportedTypes() {
return SUPPORTED_TYPES;
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public <I, O> Translator<I, O> newInstance(
Class<I> input, Class<O> output, Model model, Map<String, ?> arguments) {
if (!isSupported(input, output)) {
throw new IllegalArgumentException("Unsupported input/output types.");
}
Translator<String, Classifications> translator = new PtDistilBertTranslator();
if (input == Input.class && output == Output.class) {
return (Translator<I, O>) new TextClassificationServingTranslator(translator);
}
return (Translator<I, O>) translator;
}
}
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/sentimentanalysis/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the {@link ai.djl.Application.NLP#SENTIMENT_ANALYSIS} models in the {@link
* ai.djl.pytorch.zoo.PtModelZoo}.
*/
package ai.djl.pytorch.zoo.nlp.sentimentanalysis;
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/textgeneration/PtGptTranslator.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.nlp.textgeneration;
import ai.djl.modality.nlp.generate.CausalLMOutput;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.NoBatchifyTranslator;
import ai.djl.translate.TranslatorContext;
import java.util.stream.Collectors;
/** The {@link ai.djl.translate.Translator} for the PyTorch GPT-2 model. */
public class PtGptTranslator implements NoBatchifyTranslator<NDList, CausalLMOutput> {
private long kvDim;
private int numAttentionHeads;
private int numLayers;
private String tupleName;
/**
* Constructs a new instance of {@code PtGptTranslator}.
*
* @param kvDim the kv dimension
* @param numAttentionHeads the number of attention heads
* @param numLayers the number of layers
*/
public PtGptTranslator(long kvDim, int numAttentionHeads, int numLayers) {
this.kvDim = kvDim;
this.numAttentionHeads = numAttentionHeads;
this.numLayers = numLayers;
tupleName = "past_key_values(" + numLayers + ',' + 2 + ')';
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, NDList input) throws Exception {
// input = [inputIds, posIds, attnMask]
NDManager manager = ctx.getNDManager();
if (input.size() == 3) {
            // In this case, the input has no pastKeyValues. We prepend a dummy pastKeyValues,
            // which is treated as prefix padding, and set the corresponding attnMask entries to
            // zero. There is no need to shift the position ids, since the starting position id,
            // which is 0, doesn't change after prepending the dummy kv_cache.
ctx.setAttachment("useDummyPastKeyValues", Boolean.TRUE);
// Pad the null pastKeyValues with dummy values
initialDummyPastKeyValues(input.get(0), manager, input);
            // Prepend a zero to the attentionMask on the left, corresponding to the padding
long batchSize = input.get(0).getShape().get(0);
NDArray attentionMask =
manager.zeros(new Shape(batchSize, 1), DataType.INT64).concat(input.get(2), -1);
input.set(2, attentionMask);
}
for (int i = 3; i < numLayers * 2 + 3; ++i) {
input.get(i).setName(tupleName);
}
return input;
}
/** {@inheritDoc} */
@Override
public CausalLMOutput processOutput(TranslatorContext ctx, NDList output) throws Exception {
NDArray logitsOutput = output.get(0);
NDManager manager = output.getManager();
NDList pastKeyValuesOutput = output.subNDList(1, numLayers * 2 + 1);
NDArray hiddenStatesOutput;
if (output.size() > numLayers * 2 + 1) {
hiddenStatesOutput = output.get(numLayers * 2 + 1);
} else {
            // This branch is reached only if the language model doesn't output hiddenStates,
            // which are needed only for contrastive search. A warning could also be logged here.
hiddenStatesOutput = manager.zeros(new Shape(1));
}
if (ctx.getAttachment("useDummyPastKeyValues") != null) {
NDIndex index2 = new NDIndex(":, :, 1:, ...");
pastKeyValuesOutput =
new NDList(
pastKeyValuesOutput.stream()
.map(object -> object.get(index2))
.collect(Collectors.toList()));
}
for (NDArray array : pastKeyValuesOutput) {
array.setName(tupleName);
}
return new CausalLMOutput(logitsOutput, hiddenStatesOutput, pastKeyValuesOutput);
}
private void initialDummyPastKeyValues(NDArray inputIds, NDManager manager, NDList list) {
long numBatch = inputIds.getShape().get(0);
for (int i = 0; i < numLayers * 2; ++i) {
NDArray array = manager.zeros(new Shape(numBatch, numAttentionHeads, 1, kvDim));
list.add(array);
}
}
}
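/*
 * Shape sketch (illustrative, not part of the original source). For GPT-2 small,
 * kvDim = 64, numAttentionHeads = 12, and numLayers = 12, so on the first forward
 * pass the incoming [inputIds, posIds, attnMask] list gets 12 * 2 = 24 dummy kv
 * tensors of shape (batch, 12, 1, 64) appended, each named "past_key_values(12,2)",
 * and the attention mask is left-padded with one zero column; later passes carry
 * the real pastKeyValues returned by processOutput.
 *
 *   PtGptTranslator translator = new PtGptTranslator(64, 12, 12); // GPT-2 small
 */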
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/textgeneration/PtGptTranslatorFactory.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.zoo.nlp.textgeneration;
import ai.djl.Model;
import ai.djl.modality.nlp.generate.CausalLMOutput;
import ai.djl.ndarray.NDList;
import ai.djl.translate.ArgumentsUtil;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.lang.reflect.Type;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link PtGptTranslator} instance. */
public class PtGptTranslatorFactory implements TranslatorFactory {
private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();
static {
SUPPORTED_TYPES.add(new Pair<>(NDList.class, CausalLMOutput.class));
}
/** {@inheritDoc} */
@Override
public Set<Pair<Type, Type>> getSupportedTypes() {
return SUPPORTED_TYPES;
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public <I, O> Translator<I, O> newInstance(
Class<I> input, Class<O> output, Model model, Map<String, ?> arguments) {
if (!isSupported(input, output)) {
throw new IllegalArgumentException("Unsupported input/output types.");
}
long kvDim = ArgumentsUtil.longValue(arguments, "kvDim", 64);
int numAttentionHeads = ArgumentsUtil.intValue(arguments, "numAttentionHeads", 12);
int numLayers = ArgumentsUtil.intValue(arguments, "numLayers", 12);
return (Translator<I, O>) (new PtGptTranslator(kvDim, numAttentionHeads, numLayers));
}
}
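/*
 * Usage sketch (illustrative, not part of the original source). The defaults above
 * match GPT-2 small; arguments can override them, e.g. for a larger variant:
 *
 *   Map<String, Object> args = new HashMap<>();
 *   args.put("kvDim", 64);
 *   args.put("numAttentionHeads", 16);
 *   args.put("numLayers", 24);
 *   Translator<NDList, CausalLMOutput> translator =
 *           new PtGptTranslatorFactory()
 *                   .newInstance(NDList.class, CausalLMOutput.class, model, args);
 */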
|
0
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp
|
java-sources/ai/djl/pytorch/pytorch-model-zoo/0.34.0/ai/djl/pytorch/zoo/nlp/textgeneration/package-info.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes for the {@link ai.djl.Application.NLP#TEXT_GENERATION} models. */
package ai.djl.pytorch.zoo.nlp.textgeneration;
|
0
|
java-sources/ai/djl/pytorch/pytorch-native-auto/1.9.1/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-native-auto/1.9.1/ai/djl/pytorch/jni/NativeHelper.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.pytorch.jni;
/** A helper class that allows the engine shared library to be loaded from a different class loader. */
public final class NativeHelper {
private NativeHelper() {}
/**
     * Loads the native shared library from a file.
*
* @param path the file to load
*/
public static void load(String path) {
System.load(path); // NOPMD
}
}
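/*
 * Usage sketch (the path below is illustrative): call load from the class loader
 * that should own the native library, e.g.
 *
 *   NativeHelper.load("/opt/djl/pytorch/native/lib/libdjl_torch.so");
 */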
|
0
|
java-sources/ai/djl/pytorch/pytorch-native-auto/1.9.1/ai/djl/pytorch
|
java-sources/ai/djl/pytorch/pytorch-native-auto/1.9.1/ai/djl/pytorch/jni/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains a helper class to load the native shared library. */
package ai.djl.pytorch.jni;
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/AbstractRepository.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipInputStream;
/**
* The {@code AbstractRepository} is the shared base for implementers of the {@link Repository}
* interface.
*
* @see Repository
*/
public abstract class AbstractRepository implements Repository {
/** {@inheritDoc} */
@Override
public InputStream openStream(Artifact.Item item, String path) throws IOException {
return Files.newInputStream(Paths.get(resolvePath(item, path)));
}
/** {@inheritDoc} */
@Override
public String[] listDirectory(Artifact.Item item, String path) throws IOException {
return Paths.get(resolvePath(item, path)).toFile().list();
}
/** {@inheritDoc} */
@Override
public Path getFile(Artifact.Item item, String path) throws IOException {
return Paths.get(resolvePath(item, path)).toAbsolutePath();
}
protected URI resolvePath(Artifact.Item item, String path) throws IOException {
Artifact artifact = item.getArtifact();
URI artifactUri = artifact.getResourceUri();
String itemUri = item.getUri();
// Resolve cached item
        if ((itemUri != null && URI.create(itemUri).isAbsolute()) || isRemote()) {
Path cacheDir = getCacheDirectory();
Path resourceDir = cacheDir.resolve(artifactUri.getPath());
String type = item.getType();
String fileName = item.getName();
Path cachedFile;
if ("dir".equals(type)) {
if (!fileName.isEmpty()) {
cachedFile = resourceDir.resolve(fileName);
} else {
cachedFile = resourceDir;
}
return cachedFile.resolve(path).toUri();
} else {
return resourceDir.resolve(fileName).toUri();
}
}
// Resolve metadata item
String uriSuffix = itemUri != null ? itemUri : item.getName();
return getBaseUri().resolve(artifactUri.resolve(uriSuffix));
}
/** {@inheritDoc} */
@Override
public void prepare(Artifact artifact, Progress progress) throws IOException {
Path cacheDir = getCacheDirectory();
URI resourceUri = artifact.getResourceUri();
Path resourceDir = cacheDir.resolve(resourceUri.getPath());
if (Files.exists(resourceDir)) {
// files have been downloaded already.
return;
}
Metadata metadata = artifact.getMetadata();
URI baseUri = metadata.getRepositoryUri();
Map<String, Artifact.Item> files = artifact.getFiles();
Path parentDir = resourceDir.toAbsolutePath().getParent();
if (parentDir == null) {
throw new AssertionError("Parent path should never be null: " + resourceDir.toString());
}
Files.createDirectories(parentDir);
Path tmp = Files.createTempDirectory(parentDir, resourceDir.toFile().getName());
if (progress != null) {
long totalSize = 0;
for (Artifact.Item item : files.values()) {
totalSize += item.getSize();
}
progress.reset("Downloading", totalSize);
}
try {
for (Artifact.Item item : files.values()) {
download(tmp, baseUri, item, progress);
}
Files.move(tmp, resourceDir, StandardCopyOption.ATOMIC_MOVE);
} finally {
Utils.deleteQuietly(tmp);
if (progress != null) {
progress.end();
}
}
}
/** {@inheritDoc} */
@Override
public Path getCacheDirectory() throws IOException {
String cacheDir = System.getProperty("DJL_CACHE_DIR");
if (cacheDir == null || cacheDir.isEmpty()) {
cacheDir = System.getenv("DJL_CACHE_DIR");
if (cacheDir == null || cacheDir.isEmpty()) {
String userHome = System.getProperty("user.home");
cacheDir = userHome + "/.djl.ai/cache";
}
}
Path dir = Paths.get(cacheDir, "repo");
if (Files.notExists(dir)) {
Files.createDirectories(dir);
} else if (!Files.isDirectory(dir)) {
            throw new IOException("Failed to initialize cache directory: " + dir.toString());
}
return dir;
}
private void download(Path tmp, URI baseUri, Artifact.Item item, Progress progress)
throws IOException {
URI fileUri = URI.create(item.getUri());
if (!fileUri.isAbsolute()) {
fileUri = getBaseUri().resolve(baseUri).resolve(fileUri);
}
try (InputStream is = fileUri.toURL().openStream()) {
ProgressInputStream pis = new ProgressInputStream(is, progress);
String fileName = item.getName();
String extension = item.getExtension();
if ("dir".equals(item.getType())) {
Path dir;
if (!fileName.isEmpty()) {
                // honor the name set in metadata.json
dir = tmp.resolve(fileName);
Files.createDirectories(dir);
} else {
dir = tmp;
}
if (!"zip".equals(extension)) {
throw new IOException("File type is not supported: " + extension);
}
ZipUtils.unzip(pis, dir);
} else {
Path file = tmp.resolve(fileName);
if ("zip".equals(extension)) {
ZipInputStream zis = new ZipInputStream(pis);
zis.getNextEntry();
Files.copy(zis, file);
} else if ("gzip".equals(extension)) {
Files.copy(new GZIPInputStream(pis), file);
} else if (extension.isEmpty()) {
Files.copy(pis, file);
} else {
throw new IOException("File type is not supported: " + extension);
}
}
pis.validateChecksum(item);
}
}
/**
* A {@code ProgressInputStream} is a wrapper around an {@link InputStream} that also uses
* {@link Progress}.
*/
private static final class ProgressInputStream extends InputStream {
private DigestInputStream dis;
private Progress progress;
/**
* Constructs a new ProgressInputStream with an input stream and progress.
*
* @param is the input stream
* @param progress the (optionally null) progress tracker
*/
public ProgressInputStream(InputStream is, Progress progress) {
MessageDigest md;
try {
md = MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
throw new AssertionError("SHA1 algorithm not found.", e);
}
dis = new DigestInputStream(is, md);
this.progress = progress;
}
/** {@inheritDoc} */
@Override
public int read() throws IOException {
int ret = dis.read();
if (progress != null) {
if (ret >= 0) {
progress.increment(1);
} else {
progress.end();
}
}
return ret;
}
/** {@inheritDoc} */
@Override
public int read(byte[] b, int off, int len) throws IOException {
int size = dis.read(b, off, len);
if (progress != null) {
progress.increment(size);
}
return size;
}
private void validateChecksum(Artifact.Item item) throws IOException {
            // drain the InputStream to compute the correct sha1 hash
Utils.toByteArray(dis);
String sha1 = Hex.toHexString(dis.getMessageDigest().digest());
if (!sha1.equalsIgnoreCase(item.getSha1Hash())) {
throw new IOException(
"Checksum error: "
+ item.getName()
+ ", expected sha1: "
+ item.getSha1Hash()
+ ", actual sha1: "
+ sha1);
}
}
/** {@inheritDoc} */
@Override
public void close() throws IOException {
dis.close();
}
}
}
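/*
 * Cache-location sketch (illustrative, derived from getCacheDirectory() above): the
 * directory is resolved from the "DJL_CACHE_DIR" system property, then the
 * DJL_CACHE_DIR environment variable, then ~/.djl.ai/cache, with artifacts kept
 * under its "repo" subdirectory:
 *
 *   System.setProperty("DJL_CACHE_DIR", "/tmp/djl-cache");
 *   // prepare(artifact, progress) now downloads into /tmp/djl-cache/repo/...
 */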
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/Anchor.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* An {@code Anchor} represents a multi-level category of {@link Metadata} in a {@link MRL}.
*
* <p>The paths can have subpaths separated by slashes such as "dataset/cv" and "dataset/nlp". The
* anchors translate to directories. Directories sharing a path prefix can be used to organize a
* multi-level hierarchy of categories.
*
* @see MRL
*/
class Anchor {
public static final Anchor MODEL = new Anchor("model");
public static final Anchor DATASET = new Anchor("dataset");
private String[] path;
/**
* Constructs an anchor from a split path.
*
* @param path a split path where each element in the path corresponds to a directory
*/
public Anchor(String... path) {
this.path = path;
}
/**
* Creates an anchor from a file path string.
*
* @param anchor the string containing each level separated by "/"
* @return the new anchor
*/
public static Anchor parse(String anchor) {
String[] tokens = anchor.split("[:/]");
return new Anchor(tokens);
}
/**
* Splits path elements that contain multiple levels into separate components of the path.
*
* <p>For example, it will convert path("a/b","c","d/e/f") to path("a", "b", "c", "d", "e",
* "f").
*
* @return a new split anchor
*/
public Anchor normalize() {
List<String> parts = new ArrayList<>();
for (String s : path) {
String[] tokens = s.split("/");
Collections.addAll(parts, tokens);
}
return new Anchor(parts.toArray(new String[0]));
}
/**
* Returns the path element at the given index.
*
* @param index the index to retrieve
* @return the path element at the given index
*/
public String get(int index) {
return path[index];
}
/**
* Returns the path as a single "/" separated string.
*
* @return the path as a single "/" separated string
*/
public String getPath() {
return String.join("/", path);
}
/**
* Returns the parent {@code Anchor} of this anchor.
*
* @return the parent {@code Anchor} of this anchor
*/
public Anchor getParent() {
String[] parent = Arrays.copyOfRange(path, 0, path.length - 1);
return new Anchor(parent);
}
/**
* Joins two anchors together.
*
     * <p>When joined, this.path is the prefix and other.path is the suffix of the resulting
* path.
*
* @param other the path to append
* @return the joined path
*/
public Anchor resolve(Anchor other) {
String[] newPath = new String[path.length + other.path.length];
System.arraycopy(path, 0, newPath, 0, path.length);
System.arraycopy(other.path, 0, newPath, path.length, other.path.length);
return new Anchor(newPath);
}
/**
* Appends path items to the anchor.
*
* @param others the path elements to append
* @return this anchor
*/
public Anchor resolve(String... others) {
Anchor anchor = this;
for (String other : others) {
anchor = anchor.resolve(Anchor.parse(other));
}
return anchor;
}
}
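/*
 * Example (illustrative, not part of the original source):
 *
 *   Anchor anchor = Anchor.MODEL.resolve("cv/image_classification");
 *   anchor.getPath();             // "model/cv/image_classification"
 *   anchor.getParent().getPath(); // "model/cv"
 */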
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/Artifact.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.io.Serializable;
import java.net.URI;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* An {@code Artifact} is a set of data files such as a model or dataset.
*
* @see Repository
*/
@SuppressWarnings("PMD.LooseCoupling")
public class Artifact {
private transient String metadataVersion;
private String version;
private boolean snapshot;
private String name;
private LinkedHashMap<String, String> properties;
private LinkedHashMap<String, Object> arguments;
private Map<String, Item> files;
private transient Metadata metadata;
private transient Version cache;
/**
* Returns the metadata format version.
*
* @return the metadata format version
*/
public String getMetadataVersion() {
return metadataVersion;
}
/**
* Sets the metadata format version.
*
* @param metadataVersion the new version
*/
public void setMetadataVersion(String metadataVersion) {
this.metadataVersion = metadataVersion;
}
/**
* Returns the artifact version.
*
* @return the artifact version
* @see Version
*/
public String getVersion() {
return version;
}
/**
* Sets the artifact version.
*
* @param version the new version
* @see Version
*/
public void setVersion(String version) {
this.version = version;
}
/**
* Returns true if the artifact is a snapshot.
*
* @return true if the artifact is a snapshot
* @see Version
*/
public boolean isSnapshot() {
return snapshot;
}
/**
* Sets if the artifact is a snapshot.
*
* @param snapshot true to make the artifact a snapshot
* @see Version
*/
public void setSnapshot(boolean snapshot) {
this.snapshot = snapshot;
}
/**
* Returns the artifact name.
*
* @return the artifact name
*/
public String getName() {
return name;
}
/**
* Sets the artifact name.
*
* @param name the new name
*/
public void setName(String name) {
this.name = name;
}
/**
* Returns the artifact properties.
*
* @return the artifact properties
* @see Repository
*/
public Map<String, String> getProperties() {
if (properties == null) {
return Collections.emptyMap();
}
return properties;
}
/**
* Sets the artifact properties.
*
* @param properties the new properties
* @see Repository
*/
public void setProperties(LinkedHashMap<String, String> properties) {
this.properties = properties;
}
/**
* Returns the artifact arguments.
*
* @param override the override configurations to the default arguments
* @return the artifact arguments
* @see Repository
*/
@SuppressWarnings("unchecked")
public Map<String, Object> getArguments(Map<String, Object> override) {
if (arguments == null) {
if (override != null) {
return override;
}
return Collections.emptyMap();
}
        if (override != null) {
            // Merge the overrides into a copy so the artifact's own arguments are not mutated.
            Map<String, Object> merged = (Map<String, Object>) arguments.clone();
            merged.putAll(override);
            return merged;
        }
        return arguments;
}
/**
* Sets the artifact arguments.
*
* @param arguments the new arguments
* @see Repository
*/
public void setArguments(LinkedHashMap<String, Object> arguments) {
this.arguments = arguments;
}
/**
* Returns the metadata containing this artifact.
*
* @return the metadata containing this artifact
* @see Repository
*/
public Metadata getMetadata() {
return metadata;
}
/**
* Sets the associated metadata.
*
* @param metadata the new metadata
* @see Repository
*/
public void setMetadata(Metadata metadata) {
this.metadata = metadata;
}
/**
* Returns the location of the resource directory.
*
* @return the location of the resource directory
*/
public URI getResourceUri() {
URI uri = metadata.getRepositoryUri();
if (properties != null) {
for (String values : properties.values()) {
uri = uri.resolve(values + '/');
}
}
if (version == null) {
return uri;
}
return uri.resolve(version + '/');
}
/**
* Returns all the file items in the artifact.
*
* @return all the file items in the artifact
*/
public Map<String, Item> getFiles() {
if (files == null) {
return Collections.emptyMap();
}
for (Map.Entry<String, Item> file : files.entrySet()) {
file.getValue().setArtifact(this);
if (file.getValue().name == null && "dir".equals(file.getValue().getType())) {
file.getValue().name = file.getKey();
}
}
return files;
}
/**
* Sets the file items.
*
* @param files the replacement file items
*/
public void setFiles(Map<String, Item> files) {
this.files = files;
}
/**
* Returns true if every filter matches the corresponding property.
*
* @param filter the values to check against the properties
* @return true if every filter matches the corresponding property
* @see Repository
*/
public boolean hasProperties(Map<String, String> filter) {
if (filter == null || filter.isEmpty()) {
return true;
}
if (properties == null || properties.isEmpty()) {
return false;
}
for (Map.Entry<String, String> entry : filter.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
if (!value.equals(properties.get(key))) {
return false;
}
}
return true;
}
/**
* Returns the artifact version as a {@link Version}.
*
* @return the artifact version as a {@link Version}
* @see Version
*/
public Version getParsedVersion() {
if (cache == null) {
cache = new Version(version);
}
return cache;
}
/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(100);
if (metadata != null) {
sb.append(metadata.getGroupId())
.append(':')
.append(metadata.getArtifactId())
.append(':');
} else {
sb.append(name).append(':');
}
sb.append(version).append(" {");
if (properties != null) {
boolean first = true;
for (Map.Entry<String, String> entry : properties.entrySet()) {
if (first) {
first = false;
} else {
sb.append(',');
}
sb.append('"')
.append(entry.getKey())
.append("\":\"")
.append(entry.getValue())
.append('"');
}
}
sb.append('}');
return sb.toString();
}
/** A file (possibly compressed) within an {@link Artifact}. */
public static final class Item {
private String uri;
private String sha1Hash;
private String name;
private String type;
private long size;
private String extension;
private Artifact artifact;
/**
* Returns the URI of the item.
*
* @return the URI of the item
*/
public String getUri() {
return uri;
}
/**
* Sets the URI of the item.
*
* @param uri the new URI
*/
public void setUri(String uri) {
this.uri = uri;
}
/**
* Returns the hash of the item.
*
* <p>This value is from the metadata, but should be checked when the item is downloaded.
*
* @return the sha1 hash
*/
public String getSha1Hash() {
return sha1Hash;
}
/**
* Sets the sha1hash of the item.
*
* @param sha1Hash the new hash
*/
public void setSha1Hash(String sha1Hash) {
this.sha1Hash = sha1Hash;
}
/**
         * Returns the type of the item.
*
* <p>The valid types are:
*
* <ul>
* <li>"file" - used for single files and gzip compressed files
* <li>"dir" - used for extracted zip folders
* </ul>
*
* @return the type string
*/
public String getType() {
if (type == null) {
getExtension();
if ("zip".equals(extension)) {
type = "dir";
} else {
type = "file";
}
}
return type;
}
/**
* Sets the type of the item.
*
* @param type the type
* @see Item#getType()
*/
public void setType(String type) {
this.type = type;
}
/**
* Returns the file size.
*
* @return the file size in bytes
*/
public long getSize() {
return size;
}
/**
* Sets the file size.
*
* @param size the new size in bytes
*/
public void setSize(long size) {
this.size = size;
}
/**
* Returns the item name.
*
* @return the item name
*/
public String getName() {
if (name == null) {
if ("dir".equals(getType())) {
name = "";
} else {
int pos = uri.lastIndexOf('/');
if (pos >= 0) {
name = uri.substring(pos + 1);
} else {
name = uri;
}
if (name.endsWith(".z") || name.endsWith(".gz") || name.endsWith(".zip")) {
pos = name.lastIndexOf('.');
if (pos > 0) {
name = name.substring(0, pos);
}
}
}
}
return name;
}
/**
* Sets the item name.
*
* @param name the new name
*/
public void setName(String name) {
this.name = name;
}
/**
* Returns the type of file extension.
*
* @return the type as "zip", "gzip", or "" for other
*/
public String getExtension() {
if (extension == null) {
if (uri.endsWith(".zip")) {
extension = "zip";
} else if (uri.endsWith(".gz") || uri.endsWith(".z")) {
extension = "gzip";
} else {
extension = "";
}
}
return extension;
}
/**
* Sets the file extension.
*
* @param extension the new extension
*/
public void setExtension(String extension) {
this.extension = extension;
}
/**
* Returns the artifact associated with this item.
*
* @return the artifact
*/
public Artifact getArtifact() {
return artifact;
}
/**
* Sets the artifact associated with this item.
*
* @param artifact the new artifact
*/
public void setArtifact(Artifact artifact) {
this.artifact = artifact;
}
}
/** A {@link Comparator} to compare artifacts based on their version numbers. */
public static final class VersionComparator implements Comparator<Artifact>, Serializable {
private static final long serialVersionUID = 1L;
/** {@inheritDoc} */
@Override
public int compare(Artifact o1, Artifact o2) {
return o1.getParsedVersion().compareTo(o2.getParsedVersion());
}
}
}
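/*
 * Inference example (illustrative): for an Item whose uri is
 * "https://example.com/models/resnet18.pt.gz" and which has no explicit name, type,
 * or extension set, getExtension() returns "gzip", getType() returns "file", and
 * getName() strips the compression suffix, returning "resnet18.pt".
 */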
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/Hex.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
/** {@code Hex} is a set of utilities for working with Hexadecimal Strings. */
public final class Hex {
private static final char[] HEX_CHARS = {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'
};
private Hex() {}
/**
* Converts a byte array to a hex string.
*
* @param block the bytes to convert
* @return the converted hex String
*/
public static String toHexString(byte[] block) {
if (block == null) {
return null;
}
StringBuilder buf = new StringBuilder();
for (byte aBlock : block) {
int high = ((aBlock & 0xf0) >> 4);
int low = (aBlock & 0x0f);
buf.append(HEX_CHARS[high]);
buf.append(HEX_CHARS[low]);
}
return buf.toString();
}
/**
* Converts a hex string to a byte array.
*
* @param s the string to convert
* @return the converted byte array
*/
public static byte[] toByteArray(String s) {
int len = s.length();
if ((len % 2) != 0) {
throw new NumberFormatException("Invalid Hex String");
}
byte[] ret = new byte[len / 2];
for (int i = 0; i < len / 2; i++) {
ret[i] = (byte) Integer.parseInt(s.substring(i * 2, i * 2 + 2), 16);
}
return ret;
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/LocalRepository.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.io.IOException;
import java.io.Reader;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
/**
* A {@code LocalRepository} is a {@link Repository} located in a filesystem directory.
*
* @see Repository
*/
public class LocalRepository extends AbstractRepository {
private String name;
private Path path;
/**
* (Internal) Constructs a {@code LocalRepository} from the path with inferred name.
*
* <p>Use {@link Repository#newInstance(String, String)}.
*
* @param path the path to the repository
*/
public LocalRepository(Path path) {
this(path.toFile().getName(), path);
}
/**
     * (Internal) Constructs a {@code LocalRepository} with the given name and path.
*
* <p>Use {@link Repository#newInstance(String, String)}.
*
* @param name the name of the repository
* @param path the path to the repository
*/
public LocalRepository(String name, Path path) {
this.name = name;
this.path = path;
}
/** {@inheritDoc} */
@Override
public boolean isRemote() {
return false;
}
/** {@inheritDoc} */
@Override
public String getName() {
return name;
}
/** {@inheritDoc} */
@Override
public URI getBaseUri() {
return path.toUri();
}
/** {@inheritDoc} */
@Override
public Metadata locate(MRL mrl) throws IOException {
URI uri = mrl.toURI();
Path base = path.resolve(uri.getPath());
Path file = base.resolve("metadata.json");
if (!Files.isRegularFile(file)) {
return null;
}
try (Reader reader = Files.newBufferedReader(file)) {
Metadata metadata = GSON.fromJson(reader, Metadata.class);
metadata.setRepositoryUri(uri);
return metadata;
}
}
/** {@inheritDoc} */
@Override
public Artifact resolve(MRL mrl, String version, Map<String, String> filter)
throws IOException {
Metadata metadata = locate(mrl);
VersionRange range = VersionRange.parse(version);
List<Artifact> artifacts = metadata.search(range, filter);
if (artifacts.isEmpty()) {
return null;
}
// TODO: find highest version.
return artifacts.get(0);
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/MRL.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.Application;
import java.net.URI;
/**
* The {@code MRL} (Machine learning Resource Locator) is a pointer to a {@link Metadata} "resource"
* on a machine learning {@link Repository}.
*
 * <p>Each mrl references a single metadata file (parsed to {@link Metadata}) and the collection of
* artifacts located within it. Those artifacts all share the same groupId and artifactId, but can
* differ based on the name and properties.
*
* <p>The mrl consists of three different properties:
*
* <ul>
* <li>baseAnchor - The base anchor is used to organize metadata and artifacts into (multi-level)
* categories (See {@link Anchor}).
* <li>groupId - The group id identifies the group publishing the artifacts using a reverse domain
* name system.
* <li>artifactId - The artifact id identifies the different artifacts published by a single
* group.
* </ul>
*/
public class MRL {
private Anchor baseAnchor;
private String groupId;
private String artifactId;
/**
* Constructs an MRL.
*
* @param baseAnchor the desired anchor
* @param groupId the desired groupId
* @param artifactId the desired artifactId
*/
MRL(Anchor baseAnchor, String groupId, String artifactId) {
this.baseAnchor = baseAnchor;
this.groupId = groupId;
this.artifactId = artifactId;
}
/**
* Creates a model {@code MRL} with specified application.
*
* @param application the desired application
* @param groupId the desired groupId
* @param artifactId the desired artifactId
* @return a model {@code MRL}
*/
public static MRL model(Application application, String groupId, String artifactId) {
Anchor baseAnchor = Anchor.MODEL.resolve(application.getPath());
return new MRL(baseAnchor, groupId, artifactId);
}
/**
* Creates a dataset {@code MRL} with specified application.
*
* @param application the desired application
* @param groupId the desired groupId
* @param artifactId the desired artifactId
* @return a dataset {@code MRL}
*/
public static MRL dataset(Application application, String groupId, String artifactId) {
Anchor baseAnchor = Anchor.DATASET.resolve(application.getPath()).getParent();
return new MRL(baseAnchor, groupId, artifactId);
}
/**
* Returns the URI to the metadata location (used for {@link Repository} implementations).
*
* @return the URI to the metadata location
*/
public URI toURI() {
String groupIdPath = groupId.replace('.', '/');
Anchor anchor = baseAnchor.resolve(groupIdPath, artifactId);
return URI.create(anchor.getPath() + '/');
}
/**
* Returns the base anchor.
*
* @return the base anchor
*/
public Anchor getBaseAnchor() {
return baseAnchor;
}
/**
* Sets the base anchor.
*
* @param baseAnchor the new base anchor
*/
public void setBaseAnchor(Anchor baseAnchor) {
this.baseAnchor = baseAnchor;
}
/**
* Returns the groupId.
*
* @return the groupId
*/
public String getGroupId() {
return groupId;
}
/**
* Sets the groupId.
*
* @param groupId the new groupId
*/
public void setGroupId(String groupId) {
this.groupId = groupId;
}
/**
* Returns the artifactId.
*
* @return the artifactId
*/
public String getArtifactId() {
return artifactId;
}
/**
* Sets the artifactId.
*
* @param artifactId the new artifactId
*/
public void setArtifactId(String artifactId) {
this.artifactId = artifactId;
}
/** {@inheritDoc} */
@Override
public String toString() {
return toURI().toString();
}
}
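/*
 * Example (illustrative, not part of the original source):
 *
 *   MRL mrl = MRL.model(Application.CV.IMAGE_CLASSIFICATION, "ai.djl.pytorch", "resnet");
 *   mrl.toURI(); // "model/cv/image_classification/ai/djl/pytorch/resnet/"
 */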
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/Metadata.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.net.URI;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* A {@code Metadata} is a collection of {@link Artifact}s with unified metadata (including {@link
* MRL}) that are stored in the same "metadata.json" file.
*
* <p>All of the artifacts located within the metadata share the data defined at the metadata level
 * such as name, description, and website. The key difference between artifacts within the same
 * metadata is their properties.
*
* @see Repository
*/
public class Metadata {
private String metadataVersion;
private String groupId;
private String artifactId;
private String name;
private String description;
private String website;
private List<Artifact> artifacts;
private String checksum;
private Date lastUpdated;
private transient URI repositoryUri;
/**
* Returns the artifacts matching the version and property requirements.
*
* @param versionRange the version range for the artifact
* @param filter the property filter
* @return the matching artifacts
*/
public List<Artifact> search(VersionRange versionRange, Map<String, String> filter) {
List<Artifact> results = versionRange.matches(artifacts);
if (filter == null) {
return results;
}
return results.stream().filter(a -> a.hasProperties(filter)).collect(Collectors.toList());
}
/**
* Returns the metadata format version.
*
* @return the metadata format version
*/
public String getMetadataVersion() {
return metadataVersion;
}
/**
* Sets the metadata format version.
*
* @param metadataVersion the new version
*/
public void setMetadataVersion(String metadataVersion) {
this.metadataVersion = metadataVersion;
}
/**
* Returns the groupId.
*
* @return the groupId
*/
public String getGroupId() {
return groupId;
}
/**
* Sets the groupId.
*
* @param groupId the new groupId
*/
public void setGroupId(String groupId) {
this.groupId = groupId;
}
/**
* Returns the artifactId.
*
* @return the artifactId
*/
public String getArtifactId() {
return artifactId;
}
/**
* Sets the artifactId.
*
* @param artifactId the new artifactId
*/
public void setArtifactId(String artifactId) {
this.artifactId = artifactId;
}
/**
* Returns the metadata-level name.
*
* @return the metadata-level name
*/
public String getName() {
return name;
}
/**
* Sets the metadata-level name.
*
* @param name the new metadata-level name
*/
public void setName(String name) {
this.name = name;
}
/**
* Returns the description.
*
* @return the description
*/
public String getDescription() {
return description;
}
/**
* Sets the description.
*
* @param description the description
*/
public void setDescription(String description) {
this.description = description;
}
/**
* Returns the website.
*
* @return the website
*/
public String getWebsite() {
return website;
}
/**
* Sets the website.
*
* @param website the website
*/
public void setWebsite(String website) {
this.website = website;
}
/**
* Returns all the artifacts in the metadata.
*
* @return the artifacts in the metadata
*/
public List<Artifact> getArtifacts() {
return artifacts;
}
/**
* Sets the artifacts for the metadata.
*
* @param artifacts the new artifacts
*/
public void setArtifacts(List<Artifact> artifacts) {
this.artifacts = artifacts;
}
/**
* Returns the metadata checksum.
*
* @return the checksum
*/
public String getChecksum() {
return checksum;
}
/**
* Sets the metadata checksum.
*
* @param checksum the new checksum
*/
public void setChecksum(String checksum) {
this.checksum = checksum;
}
/**
* Returns the last update date for the metadata.
*
* @return the last update date
*/
public Date getLastUpdated() {
return lastUpdated;
}
/**
* Sets the last update date for the metadata.
*
* @param lastUpdated the new last update date
*/
public void setLastUpdated(Date lastUpdated) {
this.lastUpdated = lastUpdated;
}
/**
* Returns the URI to the repository storing the metadata.
*
* @return the URI to the repository storing the metadata
*/
public URI getRepositoryUri() {
return repositoryUri;
}
/**
* Sets the repository URI.
*
* @param repositoryUri the new URI
*/
public void setRepositoryUri(URI repositoryUri) {
this.repositoryUri = repositoryUri;
if (artifacts != null) {
for (Artifact artifact : artifacts) {
artifact.setMetadata(this);
}
}
}
}
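/*
 * Usage sketch (illustrative; the range syntax and property key are assumptions):
 *
 *   VersionRange range = VersionRange.parse("[0.0.1,)");
 *   Map<String, String> filter = new HashMap<>();
 *   filter.put("dataset", "imagenet");
 *   List<Artifact> matches = metadata.search(range, filter);
 */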
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/RemoteRepository.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Writer;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* A {@code RemoteRepository} is a {@link Repository} located on a remote web server.
*
* @see Repository
*/
public class RemoteRepository extends AbstractRepository {
private static final long ONE_DAY = Duration.ofDays(1).toMillis();
private String name;
private URI uri;
/**
* (Internal) Constructs a remote repository.
*
* <p>Use {@link Repository#newInstance(String, String)}.
*
* @param name the repository name
* @param uri the repository location
*/
public RemoteRepository(String name, URI uri) {
this.name = name;
this.uri = uri;
}
/** {@inheritDoc} */
@Override
public boolean isRemote() {
return true;
}
/** {@inheritDoc} */
@Override
public String getName() {
return name;
}
/** {@inheritDoc} */
@Override
public URI getBaseUri() {
return uri;
}
/** {@inheritDoc} */
@Override
public Metadata locate(MRL mrl) throws IOException {
URI mrlUri = mrl.toURI();
URI file = uri.resolve(mrlUri.getPath() + "/metadata.json");
Path cacheDir = getCacheDirectory().resolve(mrlUri.getPath());
if (!Files.exists(cacheDir)) {
Files.createDirectories(cacheDir);
}
Path cacheFile = cacheDir.resolve("metadata.json");
if (Files.exists(cacheFile)) {
try (Reader reader = Files.newBufferedReader(cacheFile)) {
Metadata metadata = GSON.fromJson(reader, Metadata.class);
Date lastUpdated = metadata.getLastUpdated();
if (Boolean.getBoolean("offline")
|| System.currentTimeMillis() - lastUpdated.getTime() < ONE_DAY) {
metadata.setRepositoryUri(mrlUri);
return metadata;
}
}
}
try (InputStream is = file.toURL().openStream()) {
String json = Utils.toString(is);
Metadata metadata = GSON.fromJson(json, Metadata.class);
metadata.setLastUpdated(new Date());
try (Writer writer = Files.newBufferedWriter(cacheFile)) {
writer.write(GSON.toJson(metadata));
}
metadata.setRepositoryUri(mrlUri);
return metadata;
}
}
/** {@inheritDoc} */
@Override
public Artifact resolve(MRL mrl, String version, Map<String, String> filter)
throws IOException {
Metadata metadata = locate(mrl);
VersionRange range = VersionRange.parse(version);
List<Artifact> artifacts = metadata.search(range, filter);
if (artifacts.isEmpty()) {
return null;
}
// TODO: find highest version.
return artifacts.get(0);
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/Repository.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.util.Progress;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@code Repository} is a format for storing data {@link Artifact}s for various uses including deep
* learning models and datasets.
*
 * <p>This repository format is based on the design of the Maven Repository format (see <a
* href="https://maven.apache.org/guides/introduction/introduction-to-repositories.html">maven</a>).
* Unlike in Maven, the data doesn't need to be located within the repository. Instead, the
* repository only stores metadata including the URL and checksum of the actual data. When the
* artifact is prepared, the data is downloaded, checked, and then stored in the {@code
 * ~/.djl.ai/cache} folder.
*
* <p>The artifacts are first divided into a number of {@link Metadata} files that can each have
* multiple artifacts. The metadata files are identified by an {@link MRL} which contains:
*
* <ul>
* <li>{@link Anchor} - The anchor is used to organize metadata and artifacts into multi-level
* categories (See {@link Anchor}).
* <li>Group Id - The group id identifies the group publishing the artifacts using a reverse
* domain name system.
* <li>Artifact Id - The artifact id identifies the different artifacts published by a single
* group.
* </ul>
*
* <p>Within each metadata are a number of artifacts that share the same groupId, artifactId, name,
* description, website, and update date. The artifacts within the metadata differ primarily based
* on name and properties. Note that there is a metadata name and a separate artifact name. The
* properties are a map with string property names and string property values that can be used to
* represent key differentiators between artifacts such as dataset, flavors, and image sizes. For
* example, you might have a ResNet metadata file with different artifacts to represent different
* hyperparameters and datasets used for training the ResNet.
*
* <p>Each artifact contains a {@link Version} number (which can be a snapshot version). The data in
* the artifacts are represented by files in the format of an {@link Artifact.Item} and a parsed
 * JSON object of arguments. The files can either be a single file, an automatically extracted gzip
* file, or an automatically extracted zip file that will be treated as a directory. These can be
* used to store data such as the dataset, model parameters, and synset files. The arguments can be
* used to store data about the model used for initialization. For example, it can store the image
* size which can be used by the model loader for both initializing the block and setting up
* resizing in the translator.
*
* <p>There are three kinds of repositories: a {@link LocalRepository}, {@link RemoteRepository},
* and {@link SimpleRepository}. For all three kinds, new repositories should be created by calling
* {@link Repository#newInstance(String, String)} with the location of the repository.
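 *
 * <p>For example, a minimal sketch of opening a repository (the name and URL are illustrative,
 * not an official repository):
 *
 * <pre>{@code
 * Repository repo = Repository.newInstance("testRepo", "https://example.com/mlrepo");
 * if (repo.isRemote()) {
 *     System.out.println("Base URI: " + repo.getBaseUri());
 * }
 * }</pre>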
*/
public interface Repository {
Gson GSON =
new GsonBuilder()
.setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
.setPrettyPrinting()
.create();
/**
* Creates a new instance of a repository with a name and url.
*
* @param name the repository name
* @param url the repository location
* @return the new repository
*/
static Repository newInstance(String name, String url) {
final Logger logger = LoggerFactory.getLogger(Repository.class);
URI uri = URI.create(url);
Path path = null;
if (!uri.isAbsolute()) {
path = Paths.get(url);
}
String scheme = uri.getScheme();
if ("file".equalsIgnoreCase(scheme)) {
path = Paths.get(uri.getPath());
}
if (path != null) {
boolean isLocal;
try {
isLocal =
Files.walk(path)
.anyMatch(
f ->
"metadata.json".equals(f.toFile().getName())
&& f.toFile().isFile());
} catch (IOException e) {
isLocal = false;
                logger.warn(
                        "Failed to determine if the repository is local or simple."
                                + " Defaulting to a simple repository.",
                        e);
}
if (isLocal) {
return new LocalRepository(name, path);
} else {
return new SimpleRepository(name, path);
}
} else {
return new RemoteRepository(name, uri);
}
}
/**
     * Returns whether the repository is a remote repository.
     *
     * @return whether the repository is a remote repository
*/
boolean isRemote();
/**
* Returns the repository name.
*
* @return the repository name
*/
String getName();
/**
* Returns the URI to the base of the repository.
*
* @return the URI
*/
URI getBaseUri();
/**
     * Returns the metadata at an mrl.
*
* @param mrl the mrl of the metadata to retrieve
* @return the metadata
* @throws IOException if it failed to load the metadata
*/
Metadata locate(MRL mrl) throws IOException;
/**
     * Returns the artifact matching an mrl, version, and property filter.
*
* @param mrl the mrl to match the artifact against
* @param version the version of the artifact
* @param filter the property filter
* @return the matched artifact
* @throws IOException if it failed to load the artifact
*/
Artifact resolve(MRL mrl, String version, Map<String, String> filter) throws IOException;
/**
* Returns an {@link InputStream} for an item in a repository.
*
* @param item the item to open
* @param path the path to a file if the item is a zipped directory. Otherwise, pass null
* @return the file stream
* @throws IOException if it failed to open the stream
*/
InputStream openStream(Artifact.Item item, String path) throws IOException;
/**
* Returns the path to a file for the item.
*
* @param item the item to find the path for
* @param path the path to a file if the item is a zipped directory. Otherwise, pass null
* @return the file path
* @throws IOException if it failed to find the path
*/
Path getFile(Artifact.Item item, String path) throws IOException;
/**
* Returns the list of files directly within a specified directory in a zipped directory item.
*
* @param item the zipped directory item
* @param path the path within the zip directory
* @return the list of files/directories
* @throws IOException if it failed to list the directory
*/
String[] listDirectory(Artifact.Item item, String path) throws IOException;
/**
* Prepares the artifact for use.
*
* @param artifact the artifact to prepare
* @throws IOException if it failed to prepare
*/
default void prepare(Artifact artifact) throws IOException {
prepare(artifact, null);
}
/**
* Prepares the artifact for use with progress tracking.
*
* @param artifact the artifact to prepare
* @param progress the progress tracker
* @throws IOException if it failed to prepare
*/
void prepare(Artifact artifact, Progress progress) throws IOException;
/**
* Returns the cache directory for the repository.
*
* @return the cache directory path
* @throws IOException if it failed to ensure the creation of the cache directory
*/
Path getCacheDirectory() throws IOException;
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/SimpleRepository.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.repository.Artifact.Item;
import ai.djl.util.Progress;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A {@code SimpleRepository} is a {@link Repository} containing only a single artifact without
* requiring a "metadata.json" file.
*
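 * <p>For example, pointing {@link Repository#newInstance(String, String)} at a plain folder that
 * has no "metadata.json" produces a {@code SimpleRepository} (the path is illustrative):
 *
 * <pre>{@code
 * Repository repo = Repository.newInstance("resnet", "/opt/models/resnet");
 * }</pre>
 *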
* @see Repository
*/
public class SimpleRepository extends AbstractRepository {
private String name;
private Path path;
/**
* (Internal) Constructs a SimpleRepository.
*
* <p>Use {@link Repository#newInstance(String, String)}.
*
* @param path the path to the repository
*/
public SimpleRepository(Path path) {
this(path.toFile().getName(), path);
}
/**
* (Internal) Constructs a SimpleRepository.
*
* <p>Use {@link Repository#newInstance(String, String)}.
*
* @param name the name of the repository
* @param path the path to the repository
*/
public SimpleRepository(String name, Path path) {
this.name = name;
this.path = path;
}
/** {@inheritDoc} */
@Override
public boolean isRemote() {
return false;
}
/** {@inheritDoc} */
@Override
public String getName() {
return name;
}
/** {@inheritDoc} */
@Override
public URI getBaseUri() {
return path.toUri();
}
/** {@inheritDoc} */
@Override
public Metadata locate(MRL mrl) throws IOException {
Metadata metadata = new Metadata();
metadata.setRepositoryUri(URI.create(""));
Artifact artifact = new Artifact();
artifact.setMetadata(metadata);
metadata.setArtifacts(Collections.singletonList(artifact));
artifact.setName(name);
Map<String, Item> files = new ConcurrentHashMap<>();
File[] fileList = path.toFile().listFiles();
if (fileList == null) {
throw new IllegalArgumentException("No files found in SimpleRepository");
}
for (File file : fileList) {
Item item = new Item();
item.setName(file.getName());
item.setSize(file.length());
item.setArtifact(artifact);
files.put(file.getName(), item);
}
artifact.setFiles(files);
return metadata;
}
/** {@inheritDoc} */
@Override
public Artifact resolve(MRL mrl, String version, Map<String, String> filter)
throws IOException {
return locate(mrl).getArtifacts().get(0);
}
/** {@inheritDoc} */
@Override
public void prepare(Artifact artifact, Progress progress) {
// Do nothing
}
/** {@inheritDoc} */
@Override
public Path getCacheDirectory() {
return path;
}
/** {@inheritDoc} */
@Override
protected URI resolvePath(Item item, String path) throws IOException {
return this.path.resolve(item.getName()).toUri();
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/VersionRange.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
/**
* A {@code VersionRange} is a set of {@link Restriction}s that match some {@link Version}s.
*
* <p>A {@code VersionRange} should be constructed using {@link VersionRange#parse(String)}. The
* format used by the version ranges matches the <a
* href="https://cwiki.apache.org/confluence/display/MAVENOLD/Dependency+Mediation+and+Conflict+Resolution#DependencyMediationandConflictResolution-DependencyVersionRanges">maven
* version range syntax</a>.
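 *
 * <p>For example (the versions are illustrative):
 *
 * <pre>{@code
 * VersionRange range = VersionRange.parse("[1.0,2.0)"); // 1.0 inclusive through 2.0 exclusive
 * boolean match = range.contains(new Version("1.5"));   // true
 * }</pre>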
*/
public final class VersionRange {
private static final VersionRange ANY = new VersionRange(null, Collections.emptyList());
private Version recommendedVersion;
private List<Restriction> restrictions;
private VersionRange(Version recommendedVersion, List<Restriction> restrictions) {
this.recommendedVersion = recommendedVersion;
this.restrictions = restrictions;
}
/**
* Returns the recommended version in the range.
*
* @return the recommended version in the range
*/
public Version getRecommendedVersion() {
return recommendedVersion;
}
/**
* Returns the restrictions that compose the range.
*
* @return the restrictions that compose the range
*/
public List<Restriction> getRestrictions() {
return restrictions;
}
/**
* Creates a new version range from a string version range.
*
* @param spec the string version range
* @return the {@link VersionRange}
*/
public static VersionRange parse(String spec) {
if (spec == null || spec.isEmpty()) {
return ANY;
}
List<Restriction> restrictions = new ArrayList<>();
String process = spec;
Version version = null;
Version upperBound = null;
Version lowerBound = null;
while (process.startsWith("[") || process.startsWith("(")) {
int index1 = process.indexOf(')');
int index2 = process.indexOf(']');
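            // pick whichever closing delimiter, ')' or ']', comes first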
int index = index2;
if (index2 < 0 || index1 < index2) {
if (index1 >= 0) {
index = index1;
}
}
if (index < 0) {
throw new IllegalArgumentException("Unbounded range: " + spec);
}
Restriction restriction = parseRestriction(process.substring(0, index + 1));
if (lowerBound == null) {
lowerBound = restriction.getLowerBound();
}
if (upperBound != null) {
if (restriction.getLowerBound() == null
|| restriction.getLowerBound().compareTo(upperBound) < 0) {
throw new IllegalArgumentException("Ranges overlap: " + spec);
}
}
restrictions.add(restriction);
upperBound = restriction.getUpperBound();
process = process.substring(index + 1).trim();
if (process.length() > 0 && process.startsWith(",")) {
process = process.substring(1).trim();
}
}
if (process.length() > 0) {
if (!restrictions.isEmpty()) {
throw new IllegalArgumentException(
"Only fully-qualified sets allowed in multiple set scenario: " + spec);
}
version = new Version(process);
restrictions.add(Restriction.EVERYTHING);
}
return new VersionRange(version, restrictions);
}
private static Restriction parseRestriction(String spec) {
boolean lowerBoundInclusive = spec.startsWith("[");
boolean upperBoundInclusive = spec.endsWith("]");
String process = spec.substring(1, spec.length() - 1).trim();
Restriction restriction;
int index = process.indexOf(',');
if (index < 0) {
if (!lowerBoundInclusive || !upperBoundInclusive) {
throw new IllegalArgumentException(
"Single version must be surrounded by []: " + spec);
}
Version version = new Version(process);
restriction = new Restriction(version, true, version, true);
} else {
String lowerBound = process.substring(0, index).trim();
String upperBound = process.substring(index + 1).trim();
if (lowerBound.equals(upperBound)) {
throw new IllegalArgumentException(
"Range cannot have identical boundaries: " + spec);
}
Version lowerVersion = null;
if (lowerBound.length() > 0) {
lowerVersion = new Version(lowerBound);
}
Version upperVersion = null;
if (upperBound.length() > 0) {
upperVersion = new Version(upperBound);
}
if (upperVersion != null
&& lowerVersion != null
&& upperVersion.compareTo(lowerVersion) < 0) {
throw new IllegalArgumentException("Range defies version ordering: " + spec);
}
restriction =
new Restriction(
lowerVersion, lowerBoundInclusive, upperVersion, upperBoundInclusive);
}
return restriction;
}
/**
* Filters the provided artifacts to those that match the version range.
*
* @param artifacts the artifacts to filter
* @return the filtered artifacts
*/
public List<Artifact> matches(List<Artifact> artifacts) {
return artifacts.stream().filter(this::contains).collect(Collectors.toList());
}
/**
* Returns true if a version falls within this range.
*
* @param version the version to test
* @return true if the version falls within this range
*/
public boolean contains(Version version) {
if (recommendedVersion != null) {
return recommendedVersion.equals(version);
}
for (Restriction restriction : restrictions) {
if (restriction.containsVersion(version)) {
return true;
}
}
return false;
}
/**
* Returns true if the artifact's version falls within this range.
*
* @param artifact the artifact to test
* @return true if the artifact's version falls within this range
*/
public boolean contains(Artifact artifact) {
return artifact.getVersion() == null || contains(artifact.getParsedVersion());
}
/** {@inheritDoc} */
@Override
public String toString() {
if (recommendedVersion != null) {
return recommendedVersion.toString();
}
StringBuilder buf = new StringBuilder();
for (Iterator<Restriction> i = restrictions.iterator(); i.hasNext(); ) {
Restriction r = i.next();
buf.append(r.toString());
if (i.hasNext()) {
buf.append(',');
}
}
return buf.toString();
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/ZipUtils.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/** Utilities for working with zip files. */
public final class ZipUtils {
private ZipUtils() {}
/**
* Unzips an input stream to a given path.
*
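     * <p>A minimal usage sketch (the file paths are hypothetical):
     *
     * <pre>{@code
     * try (InputStream is = Files.newInputStream(Paths.get("model.zip"))) {
     *     ZipUtils.unzip(is, Paths.get("model"));
     * }
     * }</pre>
     *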
* @param is the input stream to unzip
* @param dest the path to store the unzipped files
* @throws IOException for failures to unzip the input stream and create files in the dest path
*/
public static void unzip(InputStream is, Path dest) throws IOException {
ZipInputStream zis = new ZipInputStream(is);
ZipEntry entry;
while ((entry = zis.getNextEntry()) != null) {
            String name = entry.getName();
            Path file = dest.resolve(name).toAbsolutePath().normalize();
            // guard against zip slip: reject entries that resolve outside the destination
            if (!file.startsWith(dest.toAbsolutePath().normalize())) {
                throw new IOException("Bad zip entry: " + name);
            }
if (entry.isDirectory()) {
Files.createDirectories(file);
} else {
Path parentFile = file.getParent();
if (parentFile == null) {
throw new AssertionError(
"Parent path should never be null: " + file.toString());
}
Files.createDirectories(parentFile);
Files.copy(zis, file);
}
}
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains a Maven-based Repository format for creating repositories of artifacts such as datasets
* and model zoos.
*
* <p>There are also helper classes for Datasets ({@link ai.djl.repository.dataset}) and Model Zoos
* ({@link ai.djl.repository.zoo}) as well.
*
* @see ai.djl.repository.Repository
*/
package ai.djl.repository;
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/dataset/PreparedDataset.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.dataset;
import ai.djl.util.Progress;
import java.io.IOException;
/**
* A {@code PreparedDataset} is a {@link ai.djl.training.dataset.Dataset} that requires an
* additional preparation step before use.
*
* <p>The preparation steps can be run by calling {@link PreparedDataset#prepare()}.
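 *
 * <p>For example, a caller typically prepares the dataset once before using it (a sketch that
 * works for any {@code PreparedDataset} implementation):
 *
 * <pre>{@code
 * void ensurePrepared(PreparedDataset dataset) throws IOException {
 *     dataset.prepare(); // downloads or builds the data before first use
 * }
 * }</pre>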
*/
public interface PreparedDataset {
/**
* Prepares the dataset for use.
*
* @throws IOException for various exceptions depending on the dataset
*/
default void prepare() throws IOException {
prepare(null);
}
/**
* Prepares the dataset for use with tracked progress.
*
* @param progress the progress tracker
* @throws IOException for various exceptions depending on the dataset
*/
void prepare(Progress progress) throws IOException;
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/dataset/ZooDataset.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.dataset;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.Dataset;
import ai.djl.util.Progress;
import java.io.IOException;
/**
* A {@link Dataset} whose data is found in the dataset zoo of a {@link Repository}.
*
* <p>The {@code ZooDataset}s are all {@link PreparedDataset}s.
*/
public interface ZooDataset extends Dataset, PreparedDataset {
/**
* Returns the {@link MRL} of the dataset.
*
* @return the {@link MRL} of the dataset
*/
MRL getMrl();
/**
* Returns the {@link Repository} the dataset is found in.
*
* @return the {@link Repository} the dataset is found in
*/
Repository getRepository();
/**
* Returns the {@link Artifact} the dataset is found in.
*
* @return the {@link Artifact} the dataset is found in
*/
Artifact getArtifact();
/**
* Returns the {@link ai.djl.training.dataset.Dataset.Usage} of the dataset.
*
* @return the {@link ai.djl.training.dataset.Dataset.Usage} of the dataset
*/
Usage getUsage();
/**
* Returns whether the dataset has been prepared.
*
* @return true if the dataset has been prepared
*/
boolean isPrepared();
/**
* Sets if the dataset has been prepared.
*
* @param prepared true if the dataset has been prepared
*/
void setPrepared(boolean prepared);
/**
* Sets the artifact to the default one.
*
* <p>The default artifact is usually found by searching within the repository with a default
* mrl, version, and filter.
*
* @throws IOException for various exceptions depending on the specific dataset
*/
void useDefaultArtifact() throws IOException;
/**
* Prepares the {@link ZooDataset} with the dataset specific behavior.
*
* <p>This method is called only when the dataset is not prepared, has an artifact set, and the
* repository artifact has already been prepared. {@link ZooDataset#setPrepared(boolean)} does
* not need to be called within this method and will be called after.
*
* @param usage the usage to prepare
* @throws IOException for various exceptions depending on the specific dataset
*/
void prepareData(Usage usage) throws IOException;
/** {@inheritDoc} */
@Override
default void prepare(Progress progress) throws IOException {
if (!isPrepared()) {
if (getArtifact() == null) {
useDefaultArtifact();
if (getArtifact() == null) {
throw new IOException(getMrl() + " dataset not found.");
}
}
getRepository().prepare(getArtifact(), progress);
prepareData(getUsage());
setPrepared(true);
}
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/dataset/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains interfaces for datasets in repositories.
*
 * @see ai.djl.repository.dataset.PreparedDataset
 * @see ai.djl.repository.dataset.ZooDataset
*/
package ai.djl.repository.dataset;
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/BaseModelLoader.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.ndarray.NDList;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Metadata;
import ai.djl.repository.Repository;
import ai.djl.repository.VersionRange;
import ai.djl.translate.NoopTranslator;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import ai.djl.util.Progress;
import java.io.IOException;
import java.lang.reflect.Type;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/** Shared code for the {@link ModelLoader} implementations. */
public abstract class BaseModelLoader<I, O> implements ModelLoader<I, O> {
protected Repository repository;
protected MRL mrl;
protected String version;
protected Map<Pair<Type, Type>, TranslatorFactory<?, ?>> factories;
private Metadata metadata;
/**
* Constructs a {@link ModelLoader} given the repository, mrl, and version.
*
* @param repository the repository to load the model from
* @param mrl the mrl of the model to load
* @param version the version of the model to load
*/
protected BaseModelLoader(Repository repository, MRL mrl, String version) {
this.repository = repository;
this.mrl = mrl;
this.version = version;
factories = new ConcurrentHashMap<>();
factories.put(
new Pair<>(NDList.class, NDList.class),
(TranslatorFactory<NDList, NDList>) arguments -> new NoopTranslator());
}
/** {@inheritDoc} */
@Override
public String getArtifactId() {
return mrl.getArtifactId();
}
/** {@inheritDoc} */
@Override
public <S, T> ZooModel<S, T> loadModel(Criteria<S, T> criteria)
throws IOException, ModelNotFoundException, MalformedModelException {
Artifact artifact = match(criteria.getFilters());
if (artifact == null) {
throw new ModelNotFoundException("Model not found.");
}
Map<String, Object> override = criteria.getArguments();
Progress progress = criteria.getProgress();
Map<String, Object> arguments = artifact.getArguments(override);
try {
Translator<S, T> translator = criteria.getTranslator();
if (translator == null) {
TranslatorFactory<S, T> factory = getTranslatorFactory(criteria);
if (factory == null) {
throw new ModelNotFoundException("No matching default translator found.");
}
translator = factory.newInstance(arguments);
}
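            // download and cache the artifact files before loading the model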
repository.prepare(artifact, progress);
if (progress != null) {
progress.reset("Loading", 2);
progress.update(1);
}
Path dir = repository.getCacheDirectory();
String relativePath = artifact.getResourceUri().getPath();
Path modelPath = dir.resolve(relativePath);
Model model = createModel(criteria.getDevice(), artifact, arguments);
model.load(modelPath, artifact.getName());
return new ZooModel<>(model, translator);
} finally {
if (progress != null) {
progress.end();
}
}
}
/** {@inheritDoc} */
@Override
public List<Artifact> listModels() throws IOException, ModelNotFoundException {
List<Artifact> list = getMetadata().getArtifacts();
return list.stream()
.filter(a -> version.equals(a.getVersion()))
.collect(Collectors.toList());
}
    /**
     * Creates the model specified by the artifact and arguments.
     *
     * @param device the device to load the model onto
     * @param artifact the artifact the model was resolved from
     * @param arguments the override arguments for model loading
     * @return the newly created model
     * @throws IOException if the model could not be created
     */
    protected Model createModel(Device device, Artifact artifact, Map<String, Object> arguments)
throws IOException {
return Model.newInstance(device);
}
/**
* Returns the first artifact that matches a given criteria.
*
* @param criteria the criteria to match against
     * @return the first artifact that matches the criteria, or {@code null} if no artifact
     *     matches
* @throws IOException for errors while loading the model
* @throws ModelNotFoundException if the metadata to get artifacts from is not found
*/
protected Artifact match(Map<String, String> criteria)
throws IOException, ModelNotFoundException {
List<Artifact> list = search(criteria);
if (list.isEmpty()) {
return null;
}
return list.get(0);
}
/**
* Returns all the artifacts that match a given criteria.
*
* @param criteria the criteria to match against
* @return all the artifacts that match a given criteria
* @throws IOException for errors while loading the model
* @throws ModelNotFoundException if the metadata to get artifacts from is not found
*/
private List<Artifact> search(Map<String, String> criteria)
throws IOException, ModelNotFoundException {
return getMetadata().search(VersionRange.parse(version), criteria);
}
private Metadata getMetadata() throws IOException, ModelNotFoundException {
if (metadata == null) {
metadata = repository.locate(mrl);
if (metadata == null) {
throw new ModelNotFoundException(mrl.getArtifactId() + " Models not found.");
}
}
return metadata;
}
/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(200);
sb.append(repository.getName())
.append(':')
.append(mrl.getGroupId())
.append(':')
.append(mrl.getArtifactId())
.append(" [\n");
try {
for (Artifact artifact : listModels()) {
sb.append('\t').append(artifact).append('\n');
}
} catch (IOException | ModelNotFoundException e) {
sb.append("\tFailed load metadata.");
}
sb.append("\n]");
return sb.toString();
}
@SuppressWarnings("unchecked")
private <S, T> TranslatorFactory<S, T> getTranslatorFactory(Criteria<S, T> criteria) {
return (TranslatorFactory<S, T>)
factories.get(new Pair<>(criteria.getInputClass(), criteria.getOutputClass()));
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/Criteria.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.translate.Translator;
import ai.djl.util.Progress;
import java.util.HashMap;
import java.util.Map;
/**
* The {@code Criteria} class contains search criteria to look up a {@link ZooModel}.
*
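 * <p>For example, a criteria can be assembled with the builder (the filter key and value are
 * illustrative):
 *
 * <pre>{@code
 * Criteria<NDList, NDList> criteria =
 *         Criteria.builder()
 *                 .setTypes(NDList.class, NDList.class)
 *                 .optFilter("layers", "50")
 *                 .build();
 * }</pre>
 *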
* @param <I> the model input type
* @param <O> the model output type
*/
public class Criteria<I, O> {
private Application application;
private Class<I> inputClass;
private Class<O> outputClass;
private String engine;
private Device device;
private String groupId;
private String artifactId;
private Map<String, String> filters;
private Map<String, Object> arguments;
private Translator<I, O> translator;
private Progress progress;
Criteria(Builder<I, O> builder) {
this.application = builder.application;
this.inputClass = builder.inputClass;
this.outputClass = builder.outputClass;
this.engine = builder.engine;
this.device = builder.device;
this.groupId = builder.groupId;
this.artifactId = builder.artifactId;
this.filters = builder.filters;
this.arguments = builder.arguments;
this.translator = builder.translator;
this.progress = builder.progress;
}
/**
* Returns the application of the model.
*
* @return the application of the model
*/
public Application getApplication() {
return application;
}
/**
* Returns the input data type.
*
* @return the input data type
*/
public Class<I> getInputClass() {
return inputClass;
}
/**
* Returns the output data type.
*
* @return the output data type
*/
public Class<O> getOutputClass() {
return outputClass;
}
/**
* Returns the engine name.
*
* @return the engine name
*/
public String getEngine() {
return engine;
}
/**
* Returns the {@link Device} of the model to be loaded on.
*
* @return the {@link Device} of the model to be loaded on
*/
public Device getDevice() {
return device;
}
/**
* Returns the groupId of the {@link ModelZoo} to be searched.
*
* @return the groupId of the {@link ModelZoo} to be searched
*/
public String getGroupId() {
return groupId;
}
/**
* Returns the artifactId of the {@link ModelLoader} to be searched.
*
     * @return the artifactId of the {@link ModelLoader} to be searched
*/
public String getArtifactId() {
return artifactId;
}
/**
* Returns the search filters that must match the properties of the model.
*
* @return the search filters that must match the properties of the model.
*/
public Map<String, String> getFilters() {
return filters;
}
/**
* Returns the override configurations of the model loading arguments.
*
* @return the override configurations of the model loading arguments
*/
public Map<String, Object> getArguments() {
return arguments;
}
/**
* Returns the optional {@link Translator} to be used for {@link ZooModel}.
*
* @return the optional {@link Translator} to be used for {@link ZooModel}
*/
public Translator<I, O> getTranslator() {
return translator;
}
/**
* Returns the optional {@link Progress} for the model loading.
*
* @return the optional {@link Progress} for the model loading
*/
public Progress getProgress() {
return progress;
}
/**
* Creates a builder to build a {@code Criteria}.
*
* @return a new builder
*/
public static Builder<?, ?> builder() {
return new Builder<>();
}
/** A Builder to construct a {@code Criteria}. */
public static final class Builder<I, O> {
Application application;
Class<I> inputClass;
Class<O> outputClass;
String engine;
Device device;
String groupId;
String artifactId;
Map<String, String> filters;
Map<String, Object> arguments;
Translator<I, O> translator;
Progress progress;
Builder() {
engine = Engine.getInstance().getEngineName();
}
private Builder(Class<I> inputClass, Class<O> outputClass, Builder<?, ?> parent) {
this.inputClass = inputClass;
this.outputClass = outputClass;
application = parent.application;
engine = parent.engine;
device = parent.device;
groupId = parent.groupId;
filters = parent.filters;
arguments = parent.arguments;
progress = parent.progress;
}
/**
         * Creates a new {@code Builder} with the specified input and output data types.
*
* @param <P> the input data type
* @param <Q> the output data type
* @param inputClass the input class
* @param outputClass the output class
         * @return a new {@code Builder} with the specified input and output data types
*/
public <P, Q> Builder<P, Q> setTypes(Class<P> inputClass, Class<Q> outputClass) {
return new Builder<>(inputClass, outputClass, this);
}
/**
* Sets the model application for this criteria.
*
* @param application the model application
* @return this {@code Builder}
*/
public Builder<I, O> optApplication(Application application) {
this.application = application;
return this;
}
/**
* Sets the engine name for this criteria.
*
* @param engine the engine name
* @return this {@code Builder}
*/
public Builder<I, O> optEngine(String engine) {
this.engine = engine;
return this;
}
/**
* Sets the {@link Device} for this criteria.
*
* @param device the {@link Device} for the criteria
* @return this {@code Builder}
*/
public Builder<I, O> optDevice(Device device) {
this.device = device;
return this;
}
/**
* Sets optional groupId of the {@link ModelZoo} for this criteria.
*
* @param groupId the groupId of the {@link ModelZoo}
* @return this {@code Builder}
*/
public Builder<I, O> optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets optional artifactId of the {@link ModelLoader} for this criteria.
*
* @param artifactId the artifactId of the {@link ModelLoader}
* @return this {@code Builder}
*/
public Builder<I, O> optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the extra search filters for this criteria.
*
* @param filters the extra search filters
* @return this {@code Builder}
*/
public Builder<I, O> optFilters(Map<String, String> filters) {
this.filters = filters;
return this;
}
/**
* Sets an extra search filter for this criteria.
*
* @param key the search key
* @param value the search value
* @return this {@code Builder}
*/
public Builder<I, O> optFilter(String key, String value) {
if (filters == null) {
filters = new HashMap<>();
}
filters.put(key, value);
return this;
}
/**
* Sets an extra model loading argument for this criteria.
*
* @param arguments optional model loading arguments
* @return this {@code Builder}
*/
public Builder<I, O> optArguments(Map<String, Object> arguments) {
this.arguments = arguments;
return this;
}
/**
* Sets the optional model loading argument for this criteria.
*
* @param key the model loading argument key
* @param value the model loading argument value
* @return this {@code Builder}
*/
public Builder<I, O> optArgument(String key, Object value) {
if (arguments == null) {
arguments = new HashMap<>();
}
arguments.put(key, value);
return this;
}
/**
* Sets the optional {@link Translator} to override default {@code Translator}.
*
* @param translator the override {@code Translator}
* @return this {@code Builder}
*/
public Builder<I, O> optTranslator(Translator<I, O> translator) {
this.translator = translator;
return this;
}
/**
         * Sets the optional {@link Progress}.
*
* @param progress the {@code Progress}
* @return this {@code Builder}
*/
public Builder<I, O> optProgress(Progress progress) {
this.progress = progress;
return this;
}
/**
* Builds a {@link Criteria} instance.
*
* @return the {@link Criteria} instance
*/
public Criteria<I, O> build() {
return new Criteria<>(this);
}
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/LocalModelLoader.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.ndarray.NDList;
import ai.djl.repository.Artifact;
import ai.djl.repository.Repository;
import ai.djl.translate.NoopTranslator;
import ai.djl.translate.Translator;
import ai.djl.util.Progress;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/** A {@link ModelLoader} that loads a particular {@link ZooModel} from a local folder. */
public class LocalModelLoader implements ModelLoader<NDList, NDList> {
private Repository repository;
/**
* Creates the model loader from the given repository.
*
* @param repository the repository to load the model from
*/
public LocalModelLoader(Repository repository) {
this.repository = repository;
}
/** {@inheritDoc} */
@Override
public String getArtifactId() {
return repository.getName();
}
/** {@inheritDoc} */
@Override
public Application getApplication() {
return null;
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public <S, T> ZooModel<S, T> loadModel(Criteria<S, T> criteria)
throws IOException, ModelNotFoundException, MalformedModelException {
Progress progress = criteria.getProgress();
try {
Translator<S, T> translator = criteria.getTranslator();
if (translator == null) {
translator = (Translator<S, T>) new NoopTranslator();
}
if (progress != null) {
progress.reset("Loading", 2);
progress.update(1);
}
Path dir = repository.getCacheDirectory();
Model model = Model.newInstance(criteria.getDevice());
model.load(dir);
return new ZooModel<>(model, translator);
} finally {
if (progress != null) {
progress.end();
}
}
}
/** {@inheritDoc} */
@Override
public ZooModel<NDList, NDList> loadModel(
Map<String, String> filters, Device device, Progress progress)
throws IOException, ModelNotFoundException, MalformedModelException {
Criteria<NDList, NDList> criteria =
Criteria.builder()
.setTypes(NDList.class, NDList.class)
.optFilters(filters)
.optDevice(device)
.optProgress(progress)
.build();
return loadModel(criteria);
}
/** {@inheritDoc} */
@Override
public List<Artifact> listModels() {
return Collections.emptyList();
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/LocalModelZoo.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.engine.Engine;
import ai.djl.repository.SimpleRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A {@link ModelZoo} that contains models in a local directory. */
public class LocalModelZoo implements ModelZoo {
private static final Logger logger = LoggerFactory.getLogger(LocalModelZoo.class);
public static final String GROUP_ID = "ai.djl.localmodelzoo";
private Path folder;
/**
* Creates the {@code LocalModelZoo} instance from the given directory.
*
* @param folder the directory to load models from
*/
public LocalModelZoo(Path folder) {
this.folder = folder;
}
/** {@inheritDoc} */
@Override
public List<ModelLoader<?, ?>> getModelLoaders() {
        // close the directory stream after use to avoid leaking the file handle
        try (Stream<Path> stream = Files.list(folder)) {
            List<Path> dirs = stream.filter(Files::isDirectory).collect(Collectors.toList());
            if (dirs.isEmpty()) {
                LocalModelLoader loader = new LocalModelLoader(new SimpleRepository(folder));
                return Collections.singletonList(loader);
            }
            List<ModelLoader<?, ?>> loaders = new ArrayList<>();
            for (Path p : dirs) {
                loaders.add(new LocalModelLoader(new SimpleRepository(p)));
            }
            return loaders;
        } catch (IOException e) {
            logger.error("Failed to list files.", e);
        }
}
return Collections.emptyList();
}
/** {@inheritDoc} */
@Override
public String getGroupId() {
return GROUP_ID;
}
/** {@inheritDoc} */
@Override
public Set<String> getSupportedEngines() {
return Collections.singleton(Engine.getInstance().getEngineName());
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/LocalZooProvider.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/** A {@link ZooProvider} implementation that loads models from a local directory. */
public class LocalZooProvider implements ZooProvider {
/** {@inheritDoc} */
@Override
public ModelZoo getModelZoo() {
String localRepoPath = System.getProperty("ai.djl.repository.zoo.location");
if (localRepoPath != null) {
Path path = Paths.get(localRepoPath);
if (Files.isDirectory(path)) {
return new LocalModelZoo(path);
}
}
return null;
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/ModelLoader.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.repository.Artifact;
import ai.djl.util.Progress;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
 * A {@code ModelLoader} loads a particular {@link ZooModel} from a repository for a model zoo.
*
* @param <I> the input data type
* @param <O> the output data type
*/
public interface ModelLoader<I, O> {
/**
* Returns the artifact ID of the {@code ModelLoader}.
*
* @return the artifact ID of the {@code ModelLoader}
*/
String getArtifactId();
/**
* Returns the application of the {@code ModelLoader}.
*
* @return the application of the {@code ModelLoader}
*/
Application getApplication();
/**
* Loads the model with the given criteria.
*
* @param <S> the input data type
* @param <T> the output data type
* @param criteria the criteria to match against the loaded model
* @return the loaded model
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
<S, T> ZooModel<S, T> loadModel(Criteria<S, T> criteria)
throws IOException, ModelNotFoundException, MalformedModelException;
/**
* Loads the model.
*
* @return the loaded model
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
default ZooModel<I, O> loadModel()
throws MalformedModelException, ModelNotFoundException, IOException {
return loadModel(null, null, null);
}
/**
* Loads the model.
*
* @param progress the progress tracker to update while loading the model
* @return the loaded model
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
default ZooModel<I, O> loadModel(Progress progress)
throws MalformedModelException, ModelNotFoundException, IOException {
return loadModel(null, null, progress);
}
/**
* Loads the model with the given search filters.
*
* @param filters the search filters to match against the loaded model
* @return the loaded model
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
default ZooModel<I, O> loadModel(Map<String, String> filters)
throws MalformedModelException, ModelNotFoundException, IOException {
return loadModel(filters, null, null);
}
/**
* Loads the model with the given search filters.
*
* @param filters the search filters to match against the loaded model
* @param progress the progress tracker to update while loading the model
* @return the loaded model
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
default ZooModel<I, O> loadModel(Map<String, String> filters, Progress progress)
throws MalformedModelException, ModelNotFoundException, IOException {
return loadModel(filters, null, progress);
}
/**
* Loads the model with the given search filters.
*
* @param filters the search filters to match against the loaded model
* @param device the device the loaded model should use
* @param progress the progress tracker to update while loading the model
* @return the loaded model
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
ZooModel<I, O> loadModel(Map<String, String> filters, Device device, Progress progress)
throws IOException, ModelNotFoundException, MalformedModelException;
/**
* Returns a list of the available artifacts that can be loaded.
*
* @return a list of the available artifacts that can be loaded
* @throws IOException for errors reading the artifact list
     * @throws ModelNotFoundException if no models with the mrl defined within this loader are found
*/
List<Artifact> listModels() throws IOException, ModelNotFoundException;
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/ModelNotFoundException.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.ModelException;
/** Thrown when an application cannot find a model in the repository search path. */
public class ModelNotFoundException extends ModelException {
private static final long serialVersionUID = 1L;
/**
* Constructs a new exception with the specified detail message. The cause is not initialized,
* and may subsequently be initialized by a call to {@link #initCause}.
*
* @param message the detail message. The detail message is saved for later retrieval by the
* {@link #getMessage()} method.
*/
public ModelNotFoundException(String message) {
super(message);
}
/**
* Constructs a new exception with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i> automatically
* incorporated in this exception's detail message.
*
* @param message the detail message (which is saved for later retrieval by the {@link
* #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
*/
public ModelNotFoundException(String message, Throwable cause) {
super(message, cause);
}
/**
* Constructs a new exception with the specified cause and a detail message of {@code
* (cause==null ? null : cause.toString())} (which typically contains the class and detail
* message of {@code cause}). This constructor is useful for exceptions that are little more
* than wrappers for other throwables (for example, {@link
* java.security.PrivilegedActionException}).
*
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
*/
public ModelNotFoundException(Throwable cause) {
super(cause);
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/ModelZoo.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.MalformedModelException;
import ai.djl.repository.Artifact;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.TreeMap;
/** An interface that represents a collection of models. */
public interface ModelZoo {
/**
* Returns the global unique identifier of the {@code ModelZoo}.
*
     * <p>We recommend using a reverse DNS name as your model zoo group ID to make sure it does
     * not conflict with other {@code ModelZoo}s.
*
* @return the global unique identifier of the {@code ModelZoo}
*/
String getGroupId();
/**
* Lists the available model families in the ModelZoo.
*
* @return the list of all available model families
*/
default List<ModelLoader<?, ?>> getModelLoaders() {
List<ModelLoader<?, ?>> list = new ArrayList<>();
try {
Field[] fields = getClass().getDeclaredFields();
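            // by convention, zoos expose their loaders as public static fields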
for (Field field : fields) {
if (ModelLoader.class.isAssignableFrom(field.getType())) {
list.add((ModelLoader<?, ?>) field.get(null));
}
}
} catch (ReflectiveOperationException e) {
// ignore
}
return list;
}
/**
* Returns the {@link ModelLoader} based on the model name.
*
* @param name the name of the model
* @return the {@link ModelLoader} of the model
*/
default ModelLoader<?, ?> getModelLoader(String name) {
for (ModelLoader<?, ?> loader : getModelLoaders()) {
if (name.equals(loader.getArtifactId())) {
return loader;
}
}
return null;
}
/**
* Returns all supported engine names.
*
* @return all supported engine names
*/
Set<String> getSupportedEngines();
/**
     * Loads the first model that matches the given criteria, searching all available model zoos.
*
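     * <p>A minimal sketch (the criteria values are illustrative):
     *
     * <pre>{@code
     * Criteria<NDList, NDList> criteria =
     *         Criteria.builder().setTypes(NDList.class, NDList.class).build();
     * try (ZooModel<NDList, NDList> model = ModelZoo.loadModel(criteria)) {
     *     // run inference with the loaded model
     * }
     * }</pre>
     *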
     * @param criteria the criteria to match against the loaded model
* @param <I> the input data type for preprocessing
* @param <O> the output data type after postprocessing
* @return the model that matches the criteria
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
static <I, O> ZooModel<I, O> loadModel(Criteria<I, O> criteria)
throws IOException, ModelNotFoundException, MalformedModelException {
String groupId = criteria.getGroupId();
ServiceLoader<ZooProvider> providers = ServiceLoader.load(ZooProvider.class);
for (ZooProvider provider : providers) {
ModelZoo zoo = provider.getModelZoo();
if (zoo == null) {
continue;
}
if (groupId != null && !zoo.getGroupId().equals(groupId)) {
// filter out ModelZoo by groupId
continue;
}
Set<String> supportedEngine = zoo.getSupportedEngines();
if (!supportedEngine.contains(criteria.getEngine())) {
continue;
}
Application application = criteria.getApplication();
String artifactId = criteria.getArtifactId();
for (ModelLoader<?, ?> loader : zoo.getModelLoaders()) {
if (artifactId != null && !artifactId.equals(loader.getArtifactId())) {
// filter out by model loader artifactId
continue;
}
Application app = loader.getApplication();
if (application != null && app != null && !app.equals(application)) {
// filter out ModelLoader by application
continue;
}
try {
return loader.loadModel(criteria);
} catch (ModelNotFoundException e) {
// ignore
}
}
}
throw new ModelNotFoundException(
"No matching model with specified Input/Output type found.");
}
/**
* Returns the available {@link Application} and their model artifact metadata.
*
* @return the available {@link Application} and their model artifact metadata
* @throws IOException if failed to download to repository metadata
* @throws ModelNotFoundException if failed to parse repository metadata
*/
static Map<Application, List<Artifact>> listModels()
throws IOException, ModelNotFoundException {
@SuppressWarnings("PMD.UseConcurrentHashMap")
Map<Application, List<Artifact>> models =
new TreeMap<>(Comparator.comparing(Application::getPath));
ServiceLoader<ZooProvider> providers = ServiceLoader.load(ZooProvider.class);
for (ZooProvider provider : providers) {
ModelZoo zoo = provider.getModelZoo();
if (zoo == null) {
continue;
}
List<ModelLoader<?, ?>> list = zoo.getModelLoaders();
for (ModelLoader<?, ?> loader : list) {
Application app = loader.getApplication();
final List<Artifact> artifacts = loader.listModels();
models.compute(
app,
(key, val) -> {
if (val == null) {
val = new ArrayList<>();
}
val.addAll(artifacts);
return val;
});
}
}
return models;
}
}
|
0
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository
|
java-sources/ai/djl/repository/0.4.1/ai/djl/repository/zoo/ZooModel.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Model;
import ai.djl.inference.Predictor;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.translate.Translator;
import ai.djl.util.PairList;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Path;
import java.util.Map;
import java.util.function.Function;
/**
* A {@code ZooModel} is a {@link Model} loaded from a model zoo and includes a default {@link
* Translator}.
*
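* <p>A typical use is creating a predictor with the bundled translator (sketch):
*
* <pre>{@code
* try (Predictor<I, O> predictor = zooModel.newPredictor()) {
*     O output = predictor.predict(input);
* }
* }</pre>
*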
* @param <I> the model input type
* @param <O> the model output type
*/
public class ZooModel<I, O> implements Model {
private Model model;
private Translator<I, O> translator;
/**
* Constructs a {@code ZooModel} given the model and translator.
*
* @param model the model to wrap
* @param translator the translator
*/
public ZooModel(Model model, Translator<I, O> translator) {
this.model = model;
this.translator = translator;
}
/** {@inheritDoc} */
@Override
public void load(Path modelPath, String modelName, Map<String, String> options) {
throw new IllegalArgumentException("ZooModel should not be re-loaded.");
}
/** {@inheritDoc} */
@Override
public void save(Path modelPath, String modelName) throws IOException {
model.save(modelPath, modelName);
}
/** {@inheritDoc} */
@Override
public Block getBlock() {
return model.getBlock();
}
/** {@inheritDoc} */
@Override
public void setBlock(Block block) {
model.setBlock(block);
}
/** {@inheritDoc} */
@Override
public String getName() {
return model.getName();
}
/** {@inheritDoc} */
@Override
public String getProperty(String key) {
return model.getProperty(key);
}
/** {@inheritDoc} */
@Override
public void setProperty(String key, String value) {
model.setProperty(key, value);
}
/** {@inheritDoc} */
@Override
public Trainer newTrainer(TrainingConfig trainingConfig) {
return model.newTrainer(trainingConfig);
}
/**
* Creates a new Predictor based on the model with the default translator.
*
* @return an instance of {@code Predictor}
*/
public Predictor<I, O> newPredictor() {
return newPredictor(translator);
}
/** {@inheritDoc} */
@Override
public <P, Q> Predictor<P, Q> newPredictor(Translator<P, Q> translator) {
return model.newPredictor(translator);
}
/**
* Returns the default translator.
*
* @return the default translator
*/
public Translator<I, O> getTranslator() {
return translator;
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeInput() {
return model.describeInput();
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeOutput() {
return model.describeOutput();
}
/** {@inheritDoc} */
@Override
public String[] getArtifactNames() {
return model.getArtifactNames();
}
/** {@inheritDoc} */
@Override
public <T> T getArtifact(String name, Function<InputStream, T> function) throws IOException {
return model.getArtifact(name, function);
}
/** {@inheritDoc} */
@Override
public URL getArtifact(String name) throws IOException {
return model.getArtifact(name);
}
/** {@inheritDoc} */
@Override
public InputStream getArtifactAsStream(String name) throws IOException {
return model.getArtifactAsStream(name);
}
/** {@inheritDoc} */
@Override
public NDManager getNDManager() {
return model.getNDManager();
}
/** {@inheritDoc} */
@Override
public void setDataType(DataType dataType) {
model.setDataType(dataType);
}
/** {@inheritDoc} */
@Override
public DataType getDataType() {
return model.getDataType();
}
/** {@inheritDoc} */
@Override
public void cast(DataType dataType) {
model.cast(dataType);
}
/** {@inheritDoc} */
@Override
public void close() {
model.close();
}
}
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/SpProcessor.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.sentencepiece;
import ai.djl.sentencepiece.jni.LibUtils;
import ai.djl.sentencepiece.jni.SentencePieceLibrary;
import ai.djl.util.Ec2Utils;
import ai.djl.util.NativeResource;
import ai.djl.util.Platform;
/** The processor holder for SentencePiece. */
public final class SpProcessor extends NativeResource<Long> {
private static RuntimeException libraryStatus;
static {
try {
LibUtils.loadLibrary();
} catch (RuntimeException e) {
libraryStatus = e;
}
}
private SpProcessor() {
super(SentencePieceLibrary.LIB.createSentencePieceProcessor());
}
static SpProcessor newInstance() {
if (libraryStatus != null) {
throw libraryStatus;
}
Ec2Utils.callHome("SentencePiece");
return new SpProcessor();
}
/**
* Returns the version of the SentencePiece native library.
*
* @return the version number of the SentencePiece native library
*/
public String getVersion() {
Platform platform = Platform.detectPlatform("sentencepiece");
return platform.getVersion();
}
void loadModel(String path) {
SentencePieceLibrary.LIB.loadModel(getHandle(), path);
}
void loadModelFromBytes(byte[] serializedProto) {
SentencePieceLibrary.LIB.loadModelFromBytes(getHandle(), serializedProto);
}
/**
* Tokenizes a sentence into an array of tokens.
*
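* <p>For example (the pieces produced depend on the loaded model):
*
* <pre>{@code
* String[] tokens = processor.tokenize("Hello world");
* // e.g. ["▁Hello", "▁world"]
* }</pre>
*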
* @param input the input sentence
* @return the tokens
*/
public String[] tokenize(String input) {
return SentencePieceLibrary.LIB.tokenize(getHandle(), input);
}
/**
* Builds a sentence from tokens.
*
* @param tokens the input tokens
* @return the recovered sentence
*/
public String buildSentence(String[] tokens) {
return SentencePieceLibrary.LIB.detokenize(getHandle(), tokens);
}
/**
* Gets the token from an ID.
*
* @param id the index of the token
* @return the recovered token
*/
public String getToken(int id) {
return SentencePieceLibrary.LIB.idToPiece(getHandle(), id);
}
/**
* Gets the ID from a token.
*
* @param token the token to map
* @return the id of the token
*/
public int getId(String token) {
return SentencePieceLibrary.LIB.pieceToId(getHandle(), token);
}
/**
* Encodes a sentence into indices.
*
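* <p>A round-trip sketch (the ids are model-dependent):
*
* <pre>{@code
* int[] ids = processor.encode("Hello world");
* String recovered = processor.decode(ids);
* }</pre>
*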
* @param sentence the input sentence
* @return the indices
*/
public int[] encode(String sentence) {
return SentencePieceLibrary.LIB.encode(getHandle(), sentence);
}
/**
* Decodes indices into a sentence.
*
* @param ids the indices
* @return the recovered sentence
*/
public String decode(int[] ids) {
return SentencePieceLibrary.LIB.decode(getHandle(), ids);
}
/** {@inheritDoc} */
@Override
public void close() {
Long pointer = handle.get();
if (pointer != null) {
SentencePieceLibrary.LIB.deleteSentencePieceProcessor(pointer);
}
}
}
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/SpTextEmbedding.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.sentencepiece;
import ai.djl.modality.nlp.embedding.TextEmbedding;
import ai.djl.ndarray.NDArray;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* A {@link TextEmbedding} in SentencePiece that does sentence tokenization and maps tokens into
* indices.
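*
* <p>A usage sketch (the tokenizer is assumed to be an already-constructed {@link SpTokenizer}):
*
* <pre>{@code
* SpTextEmbedding embedding = SpTextEmbedding.from(tokenizer);
* long[] indices = embedding.preprocessTextToEmbed(Collections.singletonList("Hello world"));
* }</pre>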
*/
public final class SpTextEmbedding implements TextEmbedding {
private SpProcessor processor;
private SpTextEmbedding(SpProcessor processor) {
this.processor = processor;
}
/**
* Gets a SentencePiece {@code TextEmbedding} from an {@link SpTokenizer}.
*
* @param tokenizer the {@link SpTokenizer}
* @return {@link SpTextEmbedding}
*/
public static SpTextEmbedding from(SpTokenizer tokenizer) {
return new SpTextEmbedding(tokenizer.getProcessor());
}
/** {@inheritDoc} */
@Override
public long[] preprocessTextToEmbed(List<String> text) {
if (text.size() != 1) {
throw new IllegalArgumentException(
"SentencePiece require one single sentence to be passed as text");
}
int[] indices = processor.encode(text.get(0));
return Arrays.stream(indices).asLongStream().toArray();
}
/** {@inheritDoc} */
@Override
public NDArray embedText(NDArray textIndices) {
return textIndices;
}
/** {@inheritDoc} */
@Override
public List<String> unembedText(NDArray textEmbedding) {
long[] indices = textEmbedding.toLongArray();
String result = processor.decode(Arrays.stream(indices).mapToInt(i -> (int) i).toArray());
return Collections.singletonList(result);
}
}
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/SpTokenizer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.sentencepiece;
import ai.djl.modality.nlp.preprocess.Tokenizer;
import ai.djl.util.Utils;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
/**
* {@code SpTokenizer} is a SentencePiece implementation of the {@link Tokenizer} interface that
* converts sentences into tokens.
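*
* <p>A minimal usage sketch (the model path is illustrative):
*
* <pre>{@code
* try (SpTokenizer tokenizer = new SpTokenizer(Paths.get("build/model/spm.model"))) {
*     List<String> tokens = tokenizer.tokenize("Hello world");
*     String sentence = tokenizer.buildSentence(tokens);
* }
* }</pre>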
*/
public class SpTokenizer implements Tokenizer, AutoCloseable {
private SpProcessor processor;
/**
* Creates a SentencePiece Tokenizer from an existing model.
*
* @param modelPath the directory or file path of the model location
* @throws IOException when IO operation fails in loading a resource
*/
public SpTokenizer(Path modelPath) throws IOException {
this(modelPath, null);
}
/**
* Creates a SentencePiece Tokenizer from an existing model.
*
* @param modelPath the directory or file path of the model location
* @param prefix the model file name or path prefix
* @throws IOException when IO operation fails in loading a resource
*/
public SpTokenizer(Path modelPath, String prefix) throws IOException {
this.processor = SpProcessor.newInstance();
loadModel(modelPath, prefix);
}
/**
* Creates a SentencePiece Tokenizer from a byte array.
*
* @param serializedModel the serialized model
*/
public SpTokenizer(byte[] serializedModel) {
this.processor = SpProcessor.newInstance();
processor.loadModelFromBytes(serializedModel);
}
/**
* Creates a SentencePiece Tokenizer from an input stream.
*
* @param is {@link InputStream} of the serialized model
* @throws IOException when IO operation fails in loading a resource
*/
public SpTokenizer(InputStream is) throws IOException {
this.processor = SpProcessor.newInstance();
processor.loadModelFromBytes(Utils.toByteArray(is));
}
/** {@inheritDoc} */
@Override
public List<String> tokenize(String sentence) {
return Arrays.asList(processor.tokenize(sentence));
}
/** {@inheritDoc} */
@Override
public String buildSentence(List<String> tokens) {
return processor.buildSentence(tokens.toArray(Utils.EMPTY_ARRAY));
}
/** {@inheritDoc} */
@Override
public void close() {
processor.close();
}
/**
* Gets the SentencePiece processor.
*
* @return {@link SpProcessor}
*/
public SpProcessor getProcessor() {
return processor;
}
private void loadModel(Path modelPath, String prefix) throws IOException {
if (Files.notExists(modelPath)) {
throw new FileNotFoundException(
"Model path doesn't exist: " + modelPath.toAbsolutePath());
}
Path modelDir = modelPath.toAbsolutePath();
if (prefix == null || prefix.isEmpty()) {
prefix = modelDir.toFile().getName();
}
Path modelFile = findModelFile(modelDir, prefix);
if (modelFile == null) {
// TODO: support proto and IOStream model
throw new FileNotFoundException("No .model found in : " + modelPath);
}
String modelFilePath = modelFile.toString();
processor.loadModel(modelFilePath);
}
private Path findModelFile(Path modelPath, String prefix) {
if (Files.isRegularFile(modelPath)) {
return modelPath;
}
Path modelFile = modelPath.resolve(prefix);
if (Files.notExists(modelFile) || !Files.isRegularFile(modelFile)) {
if (prefix.endsWith(".model")) {
return null;
}
modelFile = modelPath.resolve(prefix + ".model");
if (Files.notExists(modelFile) || !Files.isRegularFile(modelFile)) {
return null;
}
}
return modelFile;
}
}
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/SpVocabulary.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.sentencepiece;
import ai.djl.modality.nlp.Vocabulary;
/** {@link SpVocabulary} is a SentencePiece implementation of {@link Vocabulary}. */
public final class SpVocabulary implements Vocabulary {
private SpProcessor processor;
// TODO: Support direct Vocabulary loading
private SpVocabulary(SpProcessor processor) {
this.processor = processor;
}
/**
* Gets a Vocabulary from the {@link SpTokenizer}.
*
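* <p>A usage sketch (the token shown is illustrative):
*
* <pre>{@code
* SpVocabulary vocabulary = SpVocabulary.from(tokenizer);
* long index = vocabulary.getIndex("▁Hello");
* String token = vocabulary.getToken(index);
* }</pre>
*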
* @param tokenizer the {@link SpTokenizer}
* @return {@link SpVocabulary}
*/
public static SpVocabulary from(SpTokenizer tokenizer) {
return new SpVocabulary(tokenizer.getProcessor());
}
/** {@inheritDoc} */
@Override
public String getToken(long index) {
return processor.getToken((int) index);
}
/** {@inheritDoc} */
@Override
public boolean contains(String token) {
throw new UnsupportedOperationException("Not supported for Sentence Piece");
}
/** {@inheritDoc} */
@Override
public long getIndex(String token) {
return processor.getId(token);
}
/** {@inheritDoc} */
@Override
public long size() {
throw new UnsupportedOperationException("Not supported for Sentence Piece");
}
}
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying SentencePiece. */
package ai.djl.sentencepiece;
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/jni/LibUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.sentencepiece.jni;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
/**
* Utilities for finding the SentencePiece binary on the System.
*
* <p>The binary will be searched for in a variety of locations in the following order:
*
* <ol>
* <li>In the path specified by the SENTENCEPIECE_LIBRARY_PATH environment variable
*   <li>In a jar file located in the classpath. These jars can be created with the
*       sentencepiece native build.
* </ol>
*/
@SuppressWarnings("MissingJavadocMethod")
public final class LibUtils {
private static final Logger logger = LoggerFactory.getLogger(LibUtils.class);
private static final String LIB_NAME = "sentencepiece_native";
private LibUtils() {}
public static void loadLibrary() {
String libName = copyJniLibraryFromClasspath();
logger.debug("Loading sentencepiece library from: {}", libName);
System.load(libName); // NOPMD
}
private static String copyJniLibraryFromClasspath() {
String name = System.mapLibraryName(LIB_NAME);
Path nativeDir = Utils.getEngineCacheDir("sentencepiece");
Platform platform = Platform.detectPlatform("sentencepiece");
String classifier = platform.getClassifier();
String version = platform.getVersion();
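// The library is cached under <engine-cache>/sentencepiece/<version>/<libName>
// and reused on subsequent runs.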
Path path = nativeDir.resolve(version).resolve(name);
if (Files.exists(path)) {
return path.toAbsolutePath().toString();
}
Path tmp = null;
String libPath = "native/lib/" + classifier + "/" + name;
logger.info("Extracting {} to cache ...", libPath);
try (InputStream is = ClassLoaderUtils.getResourceAsStream(libPath)) {
Files.createDirectories(nativeDir.resolve(version));
tmp = Files.createTempFile(nativeDir, "jni", "tmp");
Files.copy(is, tmp, StandardCopyOption.REPLACE_EXISTING);
Utils.moveQuietly(tmp, path);
return path.toAbsolutePath().toString();
} catch (IOException e) {
throw new IllegalStateException("Cannot copy jni files", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
}
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/jni/SentencePieceLibrary.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.sentencepiece.jni;
/** A class containing utilities to interact with the SentencePiece Engine's JNI layer. */
@SuppressWarnings("MissingJavadocMethod")
public final class SentencePieceLibrary {
public static final SentencePieceLibrary LIB = new SentencePieceLibrary();
private SentencePieceLibrary() {}
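// All native methods operate on the processor handle returned by
// createSentencePieceProcessor(); callers must release it with
// deleteSentencePieceProcessor(long) when done.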
public native long createSentencePieceProcessor();
public native void loadModel(long handle, String filePath);
public native void loadModelFromBytes(long handle, byte[] bytes);
public native void deleteSentencePieceProcessor(long handle);
public native String[] tokenize(long handle, String text);
public native int[] encode(long handle, String text);
public native String detokenize(long handle, String[] tokens);
public native String decode(long handle, int[] ids);
public native String idToPiece(long handle, int id);
public native int pieceToId(long handle, String piece);
}
|
0
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece
|
java-sources/ai/djl/sentencepiece/sentencepiece/0.34.0/ai/djl/sentencepiece/jni/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying SentencePiece Engine. */
package ai.djl.sentencepiece.jni;
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/Arguments.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving;
import ai.djl.serving.util.ConfigManager;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
/** A class that represents parsed command line arguments. */
public final class Arguments {
private String configFile;
private String modelStore;
private String[] models;
private boolean help;
/**
* Constructs a new {@code Arguments} instance.
*
* @param cmd a parsed {@code CommandLine}
*/
public Arguments(CommandLine cmd) {
configFile = cmd.getOptionValue("config-file");
modelStore = cmd.getOptionValue("model-store");
models = cmd.getOptionValues("models");
help = cmd.hasOption("help");
}
/**
* Builds the command line options.
*
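* <p>A parsing sketch with commons-cli (mirrors the model server's {@code main} method):
*
* <pre>{@code
* CommandLine cmd = new DefaultParser().parse(Arguments.getOptions(), args);
* Arguments arguments = new Arguments(cmd);
* }</pre>
*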
* @return the command line options
*/
public static Options getOptions() {
Options options = new Options();
options.addOption(
Option.builder("h").longOpt("help").hasArg(false).desc("Print this help.").build());
options.addOption(
Option.builder("f")
.longOpt("config-file")
.hasArg()
.argName("CONFIG-FILE")
.desc("Path to the configuration properties file.")
.build());
options.addOption(
Option.builder("m")
.longOpt("models")
.hasArgs()
.argName("MODELS")
.desc("Models to be loaded at startup.")
.build());
options.addOption(
Option.builder("s")
.longOpt("model-store")
.hasArg()
.argName("MODELS-STORE")
.desc("Model store location where models can be loaded.")
.build());
return options;
}
/**
* Returns the configuration file path.
*
* <p>Resolution order: the {@code --config-file} option, the {@code ai.djl.conf} system
* property, then {@code conf/config.properties} and {@code config.properties} under the model
* server home.
*
* @return the configuration file path, or {@code null} if no configuration file is found
*/
public Path getConfigFile() {
if (configFile == null) {
configFile = System.getProperty("ai.djl.conf", null);
}
if (configFile != null) {
Path file = Paths.get(configFile);
if (!Files.isRegularFile(file)) {
throw new IllegalArgumentException("Configuration file not found: " + configFile);
}
return file;
}
Path modelServerHome = Paths.get(ConfigManager.getModelServerHome());
Path file = modelServerHome.resolve("conf/config.properties");
if (Files.isRegularFile(file)) {
return file;
}
file = modelServerHome.resolve("config.properties");
if (Files.isRegularFile(file)) {
return file;
}
return null;
}
/**
* Returns the model store location.
*
* @return the model store location
*/
public String getModelStore() {
return modelStore;
}
/**
* Returns the model URLs specified on the command line.
*
* @return the model URLs specified on the command line
*/
public String[] getModels() {
return models;
}
/**
* Returns whether the command line has the help option.
*
* @return {@code true} if the command line has the help option
*/
public boolean hasHelp() {
return help;
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/ModelServer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving;
import ai.djl.repository.FilenameUtils;
import ai.djl.serving.plugins.FolderScanPluginManager;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.Connector;
import ai.djl.serving.util.ServerGroups;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.ModelManager;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.ServerChannel;
import io.netty.handler.ssl.SslContext;
import io.netty.util.internal.logging.InternalLoggerFactory;
import io.netty.util.internal.logging.Slf4JLoggerFactory;
import java.io.IOException;
import java.net.MalformedURLException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** The main entry point for the model server. */
public class ModelServer {
private static final Logger logger = LoggerFactory.getLogger(ModelServer.class);
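// Matches an optional "[endpoint]=" prefix followed by a model url, where endpoint is
// "modelName[:version[:engine[:gpuId]]]", e.g. "[resnet::PyTorch:0]=https://example.com/resnet.zip"
// (url illustrative); see initModelStore() for how the capture groups are consumed.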
private static final Pattern MODEL_STORE_PATTERN = Pattern.compile("(\\[(.+)]=)?(.+)");
private ServerGroups serverGroups;
private List<ChannelFuture> futures = new ArrayList<>(2);
private AtomicBoolean stopped = new AtomicBoolean(false);
private ConfigManager configManager;
private FolderScanPluginManager pluginManager;
/**
* Creates a new {@code ModelServer} instance.
*
* @param configManager the model server configuration
*/
public ModelServer(ConfigManager configManager) {
this.configManager = configManager;
this.pluginManager = new FolderScanPluginManager(configManager);
serverGroups = new ServerGroups(configManager);
}
/**
* The entry point for the model server.
*
* @param args the command line arguments
*/
public static void main(String[] args) {
Options options = Arguments.getOptions();
try {
DefaultParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, args, null, false);
Arguments arguments = new Arguments(cmd);
if (arguments.hasHelp()) {
printHelp("model-server [OPTIONS]", options);
return;
}
ConfigManager.init(arguments);
ConfigManager configManager = ConfigManager.getInstance();
InternalLoggerFactory.setDefaultFactory(Slf4JLoggerFactory.INSTANCE);
new ModelServer(configManager).startAndWait();
} catch (IllegalArgumentException e) {
logger.error("Invalid configuration: " + e.getMessage());
System.exit(1); // NOPMD
} catch (ParseException e) {
printHelp(e.getMessage(), options);
System.exit(1); // NOPMD
} catch (Throwable t) {
logger.error("Unexpected error", t);
System.exit(1); // NOPMD
}
}
/**
* Starts the model server and blocks until the server stops.
*
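* <p>A programmatic start sketch (configuration setup mirrors {@code main}):
*
* <pre>{@code
* ConfigManager.init(arguments);
* new ModelServer(ConfigManager.getInstance()).startAndWait();
* }</pre>
*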
* @throws InterruptedException if interrupted
* @throws IOException if failed to start socket listener
* @throws GeneralSecurityException if failed to read SSL certificate
*/
public void startAndWait() throws InterruptedException, IOException, GeneralSecurityException {
try {
List<ChannelFuture> channelFutures = start();
logger.info("Model server started.");
channelFutures.get(0).sync();
} finally {
serverGroups.shutdown(true);
logger.info("Model server stopped.");
}
}
/**
* Prepares the channel futures and sets up the {@code ServerBootstrap}.
*
* @return a list of ChannelFuture object
* @throws InterruptedException if interrupted
* @throws IOException if failed to start socket listener
* @throws GeneralSecurityException if failed to read SSL certificate
*/
public List<ChannelFuture> start()
throws InterruptedException, IOException, GeneralSecurityException {
stopped.set(false);
logger.info(configManager.dumpConfigurations());
initModelStore();
pluginManager.loadPlugins();
Connector inferenceConnector =
configManager.getConnector(Connector.ConnectorType.INFERENCE);
Connector managementConnector =
configManager.getConnector(Connector.ConnectorType.MANAGEMENT);
inferenceConnector.clean();
managementConnector.clean();
EventLoopGroup serverGroup = serverGroups.getServerGroup();
EventLoopGroup workerGroup = serverGroups.getChildGroup();
futures.clear();
if (inferenceConnector.equals(managementConnector)) {
Connector both = configManager.getConnector(Connector.ConnectorType.BOTH);
futures.add(initializeServer(both, serverGroup, workerGroup));
} else {
futures.add(initializeServer(inferenceConnector, serverGroup, workerGroup));
futures.add(initializeServer(managementConnector, serverGroup, workerGroup));
}
return futures;
}
/**
* Returns whether the server is running.
*
* @return {@code true} if the server is running
*/
public boolean isRunning() {
return !stopped.get();
}
/** Stops the model server. */
public void stop() {
if (stopped.get()) {
return;
}
stopped.set(true);
for (ChannelFuture future : futures) {
future.channel().close();
}
serverGroups.shutdown(true);
serverGroups.reset();
}
private ChannelFuture initializeServer(
Connector connector, EventLoopGroup serverGroup, EventLoopGroup workerGroup)
throws InterruptedException, IOException, GeneralSecurityException {
Class<? extends ServerChannel> channelClass = connector.getServerChannel();
logger.info(
"Initialize {} server with: {}.",
connector.getType(),
channelClass.getSimpleName());
ServerBootstrap b = new ServerBootstrap();
b.option(ChannelOption.SO_BACKLOG, 1024)
.channel(channelClass)
.childOption(ChannelOption.SO_LINGER, 0)
.childOption(ChannelOption.SO_REUSEADDR, true)
.childOption(ChannelOption.SO_KEEPALIVE, true);
b.group(serverGroup, workerGroup);
SslContext sslCtx = null;
if (connector.isSsl()) {
sslCtx = configManager.getSslContext();
}
b.childHandler(new ServerInitializer(sslCtx, connector.getType(), pluginManager));
ChannelFuture future;
try {
future = b.bind(connector.getSocketAddress()).sync();
} catch (Exception e) {
// https://github.com/netty/netty/issues/2597
if (e instanceof IOException) {
throw new IOException("Failed to bind to address: " + connector, e);
}
throw e;
}
future.addListener(
(ChannelFutureListener)
f -> {
if (!f.isSuccess()) {
try {
f.get();
} catch (InterruptedException | ExecutionException e) {
logger.error("", e);
}
System.exit(2); // NOPMD
}
serverGroups.registerChannel(f.channel());
});
future.sync();
ChannelFuture f = future.channel().closeFuture();
f.addListener(
(ChannelFutureListener)
listener -> logger.info("{} model server stopped.", connector.getType()));
logger.info("{} API bind to: {}", connector.getType(), connector);
return f;
}
private void initModelStore() throws IOException {
ModelManager.init(configManager);
Set<String> startupModels = ModelManager.getInstance().getStartupModels();
String loadModels = configManager.getLoadModels();
if (loadModels == null || loadModels.isEmpty()) {
return;
}
ModelManager modelManager = ModelManager.getInstance();
List<String> urls;
if ("ALL".equalsIgnoreCase(loadModels)) {
Path modelStore = configManager.getModelStore();
if (modelStore == null) {
logger.warn("Model store is not configured.");
return;
}
if (!Files.isDirectory(modelStore)) {
logger.warn("Model store path is not found: {}", modelStore);
return;
}
// Check folders to see if they can be models as well
// Files.list returns a stream that holds an open directory handle;
// close it with try-with-resources to avoid leaking the handle.
try (Stream<Path> stream = Files.list(modelStore)) {
urls =
stream.filter(
p -> {
logger.info("Found file in model_store: {}", p);
try {
return !Files.isHidden(p) && Files.isDirectory(p)
|| FilenameUtils.isArchiveFile(p.toString());
} catch (IOException e) {
logger.warn("Failed to access file: " + p, e);
return false;
}
})
.map(
p -> {
try {
return p.toUri().toURL().toString();
} catch (MalformedURLException e) {
throw new AssertionError("Invalid path: " + p, e);
}
})
.collect(Collectors.toList());
}
} else {
String[] modelsUrls = loadModels.split("[, ]+");
urls = Arrays.asList(modelsUrls);
}
for (String url : urls) {
logger.info("Initializing model: {}", url);
Matcher matcher = MODEL_STORE_PATTERN.matcher(url);
if (!matcher.matches()) {
throw new AssertionError("Invalid model store url: " + url);
}
String endpoint = matcher.group(2);
String modelUrl = matcher.group(3);
String version = null;
String engine = null;
int gpuId = -1;
String modelName;
if (endpoint != null) {
String[] tokens = endpoint.split(":", -1);
modelName = tokens[0];
if (tokens.length > 1) {
version = tokens[1].isEmpty() ? null : tokens[1];
}
if (tokens.length > 2) {
engine = tokens[2].isEmpty() ? null : tokens[2];
}
if (tokens.length > 3) {
gpuId = tokens[3].isEmpty() ? -1 : Integer.parseInt(tokens[3]);
}
} else {
modelName = ModelInfo.inferModelNameFromUrl(modelUrl);
}
int workers = configManager.getDefaultWorkers();
CompletableFuture<ModelInfo> future =
modelManager.registerModel(
modelName,
version,
modelUrl,
engine,
gpuId,
configManager.getBatchSize(),
configManager.getMaxBatchDelay(),
configManager.getMaxIdleTime());
ModelInfo modelInfo = future.join();
modelManager.triggerModelUpdated(modelInfo.scaleWorkers(workers, workers));
startupModels.add(modelName);
}
}
private static void printHelp(String msg, Options options) {
HelpFormatter formatter = new HelpFormatter();
formatter.setLeftPadding(1);
formatter.setWidth(120);
formatter.printHelp(msg, options);
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/ServerInitializer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving;
import ai.djl.serving.http.ConfigurableHttpRequestHandler;
import ai.djl.serving.http.InferenceRequestHandler;
import ai.djl.serving.http.InvalidRequestHandler;
import ai.djl.serving.http.ManagementRequestHandler;
import ai.djl.serving.plugins.FolderScanPluginManager;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.Connector;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.stream.ChunkedWriteHandler;
/**
* A special {@link io.netty.channel.ChannelInboundHandler} that offers an easy way to initialize a
* {@link io.netty.channel.Channel} once it is registered to its {@link
* io.netty.channel.EventLoop}.
*/
public class ServerInitializer extends ChannelInitializer<Channel> {
private Connector.ConnectorType connectorType;
private SslContext sslCtx;
private FolderScanPluginManager pluginManager;
/**
* Creates a new {@code ServerInitializer} instance.
*
* @param sslCtx the SSL context, or {@code null} if SSL is not enabled
* @param connectorType the type of {@link Connector}
* @param pluginManager the plugin manager instance
*/
public ServerInitializer(
SslContext sslCtx,
Connector.ConnectorType connectorType,
FolderScanPluginManager pluginManager) {
this.sslCtx = sslCtx;
this.connectorType = connectorType;
this.pluginManager = pluginManager;
}
/** {@inheritDoc} */
@Override
public void initChannel(Channel ch) {
ChannelPipeline pipeline = ch.pipeline();
int maxRequestSize = ConfigManager.getInstance().getMaxRequestSize();
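// Pipeline order: optional SSL handler, HTTP codec, object aggregation bounded by
// maxRequestSize, chunked writes, then the connector-specific request handlers.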
if (sslCtx != null) {
pipeline.addLast("ssl", sslCtx.newHandler(ch.alloc()));
}
pipeline.addLast("http", new HttpServerCodec());
pipeline.addLast("aggregator", new HttpObjectAggregator(maxRequestSize, true));
pipeline.addLast(new ChunkedWriteHandler());
switch (connectorType) {
case MANAGEMENT:
pipeline.addLast(new ConfigurableHttpRequestHandler(pluginManager));
pipeline.addLast("management", new ManagementRequestHandler());
break;
case INFERENCE:
pipeline.addLast("inference", new InferenceRequestHandler());
break;
case BOTH:
default:
pipeline.addLast("inference", new InferenceRequestHandler());
pipeline.addLast(new ConfigurableHttpRequestHandler(pluginManager));
pipeline.addLast("management", new ManagementRequestHandler());
break;
}
pipeline.addLast("badRequest", new InvalidRequestHandler());
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains a model server implementation. */
package ai.djl.serving;
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/BadRequestException.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** Thrown when a bad HTTP request is received. */
public class BadRequestException extends IllegalArgumentException {
static final long serialVersionUID = 1L;
/**
* Constructs a {@code BadRequestException} with the specified detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
*/
public BadRequestException(String message) {
super(message);
}
/**
* Constructs a {@code BadRequestException} with the specified detail message and a root cause.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
* @param cause root cause
*/
public BadRequestException(String message, Throwable cause) {
super(message, cause);
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/ConfigurableHttpRequestHandler.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.serving.plugins.FolderScanPluginManager;
import ai.djl.serving.plugins.RequestHandler;
import ai.djl.serving.util.NettyUtils;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.QueryStringDecoder;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* HttpRequestHandler that tries to process an HTTP request using the configured RequestHandlers.
*
* <p>RequestHandlers are configured by the PluginManager.
*
* @author erik.bamberg@web.de
*/
public class ConfigurableHttpRequestHandler extends HttpRequestHandler {
private static final Logger logger =
LoggerFactory.getLogger(ConfigurableHttpRequestHandler.class);
private FolderScanPluginManager pluginManager;
/**
* Constructs a {@code ConfigurableHttpRequestHandler}.
*
* @param pluginManager a pluginManager instance used to search for available plug-ins to
* process a request.
*/
public ConfigurableHttpRequestHandler(FolderScanPluginManager pluginManager) {
this.pluginManager = pluginManager;
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException {
RequestHandler<?> requestHandler =
findRequestHandler(req)
.orElseThrow(
() -> new BadRequestException("request handler no longer valid"));
logger.debug(
"request handler {} processes request", requestHandler.getClass().getSimpleName());
try {
Object result = requestHandler.handleRequest(ctx, req, decoder, segments);
if (result != null) {
if (result instanceof CompletableFuture) {
((CompletableFuture<Object>) result)
.handle(
(response, error) -> {
if (error != null) {
NettyUtils.sendError(ctx, error);
} else {
NettyUtils.sendJsonResponse(ctx, response);
}
return response;
});
} else {
NettyUtils.sendJsonResponse(ctx, result);
}
}
} catch (Exception ex) {
NettyUtils.sendError(ctx, ex);
}
}
/**
* Finds the first {@link RequestHandler} that accepts the given request.
*
* @param req the full HTTP request
* @return an optional {@link RequestHandler}
*/
@SuppressWarnings("rawtypes")
private Optional<RequestHandler> findRequestHandler(FullHttpRequest req) {
return pluginManager
.findImplementations(RequestHandler.class)
.stream()
.filter(h -> h.acceptInboundMessage(req))
.findFirst();
}
/** {@inheritDoc} */
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
if (msg instanceof FullHttpRequest) {
return findRequestHandler((FullHttpRequest) msg).isPresent();
} else {
return false;
}
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/DescribeModelResponse.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/** A class that holds information about model status. */
public class DescribeModelResponse {
private String modelName;
private String modelUrl;
private int minWorkers;
private int maxWorkers;
private int batchSize;
private int maxBatchDelay;
private int maxIdleTime;
private String status;
private boolean loadedAtStartup;
private List<Worker> workers;
/** Constructs a {@code DescribeModelResponse} instance. */
public DescribeModelResponse() {
workers = new ArrayList<>();
}
/**
* Returns the model name.
*
* @return the model name
*/
public String getModelName() {
return modelName;
}
/**
* Sets the model name.
*
* @param modelName the model name
*/
public void setModelName(String modelName) {
this.modelName = modelName;
}
/**
* Returns whether the model was loaded at startup.
*
* @return {@code true} if the model was loaded at startup
*/
public boolean isLoadedAtStartup() {
return loadedAtStartup;
}
/**
* Sets the load at startup status.
*
* @param loadedAtStartup {@code true} if the model was loaded at startup
*/
public void setLoadedAtStartup(boolean loadedAtStartup) {
this.loadedAtStartup = loadedAtStartup;
}
/**
* Returns the model URL.
*
* @return the model URL
*/
public String getModelUrl() {
return modelUrl;
}
/**
* Sets the model URL.
*
* @param modelUrl the model URL
*/
public void setModelUrl(String modelUrl) {
this.modelUrl = modelUrl;
}
/**
* Returns the desired minimum number of workers.
*
* @return the desired minimum number of workers
*/
public int getMinWorkers() {
return minWorkers;
}
/**
* Sets the desired minimum number of workers.
*
* @param minWorkers the desired minimum number of workers
*/
public void setMinWorkers(int minWorkers) {
this.minWorkers = minWorkers;
}
/**
* Returns the desired maximum number of workers.
*
* @return the desired maximum number of workers
*/
public int getMaxWorkers() {
return maxWorkers;
}
/**
* Sets the desired maximum number of workers.
*
* @param maxWorkers the desired maximum number of workers
*/
public void setMaxWorkers(int maxWorkers) {
this.maxWorkers = maxWorkers;
}
/**
* Returns the batch size.
*
* @return the batch size
*/
public int getBatchSize() {
return batchSize;
}
/**
* Sets the batch size.
*
* @param batchSize the batch size
*/
public void setBatchSize(int batchSize) {
this.batchSize = batchSize;
}
/**
* Returns the maximum delay in milliseconds to aggregate a batch.
*
* @return the maximum delay in milliseconds to aggregate a batch
*/
public int getMaxBatchDelay() {
return maxBatchDelay;
}
/**
* Sets the maximum delay in milliseconds to aggregate a batch.
*
* @param maxBatchDelay the maximum delay in milliseconds to aggregate a batch
*/
public void setMaxBatchDelay(int maxBatchDelay) {
this.maxBatchDelay = maxBatchDelay;
}
/**
* Returns the model's status.
*
* @return the model's status
*/
public String getStatus() {
return status;
}
/**
* Sets the model's status.
*
* @param status the model's status
*/
public void setStatus(String status) {
this.status = status;
}
/**
* Sets the max idle time for worker threads.
*
* @param maxIdleTime the time a worker thread can be idle before scaling down.
*/
public void setMaxIdleTime(int maxIdleTime) {
this.maxIdleTime = maxIdleTime;
}
/**
* Returns the maximum idle time for worker threads.
*
* @return the maxIdleTime
*/
public int getMaxIdleTime() {
return maxIdleTime;
}
/**
* Returns the information for all workers of the model.
*
* @return the information for all workers of the model
*/
public List<Worker> getWorkers() {
return workers;
}
/**
* Adds worker to the worker list.
*
* @param id the worker's ID
* @param startTime the worker's start time
* @param isRunning {@code true} if worker is running
* @param gpuId the GPU id assigned to the worker, -1 for CPU
*/
public void addWorker(int id, long startTime, boolean isRunning, int gpuId) {
Worker worker = new Worker();
worker.setId(id);
worker.setStartTime(new Date(startTime));
worker.setStatus(isRunning ? "READY" : "UNLOADING");
worker.setGpu(gpuId >= 0);
workers.add(worker);
}
/** A class that holds worker information. */
public static final class Worker {
private int id;
private Date startTime;
private String status;
private boolean gpu;
/**
* Returns the worker's ID.
*
* @return the worker's ID
*/
public int getId() {
return id;
}
/**
* Sets the worker's ID.
*
* @param id the worker's ID
*/
public void setId(int id) {
this.id = id;
}
/**
* Returns the worker's start time.
*
* @return the worker's start time
*/
public Date getStartTime() {
return startTime;
}
/**
* Sets the worker's start time.
*
* @param startTime the worker's start time
*/
public void setStartTime(Date startTime) {
this.startTime = startTime;
}
/**
* Returns the worker's status.
*
* @return the worker's status
*/
public String getStatus() {
return status;
}
/**
* Sets the worker's status.
*
* @param status the worker's status
*/
public void setStatus(String status) {
this.status = status;
}
/**
* Returns whether the worker is using a GPU.
*
* @return {@code true} if the worker is using a GPU
*/
public boolean isGpu() {
return gpu;
}
/**
* Sets whether the worker is using a GPU.
*
* @param gpu {@code true} if the worker is using a GPU
*/
public void setGpu(boolean gpu) {
this.gpu = gpu;
}
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/ErrorResponse.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** A class that holds an error message. */
public class ErrorResponse {
private int code;
private String type;
private String message;
/**
* Constructs an {@code ErrorResponse} instance with code, type, and message.
*
* @param code the error code
* @param type the error type
* @param message the error message
*/
public ErrorResponse(int code, String type, String message) {
this.code = code;
this.type = type;
this.message = message;
}
/**
* Returns the error code.
*
* @return the error code
*/
public int getCode() {
return code;
}
/**
* Returns the error type.
*
* @return the error type
*/
public String getType() {
return type;
}
/**
* Returns the error message.
*
* @return the error message
*/
public String getMessage() {
return message;
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/HttpRequestHandler.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.serving.util.NettyUtils;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.QueryStringDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class handling inbound HTTP requests. */
public abstract class HttpRequestHandler extends SimpleChannelInboundHandler<FullHttpRequest> {
private static final Logger logger = LoggerFactory.getLogger(HttpRequestHandler.class);
/** {@inheritDoc} */
@Override
protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) {
try {
NettyUtils.requestReceived(ctx.channel(), req);
if (!req.decoderResult().isSuccess()) {
throw new BadRequestException("Invalid HTTP message.");
}
QueryStringDecoder decoder = new QueryStringDecoder(req.uri());
String path = decoder.path();
if ("/".equals(path) && HttpMethod.OPTIONS.equals(req.method())) {
handleApiDescription(ctx);
return;
}
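// A leading '/' yields an empty segments[0]; concrete handlers index from segments[1].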
String[] segments = path.split("/");
handleRequest(ctx, req, decoder, segments);
} catch (Throwable t) {
NettyUtils.sendError(ctx, t);
}
}
/** {@inheritDoc} */
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
logger.error("", cause);
ctx.close();
}
protected abstract void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException;
private void handleApiDescription(ChannelHandlerContext ctx) {
NettyUtils.sendJsonResponse(ctx, "{}");
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/InferenceRequestHandler.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.modality.Input;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.NettyUtils;
import ai.djl.serving.wlm.Job;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.ModelManager;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.QueryStringDecoder;
import java.nio.charset.StandardCharsets;
import java.util.Set;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class handling inbound HTTP requests for the inference API. */
public class InferenceRequestHandler extends HttpRequestHandler {
private static final Logger logger = LoggerFactory.getLogger(InferenceRequestHandler.class);
private RequestParser requestParser;
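// Accepts /ping, /invocations, and /predictions[/{modelName}[/{version}]] URIs.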
private static final Pattern PATTERN =
Pattern.compile("^/(ping|invocations|predictions)([/?].*)?");
/** Constructs an {@code InferenceRequestHandler}. */
public InferenceRequestHandler() {
this.requestParser = new RequestParser();
}
/** {@inheritDoc} */
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
if (super.acceptInboundMessage(msg)) {
FullHttpRequest req = (FullHttpRequest) msg;
return PATTERN.matcher(req.uri()).matches();
}
return false;
}
/** {@inheritDoc} */
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException {
switch (segments[1]) {
case "ping":
// TODO: Check if it's OK to send other 2xx errors to ALB for "Partial Healthy"
// and "Unhealthy"
ModelManager.getInstance()
.workerStatus()
.thenAccept(
response ->
NettyUtils.sendJsonResponse(
ctx,
new StatusResponse(response),
HttpResponseStatus.OK));
break;
case "invocations":
handleInvocations(ctx, req, decoder);
break;
case "predictions":
handlePredictions(ctx, req, decoder, segments);
break;
default:
throw new AssertionError("Invalid request uri: " + req.uri());
}
}
private void handlePredictions(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelNotFoundException {
if (segments.length < 3) {
throw new ResourceNotFoundException();
}
String modelName = segments[2];
String version;
if (segments.length > 3) {
version = segments[3].isEmpty() ? null : segments[3];
} else {
version = null;
}
Input input = requestParser.parseRequest(ctx, req, decoder);
predict(ctx, req, input, modelName, version);
}
private void handleInvocations(
ChannelHandlerContext ctx, FullHttpRequest req, QueryStringDecoder decoder)
throws ModelNotFoundException {
Input input = requestParser.parseRequest(ctx, req, decoder);
String modelName = NettyUtils.getParameter(decoder, "model_name", null);
String version = NettyUtils.getParameter(decoder, "model_version", null);
if (modelName == null || modelName.isEmpty()) {
modelName = input.getProperty("model_name", null);
if (modelName == null) {
byte[] buf = input.getContent().get("model_name");
if (buf != null) {
modelName = new String(buf, StandardCharsets.UTF_8);
}
}
}
if (modelName == null) {
Set<String> startModels = ModelManager.getInstance().getStartupModels();
if (startModels.size() == 1) {
modelName = startModels.iterator().next();
}
if (modelName == null) {
throw new BadRequestException("Parameter model_name is required.");
}
}
if (version == null) {
version = input.getProperty("model_version", null);
}
predict(ctx, req, input, modelName, version);
}
private void predict(
ChannelHandlerContext ctx,
FullHttpRequest req,
Input input,
String modelName,
String version)
throws ModelNotFoundException {
ModelManager modelManager = ModelManager.getInstance();
ModelInfo model = modelManager.getModel(modelName, version, true);
if (model == null) {
String regex = ConfigManager.getInstance().getModelUrlPattern();
if (regex == null) {
throw new ModelNotFoundException("Model not found: " + modelName);
}
String modelUrl = input.getProperty("model_url", null);
if (modelUrl == null) {
byte[] buf = input.getContent().get("model_url");
if (buf == null) {
throw new ModelNotFoundException("Parameter model_url is required.");
}
modelUrl = new String(buf, StandardCharsets.UTF_8);
if (!modelUrl.matches(regex)) {
throw new ModelNotFoundException("Permission denied: " + modelUrl);
}
}
String engineName = input.getProperty("engine_name", null);
int gpuId = Integer.parseInt(input.getProperty("gpu_id", "-1"));
logger.info("Loading model {} from: {}", modelName, modelUrl);
modelManager
.registerModel(
modelName,
version,
modelUrl,
engineName,
gpuId,
ConfigManager.getInstance().getBatchSize(),
ConfigManager.getInstance().getMaxBatchDelay(),
ConfigManager.getInstance().getMaxIdleTime())
.thenApply(m -> modelManager.triggerModelUpdated(m.scaleWorkers(1, 1)))
.thenAccept(
m -> {
try {
if (!modelManager.addJob(new Job(ctx, m, input))) {
throw new ServiceUnavailableException(
"No worker is available to serve request: "
+ modelName);
}
} catch (ModelNotFoundException e) {
logger.warn("Unexpected error", e);
NettyUtils.sendError(ctx, e);
}
})
.exceptionally(
t -> {
logger.warn("Unexpected error", t);
NettyUtils.sendError(ctx, t);
return null;
});
return;
}
if (HttpMethod.OPTIONS.equals(req.method())) {
NettyUtils.sendJsonResponse(ctx, "{}");
return;
}
Job job = new Job(ctx, model, input);
if (!modelManager.addJob(job)) {
logger.error("unable to process prediction. no free worker available.");
throw new ServiceUnavailableException(
"No worker is available to serve request: " + modelName);
}
}
}
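// Hypothetical sketch (not part of the source): which URIs the PATTERN above accepts.
// /ping, /invocations and /predictions/{model}[/{version}] are routed to this handler;
// anything else falls through to the other handlers in the pipeline.
final class InferenceUriCheck {
    public static void main(String[] args) {
        Pattern p = Pattern.compile("^/(ping|invocations|predictions)([/?].*)?");
        String[] uris = {
            "/ping", "/invocations?model_name=mlp", "/predictions/mlp/v1", "/models"
        };
        for (String uri : uris) {
            // prints true, true, true, false
            System.out.println(uri + " -> " + p.matcher(uri).matches());
        }
    }
}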
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/InternalServerException.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** Thrown when an internal server failure occurs. */
public class InternalServerException extends RuntimeException {
static final long serialVersionUID = 1L;
/**
* Constructs an {@code InternalServerException} with the specified detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
*/
public InternalServerException(String message) {
super(message);
}
/**
* Constructs an {@code InternalServerException} with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i> automatically
* incorporated into this exception's detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
* @param cause The cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A null value is permitted, and indicates that the cause is nonexistent or
* unknown.)
*/
public InternalServerException(String message, Throwable cause) {
super(message, cause);
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/InvalidRequestHandler.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.QueryStringDecoder;
/** A fallback handler for inbound HTTP requests that no other handler accepts. */
public class InvalidRequestHandler extends HttpRequestHandler {
/** {@inheritDoc} */
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments) {
throw new ResourceNotFoundException();
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/ListModelsResponse.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import java.util.ArrayList;
import java.util.List;
/** A class that holds information about the current registered models. */
public class ListModelsResponse {
private String nextPageToken;
private List<ModelItem> models;
/** Constructs a new {@code ListModelsResponse} instance. */
public ListModelsResponse() {
models = new ArrayList<>();
}
/**
* Returns the next page token.
*
* @return the next page token
*/
public String getNextPageToken() {
return nextPageToken;
}
/**
* Sets the next page token.
*
* @param nextPageToken the next page token
*/
public void setNextPageToken(String nextPageToken) {
this.nextPageToken = nextPageToken;
}
/**
* Returns a list of models.
*
* @return a list of models
*/
public List<ModelItem> getModels() {
return models;
}
/**
* Adds the model to the list.
*
* @param modelName the model name
* @param modelUrl the model url
*/
public void addModel(String modelName, String modelUrl) {
models.add(new ModelItem(modelName, modelUrl));
}
/** A class that holds model name and url. */
public static final class ModelItem {
private String modelName;
private String modelUrl;
/** Constructs a new {@code ModelItem} instance. */
public ModelItem() {}
/**
* Constructs a new {@code ModelItem} instance with model name and url.
*
* @param modelName the model name
* @param modelUrl the model url
*/
public ModelItem(String modelName, String modelUrl) {
this.modelName = modelName;
this.modelUrl = modelUrl;
}
/**
* Returns the model name.
*
* @return the model name
*/
public String getModelName() {
return modelName;
}
/**
* Returns the model url.
*
* @return the model url
*/
public String getModelUrl() {
return modelUrl;
}
}
}
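// Hypothetical sketch (not part of the source): building and reading the response the way
// handleListModels does; NettyUtils.sendJsonResponse serializes it to JSON for the client.
final class ListModelsDemo {
    public static void main(String[] args) {
        ListModelsResponse resp = new ListModelsResponse();
        resp.addModel("mlp", "file:/opt/models/mlp");
        resp.setNextPageToken("1"); // present only when more pages exist
        for (ListModelsResponse.ModelItem item : resp.getModels()) {
            System.out.println(item.getModelName() + " -> " + item.getModelUrl());
        }
    }
}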
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/ManagementRequestHandler.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.util.NettyUtils;
import ai.djl.serving.wlm.Endpoint;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.ModelManager;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.QueryStringDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Pattern;
/**
* A class handling inbound HTTP requests to the management API.
*
* <p>This class handles listing, registering, unregistering, scaling, and describing models.
*/
public class ManagementRequestHandler extends HttpRequestHandler {
/** HTTP Parameter "synchronous". */
private static final String SYNCHRONOUS_PARAMETER = "synchronous";
/** HTTP Parameter "initial_workers". */
private static final String INITIAL_WORKERS_PARAMETER = "initial_workers";
/** HTTP Parameter "url". */
private static final String URL_PARAMETER = "url";
/** HTTP Parameter "batch_size". */
private static final String BATCH_SIZE_PARAMETER = "batch_size";
/** HTTP Parameter "model_name". */
private static final String MODEL_NAME_PARAMETER = "model_name";
/** HTTP Parameter "model_version". */
private static final String MODEL_VERSION_PARAMETER = "model_version";
/** HTTP Parameter "engine_name". */
private static final String ENGINE_NAME_PARAMETER = "engine_name";
/** HTTP Parameter "gpu_id". */
private static final String GPU_ID_PARAMETER = "gpu_id";
/** HTTP Parameter "max_batch_delay". */
private static final String MAX_BATCH_DELAY_PARAMETER = "max_batch_delay";
/** HTTP Parameter "max_idle_time". */
private static final String MAX_IDLE_TIME_PARAMETER = "max_idle_time";
/** HTTP Parameter "max_worker". */
private static final String MAX_WORKER_PARAMETER = "max_worker";
/** HTTP Parameter "min_worker". */
private static final String MIN_WORKER_PARAMETER = "min_worker";
private static final Pattern PATTERN = Pattern.compile("^/models([/?].*)?");
/** {@inheritDoc} */
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
if (super.acceptInboundMessage(msg)) {
FullHttpRequest req = (FullHttpRequest) msg;
return PATTERN.matcher(req.uri()).matches();
}
return false;
}
/** {@inheritDoc} */
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException {
HttpMethod method = req.method();
if (segments.length < 3) {
if (HttpMethod.GET.equals(method)) {
handleListModels(ctx, decoder);
return;
} else if (HttpMethod.POST.equals(method)) {
handleRegisterModel(ctx, decoder);
return;
}
throw new MethodNotAllowedException();
}
String modelName = segments[2];
String version = null;
if (segments.length > 3) {
version = segments[3];
}
if (HttpMethod.GET.equals(method)) {
handleDescribeModel(ctx, modelName, version);
} else if (HttpMethod.PUT.equals(method)) {
handleScaleModel(ctx, decoder, modelName, version);
} else if (HttpMethod.DELETE.equals(method)) {
handleUnregisterModel(ctx, modelName, version);
} else {
throw new MethodNotAllowedException();
}
}
private void handleListModels(ChannelHandlerContext ctx, QueryStringDecoder decoder) {
int limit = NettyUtils.getIntParameter(decoder, "limit", 100);
int pageToken = NettyUtils.getIntParameter(decoder, "next_page_token", 0);
if (limit > 100 || limit < 0) {
limit = 100;
}
if (pageToken < 0) {
pageToken = 0;
}
ModelManager modelManager = ModelManager.getInstance();
Map<String, Endpoint> endpoints = modelManager.getEndpoints();
List<String> keys = new ArrayList<>(endpoints.keySet());
Collections.sort(keys);
ListModelsResponse list = new ListModelsResponse();
int last = pageToken + limit;
if (last > keys.size()) {
last = keys.size();
} else {
list.setNextPageToken(String.valueOf(last));
}
for (int i = pageToken; i < last; ++i) {
String modelName = keys.get(i);
for (ModelInfo m : endpoints.get(modelName).getModels()) {
list.addModel(modelName, m.getModelUrl());
}
}
NettyUtils.sendJsonResponse(ctx, list);
}
private void handleDescribeModel(ChannelHandlerContext ctx, String modelName, String version)
throws ModelNotFoundException {
ModelManager modelManager = ModelManager.getInstance();
DescribeModelResponse resp = modelManager.describeModel(modelName, version);
NettyUtils.sendJsonResponse(ctx, resp);
}
private void handleRegisterModel(final ChannelHandlerContext ctx, QueryStringDecoder decoder) {
String modelUrl = NettyUtils.getParameter(decoder, URL_PARAMETER, null);
if (modelUrl == null) {
throw new BadRequestException("Parameter url is required.");
}
String modelName = NettyUtils.getParameter(decoder, MODEL_NAME_PARAMETER, null);
if (modelName == null || modelName.isEmpty()) {
modelName = ModelInfo.inferModelNameFromUrl(modelUrl);
}
String version = NettyUtils.getParameter(decoder, MODEL_VERSION_PARAMETER, null);
int gpuId = NettyUtils.getIntParameter(decoder, GPU_ID_PARAMETER, -1);
String engineName = NettyUtils.getParameter(decoder, ENGINE_NAME_PARAMETER, null);
int batchSize = NettyUtils.getIntParameter(decoder, BATCH_SIZE_PARAMETER, 1);
int maxBatchDelay = NettyUtils.getIntParameter(decoder, MAX_BATCH_DELAY_PARAMETER, 100);
int maxIdleTime = NettyUtils.getIntParameter(decoder, MAX_IDLE_TIME_PARAMETER, 60);
final int initialWorkers =
NettyUtils.getIntParameter(decoder, INITIAL_WORKERS_PARAMETER, 1);
boolean synchronous =
Boolean.parseBoolean(
NettyUtils.getParameter(decoder, SYNCHRONOUS_PARAMETER, "true"));
final ModelManager modelManager = ModelManager.getInstance();
CompletableFuture<ModelInfo> future =
modelManager.registerModel(
modelName,
version,
modelUrl,
engineName,
gpuId,
batchSize,
maxBatchDelay,
maxIdleTime);
CompletableFuture<Void> f =
future.thenAccept(
m ->
modelManager.triggerModelUpdated(
m.scaleWorkers(initialWorkers, initialWorkers)
.configurePool(maxIdleTime, maxBatchDelay)
.configureModelBatch(batchSize)));
if (synchronous) {
final String msg = "Model \"" + modelName + "\" registered.";
f = f.thenAccept(m -> NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg)));
} else {
String msg = "Model \"" + modelName + "\" registration scheduled.";
NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg));
}
f.exceptionally(
t -> {
NettyUtils.sendError(ctx, t.getCause());
return null;
});
}
private void handleUnregisterModel(ChannelHandlerContext ctx, String modelName, String version)
throws ModelNotFoundException {
ModelManager modelManager = ModelManager.getInstance();
if (!modelManager.unregisterModel(modelName, version)) {
throw new ModelNotFoundException("Model not found: " + modelName);
}
String msg = "Model \"" + modelName + "\" unregistered";
NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg));
}
private void handleScaleModel(
ChannelHandlerContext ctx, QueryStringDecoder decoder, String modelName, String version)
throws ModelNotFoundException {
try {
ModelManager modelManager = ModelManager.getInstance();
ModelInfo modelInfo = modelManager.getModel(modelName, version, false);
if (modelInfo == null) {
throw new ModelNotFoundException("Model not found: " + modelName);
}
int minWorkers =
NettyUtils.getIntParameter(
decoder, MIN_WORKER_PARAMETER, modelInfo.getMinWorkers());
int maxWorkers =
NettyUtils.getIntParameter(
decoder, MAX_WORKER_PARAMETER, modelInfo.getMaxWorkers());
if (maxWorkers < minWorkers) {
throw new BadRequestException("max_worker cannot be less than min_worker.");
}
int maxIdleTime =
NettyUtils.getIntParameter(
decoder, MAX_IDLE_TIME_PARAMETER, modelInfo.getMaxIdleTime());
int maxBatchDelay =
NettyUtils.getIntParameter(
decoder, MAX_BATCH_DELAY_PARAMETER, modelInfo.getMaxBatchDelay());
modelInfo =
modelInfo
.scaleWorkers(minWorkers, maxWorkers)
.configurePool(maxIdleTime, maxBatchDelay);
modelManager.triggerModelUpdated(modelInfo);
String msg =
"Model \""
+ modelName
+ "\" worker scaled. New worker configuration: min workers: "
+ minWorkers
+ ", max workers: "
+ maxWorkers;
NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg));
} catch (NumberFormatException ex) {
throw new BadRequestException("parameter is invalid number." + ex.getMessage(), ex);
}
}
}
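// Hypothetical sketch (not part of the source) of the pagination contract implemented by
// handleListModels: a client keeps passing next_page_token back until a response omits it.
final class PaginationDemo {
    public static void main(String[] args) {
        List<String> keys = java.util.Arrays.asList("a", "b", "c", "d", "e");
        int limit = 2;
        int pageToken = 0;
        while (true) {
            int last = Math.min(pageToken + limit, keys.size());
            System.out.println(keys.subList(pageToken, last));
            if (last == keys.size()) {
                break; // server sent no next_page_token
            }
            pageToken = last; // echo next_page_token back in the next request
        }
    }
}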
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/MethodNotAllowedException.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** Thrown when the method of an HTTP request is not allowed. */
public class MethodNotAllowedException extends RuntimeException {
static final long serialVersionUID = 1L;
/**
* Constructs a {@code MethodNotAllowedException} with {@code null} as its error detail
* message.
*/
public MethodNotAllowedException() {
super("Requested method is not allowed, please refer to API document.");
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/RequestParser.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.modality.Input;
import ai.djl.serving.util.NettyUtils;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.handler.codec.http.multipart.DefaultHttpDataFactory;
import io.netty.handler.codec.http.multipart.HttpDataFactory;
import io.netty.handler.codec.http.multipart.HttpPostRequestDecoder;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A parser for inbound requests.
*
* @author erik.bamberg@web.de
*/
public class RequestParser {
private static final Logger logger = LoggerFactory.getLogger(RequestParser.class);
/**
* Parses a request into an {@link Input} object.
*
* @param ctx the context.
* @param req the full request.
* @param decoder a decoder to decode the query string.
* @return the parsed input object.
*/
public Input parseRequest(
ChannelHandlerContext ctx, FullHttpRequest req, QueryStringDecoder decoder) {
String requestId = NettyUtils.getRequestId(ctx.channel());
Input input = new Input(requestId);
if (decoder != null) {
for (Map.Entry<String, List<String>> entry : decoder.parameters().entrySet()) {
String key = entry.getKey();
for (String value : entry.getValue()) {
input.addData(key, value.getBytes(StandardCharsets.UTF_8));
}
}
}
CharSequence contentType = HttpUtil.getMimeType(req);
for (Map.Entry<String, String> entry : req.headers().entries()) {
input.addProperty(entry.getKey(), entry.getValue());
}
if (HttpPostRequestDecoder.isMultipart(req)
|| HttpHeaderValues.APPLICATION_X_WWW_FORM_URLENCODED.contentEqualsIgnoreCase(
contentType)) {
HttpDataFactory factory = new DefaultHttpDataFactory(6553500);
HttpPostRequestDecoder form = new HttpPostRequestDecoder(factory, req);
try {
while (form.hasNext()) {
NettyUtils.addFormData(form.next(), input);
}
} catch (HttpPostRequestDecoder.EndOfDataDecoderException ignore) {
logger.trace("End of multipart items.");
} finally {
form.cleanFiles();
form.destroy();
}
} else {
byte[] content = NettyUtils.getBytes(req.content());
input.addData("body", content);
}
return input;
}
}
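// Hypothetical sketch (not part of the source): how query parameters end up in the Input,
// mirroring the first loop of parseRequest above.
final class RequestParserDemo {
    public static void main(String[] args) {
        QueryStringDecoder decoder = new QueryStringDecoder("/invocations?model_name=mlp");
        Input input = new Input("demo-request-id");
        for (Map.Entry<String, List<String>> entry : decoder.parameters().entrySet()) {
            for (String value : entry.getValue()) {
                input.addData(entry.getKey(), value.getBytes(StandardCharsets.UTF_8));
            }
        }
        byte[] buf = input.getContent().get("model_name");
        System.out.println(new String(buf, StandardCharsets.UTF_8)); // mlp
    }
}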
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/ResourceNotFoundException.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** Thrown when the resource requested by an HTTP request is not found. */
public class ResourceNotFoundException extends RuntimeException {
static final long serialVersionUID = 1L;
/**
* Constructs a {@code ResourceNotFoundException} with {@code null} as its error detail
* message.
*/
public ResourceNotFoundException() {
super("Requested resource is not found, please refer to API document.");
}
/**
* Constructs a {@code ResourceNotFoundException} with a root cause.
*
* @param cause the root cause
*/
public ResourceNotFoundException(Throwable cause) {
super("Requested resource is not found, please refer to API document.", cause);
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/ServiceUnavailableException.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** Thrown when the server is unable to serve the HTTP request. */
public class ServiceUnavailableException extends RuntimeException {
static final long serialVersionUID = 1L;
/**
* Constructs a {@code ServiceUnavailableException} with the specified detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
*/
public ServiceUnavailableException(String message) {
super(message);
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/Session.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import io.netty.handler.codec.http.HttpRequest;
import java.util.UUID;
/** A class that holds HTTP session information. */
public class Session {
private String requestId;
private String remoteIp;
private String method;
private String uri;
private String protocol;
private int code;
private long startTime;
/**
* Constructs a new {@code Session} instance.
*
* @param remoteIp the remote IP address
* @param request the HTTP request
*/
public Session(String remoteIp, HttpRequest request) {
this.remoteIp = remoteIp;
this.uri = request.uri();
if (request.decoderResult().isSuccess()) {
method = request.method().name();
protocol = request.protocolVersion().text();
} else {
method = "GET";
protocol = "HTTP/1.1";
}
requestId = UUID.randomUUID().toString();
startTime = System.currentTimeMillis();
}
/**
* Returns the request ID.
*
* @return the request ID
*/
public String getRequestId() {
return requestId;
}
/**
* Sets the HTTP response code.
*
* @param code the HTTP response code
*/
public void setCode(int code) {
this.code = code;
}
/** {@inheritDoc} */
@Override
public String toString() {
long duration = System.currentTimeMillis() - startTime;
return remoteIp + " \"" + method + " " + uri + ' ' + protocol + "\" " + code + ' '
+ duration;
}
}
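// Hypothetical sketch (not part of the source): the access-log style line produced by
// toString(), using a plain Netty request object.
final class SessionDemo {
    public static void main(String[] args) {
        HttpRequest req =
                new io.netty.handler.codec.http.DefaultHttpRequest(
                        io.netty.handler.codec.http.HttpVersion.HTTP_1_1,
                        io.netty.handler.codec.http.HttpMethod.GET,
                        "/ping");
        Session session = new Session("127.0.0.1", req);
        session.setCode(200);
        // e.g. 127.0.0.1 "GET /ping HTTP/1.1" 200 0
        System.out.println(session);
    }
}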
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/StatusResponse.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** A class that holds model server status. */
public class StatusResponse {
private String status;
/** Constructs a new {@code StatusResponse} instance. */
public StatusResponse() {}
/**
* Constructs a new {@code StatusResponse} instance with status line.
*
* @param status the status line
*/
public StatusResponse(String status) {
this.status = status;
}
/**
* Returns the status.
*
* @return the status
*/
public String getStatus() {
return status;
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/http/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes that handles REST API calls. */
package ai.djl.serving.http;
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/plugins/FolderScanPluginManager.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import ai.djl.serving.util.ConfigManager;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The {@link PluginManager} is responsible for loading and managing plugins from the file system.
*
* <p>The plugin folder configuration is received from the {@link ConfigManager} and usually
* defaults to {workpath}/plugins. The plugins use Java's SPI and have to implement interfaces from
* serving-api.
*
* @author erik.bamberg@web.de
*/
public class FolderScanPluginManager implements PluginManager {
private static final Logger logger = LoggerFactory.getLogger(FolderScanPluginManager.class);
private static Class<?>[] pluginInterfaces = {RequestHandler.class};
private ConfigManager configManager;
private Map<Class<?>, Set<Plugin<?>>> pluginsRegistry;
/**
* Constructs a {@code PluginManager} instance.
*
* @param configManager an instance of the {@code ConfigManager} used to look up configuration
* such as the plugin folder
*/
public FolderScanPluginManager(ConfigManager configManager) {
this.configManager = configManager;
this.pluginsRegistry = new HashMap<>();
}
/**
* Loads all plugins from the plugin folder and registers them.
*
* @throws IOException if an I/O error occurs
*/
@SuppressWarnings("rawtypes")
public void loadPlugins() throws IOException {
logger.info("scanning for plugins...");
URL[] pluginUrls = listPluginJars();
ClassLoader ucl =
AccessController.doPrivileged(
(PrivilegedAction<ClassLoader>) () -> new URLClassLoader(pluginUrls));
AtomicInteger pluginsFound = new AtomicInteger(0);
Arrays.stream(pluginInterfaces)
.forEach(
pluginInterface -> {
logger.trace("looking for plugin of type {}", pluginInterface);
ServiceLoader<?> sl = ServiceLoader.load(pluginInterface, ucl);
for (Object service : sl) {
pluginsFound.incrementAndGet();
logger.info("load plugin: {}", service.getClass().getSimpleName());
Plugin<?> plugin = new Plugin<>(service);
// TODO: add a plugin lifecycle ("INITIALIZING", "ACTIVE", "SHUTTING DOWN"),
// so a plug-in can depend on another plugin.
if (initializePlugin(plugin)) {
pluginsRegistry
.computeIfAbsent(pluginInterface, k -> new HashSet<>())
.add(plugin);
}
}
});
logger.info("{} plug-ins found.", pluginsFound.intValue());
}
/**
* Initializes a plugin by calling known setters to inject managers and other dependent
* plugins into the plugin.
*
* @param plugin the plugin to initialize
* @return {@code true} if the plugin was initialized successfully, {@code false} otherwise
*/
private boolean initializePlugin(Plugin<?> plugin) {
Object component = plugin.getComponent();
try {
BeanInfo beanInfo = Introspector.getBeanInfo(component.getClass());
for (PropertyDescriptor property : beanInfo.getPropertyDescriptors()) {
// TODO introduce kind of ServiceRegistry and inject all known Managers and others
// plug-ins
if ("pluginManager".equals(property.getName())) {
Method method = property.getWriteMethod();
if (method != null) {
method.invoke(component, this);
} else {
logger.warn(
"no accessible setter for pluginManager found in plugin {}, skipping injection",
plugin.getName());
}
}
}
} catch (IntrospectionException
| ReflectiveOperationException
| IllegalArgumentException e) {
logger.error(
"plugin {} could not be loaded; initialization failed", plugin.getName(), e);
return false;
}
return true;
}
/**
* Returns a set of plug-in components implementing the specific service interface.
*
* <p>Only active plug-ins, which are fully initialized at this point, are returned.
*
* <p>{@code Set<RequestHandler>
* allActiveRequestHandler=findImplementations(RequestHandler.class)}
*
* @param <T> generic type of service interface
* @param pluginInterface the specific service interface
* @return a set of all plugin components implementing this service interface
*/
@Override
public <T> Set<T> findImplementations(Class<T> pluginInterface) {
return Collections.unmodifiableSet(
pluginsRegistry
.getOrDefault(pluginInterface, new HashSet<>())
.stream()
.map(Plugin::getComponent)
.map(pluginInterface::cast)
.collect(Collectors.toSet()));
}
private URL[] listPluginJars() throws IOException {
Path pluginsFolder = configManager.getPluginFolder();
if (pluginsFolder == null || !Files.isDirectory(pluginsFolder)) {
logger.warn("scanning in plug-in folder :{}....folder does not exists", pluginsFolder);
return new URL[0];
}
logger.debug("scanning in plug-in folder :{}", pluginsFolder);
try (Stream<Path> stream = Files.walk(pluginsFolder, Integer.MAX_VALUE)) {
return stream.filter(file -> !Files.isDirectory(file))
.filter(file -> file.getFileName() != null)
.filter(file -> file.getFileName().toString().toLowerCase().endsWith(".jar"))
.map(Path::toUri)
.map(
t -> {
try {
return t.toURL();
} catch (MalformedURLException e) {
logger.error(e.getMessage(), e);
}
return null;
})
.toArray(URL[]::new);
}
}
// TODO: maybe extract this to a public class in serving-api, so we can have functions like
// "listPlugin" which return Plugin objects
static class Plugin<T> {
private T component;
private LocalDateTime loadTime;
public Plugin(T component) {
this.component = component;
this.loadTime = LocalDateTime.now();
}
/**
* Returns the value of component.
*
* @return the component value.
*/
public T getComponent() {
return component;
}
/**
* Returns the load time of the plug-in.
*
* @return the load time value.
*/
public LocalDateTime getLoadTime() {
return loadTime;
}
/**
* Returns the name of the plug-in.
*
* @return name of the plug-in.
*/
public String getName() {
return component.getClass().getSimpleName();
}
}
}
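// Hypothetical sketch (not part of the source) of the setter-injection contract used by
// initializePlugin: a plugin exposing a bean-style pluginManager property receives the
// manager instance reflectively when it is loaded.
final class ManagedPluginExample {
    private PluginManager pluginManager;

    /** Called reflectively by FolderScanPluginManager during initialization. */
    public void setPluginManager(PluginManager pluginManager) {
        this.pluginManager = pluginManager;
    }

    public PluginManager getPluginManager() {
        return pluginManager;
    }
}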
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/plugins/PluginManager.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import java.util.Set;
/**
* The plugin manager is responsible for loading and managing plugins from the file system.
*
* <p>The plugin folder configuration is received from the ConfigManager and usually defaults to
* {workpath}/plugins. The plugins use Java's SPI and have to implement interfaces from
* serving-api.
*
* @author erik.bamberg@web.de
*/
public interface PluginManager {
/**
* Returns a set of plug-in components implementing the specific service interface.
*
* <p>Only active plug-ins, which are fully initialized at this point, are returned.
*
* <p>{@code Set<RequestHandler>
* allActiveRequestHandler=findImplementations(RequestHandler.class)}
*
* @param <T> generic type of service interface
* @param pluginInterface the specific service interface
* @return a set of all plugin components implementing this service interface
*/
<T> Set<T> findImplementations(Class<T> pluginInterface);
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/plugins/RequestHandler.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.QueryStringDecoder;
/**
* Interface to be implemented by HTTP request handlers.
*
* <p>Classes implementing this interface and registered as a service in the SPI service manifest
* are picked up by the serving plugin architecture and automatically registered as request
* handlers for HTTP requests.
*
* @author erik.bamberg@web.de
*/
public interface RequestHandler<T> {
/**
* Returns true if this handler can handle the incoming HTTP request.
*
* <p>This interface follows the chain of responsibility pattern.
*
* @param msg the incoming HTTP message
* @return {@code true} if this handler can handle the incoming HTTP request, {@code false} otherwise
*/
boolean acceptInboundMessage(Object msg);
/**
* The main method which handles the request.
*
* <p>This method is called by the framework if {@code acceptInboundMessage} indicates that this
* handler can handle the request.
*
* @param ctx the handler context.
* @param req the full HttpRequest object.
* @param decoder a query string decoder that helps parse the URL query string.
* @param segments the split segments of the request path.
* @return a response or null. The response is returned to the client, converted to the
* requested format by the server.
*/
T handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments);
}
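// Hypothetical sketch (not part of the source): a do-nothing implementation. In a real
// plugin jar this class would be public in its own file, and its fully-qualified name
// would be listed in META-INF/services/ai.djl.serving.plugins.RequestHandler so the
// ServiceLoader in FolderScanPluginManager can discover it.
final class NoopRequestHandler implements RequestHandler<Void> {
    /** {@inheritDoc} */
    @Override
    public boolean acceptInboundMessage(Object msg) {
        return false; // never claims a request; exists only to demonstrate the SPI wiring
    }

    /** {@inheritDoc} */
    @Override
    public Void handleRequest(
            ChannelHandlerContext ctx,
            FullHttpRequest req,
            QueryStringDecoder decoder,
            String[] segments) {
        return null;
    }
}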
|