| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface
|
java-sources/ai/djl/huggingface/tokenizers/0.34.0/ai/djl/huggingface/zoo/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the built-in {@link ai.djl.huggingface.zoo.HfModelZoo}. */
package ai.djl.huggingface.zoo;
|
0
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java/engine/JavaEngine.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.java.engine;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.ndarray.NDManager;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.GradientCollector;
/**
* The {@code JavaEngine} is an implementation of the {@link Engine} that runs Java applications.
*/
public class JavaEngine extends Engine {
public static final String ENGINE_NAME = "Java";
static final int RANK = 10;
private Engine alternativeEngine;
private boolean initialized;
static Engine newInstance() {
return new JavaEngine();
}
/** {@inheritDoc} */
@Override
public Engine getAlternativeEngine() {
if (!initialized && !Boolean.getBoolean("ai.djl.java.disable_alternative")) {
Engine engine = Engine.getInstance();
if (engine.getRank() < getRank()) {
// alternativeEngine should not have the same rank as Java
alternativeEngine = engine;
}
initialized = true;
}
return alternativeEngine;
}
/** {@inheritDoc} */
@Override
public String getEngineName() {
return ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getRank() {
return RANK;
}
/** {@inheritDoc} */
@Override
public String getVersion() {
return Engine.class.getPackage().getSpecificationVersion();
}
/** {@inheritDoc} */
@Override
public boolean hasCapability(String capability) {
return false;
}
/** {@inheritDoc} */
@Override
public SymbolBlock newSymbolBlock(NDManager manager) {
throw new UnsupportedOperationException("Java Engine does not support empty symbol block");
}
/** {@inheritDoc} */
@Override
public Model newModel(String name, Device device) {
// TODO: Support Java model loading
throw new UnsupportedOperationException("Java Engine currently does not support model");
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager() {
return newBaseManager(null);
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager(Device device) {
return JavaNDManager.getSystemManager().newSubManager(device);
}
/** {@inheritDoc} */
@Override
public GradientCollector newGradientCollector() {
throw new UnsupportedOperationException("Not supported for Java Engine");
}
/** {@inheritDoc} */
@Override
public String toString() {
return getEngineName() + ':' + getVersion();
}
}
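A minimal usage sketch (not part of this dump; the demo class name is hypothetical): once the provider below is registered, the engine can be looked up by name and inspected.

import ai.djl.engine.Engine;

public final class JavaEngineDemo {
    public static void main(String[] args) {
        // Resolves the engine registered under the name "Java" (ENGINE_NAME above).
        Engine engine = Engine.getEngine("Java");
        System.out.println(engine);           // e.g. "Java:<version>"
        System.out.println(engine.getRank()); // 10
    }
}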
|
0
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java/engine/JavaEngineProvider.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.java.engine;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code JavaEngineProvider} is the Java implementation of {@link EngineProvider}. */
public class JavaEngineProvider implements EngineProvider {
private static volatile Engine engine; // NOPMD
/** {@inheritDoc} */
@Override
public String getEngineName() {
return JavaEngine.ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getEngineRank() {
return JavaEngine.RANK;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
if (engine == null) {
synchronized (this) {
if (engine == null) {
engine = JavaEngine.newInstance();
}
}
}
return engine;
}
}
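Engine providers like this one are discovered through Java's ServiceLoader mechanism. A hedged sketch (assuming the provider is declared in the usual META-INF/services descriptor) that lists whatever providers are visible on the classpath:

import ai.djl.engine.EngineProvider;
import java.util.ServiceLoader;

public final class ProviderListing {
    public static void main(String[] args) {
        // Iterates every registered EngineProvider and prints its name and rank.
        for (EngineProvider provider : ServiceLoader.load(EngineProvider.class)) {
            System.out.println(provider.getEngineName() + " rank=" + provider.getEngineRank());
        }
    }
}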
|
0
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java/engine/JavaNDArray.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.java.engine;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrayAdapter;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import java.nio.ByteBuffer;
import java.util.UUID;
/** {@code JavaNDArray} is the Java engine implementation of {@link NDArray}. */
public class JavaNDArray extends NDArrayAdapter {
private ByteBuffer data;
JavaNDArray(
NDManager manager,
NDManager alternativeManager,
ByteBuffer data,
Shape shape,
DataType dataType) {
super(manager, alternativeManager, shape, dataType, UUID.randomUUID().toString());
this.data = data;
manager.attachInternal(uid, this);
}
/** {@inheritDoc} */
@Override
public void intern(NDArray replaced) {
data = ((JavaNDArray) replaced).data;
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer() {
data.rewind();
return data;
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detachInternal(getUid());
manager = JavaNDManager.getSystemManager();
}
}
|
0
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java/engine/JavaNDManager.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.java.engine;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/** {@code JavaNDManager} is the Java engine implementation of {@link NDManager}. */
public class JavaNDManager extends BaseNDManager {
private static final JavaNDManager SYSTEM_MANAGER = new SystemManager();
private JavaNDManager(NDManager parent, Device device) {
super(parent, device);
}
static JavaNDManager getSystemManager() {
return SYSTEM_MANAGER;
}
/** {@inheritDoc} */
@Override
public ByteBuffer allocateDirect(int capacity) {
return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
}
/** {@inheritDoc} */
@Override
public JavaNDArray from(NDArray array) {
if (array == null || array instanceof JavaNDArray) {
return (JavaNDArray) array;
}
return (JavaNDArray) create(array.toByteBuffer(), array.getShape(), array.getDataType());
}
/** {@inheritDoc} */
@Override
public NDManager newSubManager(Device device) {
JavaNDManager manager = new JavaNDManager(this, device);
attachInternal(manager.uid, manager);
return manager;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return Engine.getEngine(JavaEngine.ENGINE_NAME);
}
/** {@inheritDoc} */
@Override
public NDArray create(Buffer data, Shape shape, DataType dataType) {
int size = Math.toIntExact(shape.size());
BaseNDManager.validateBuffer(data, dataType, size);
if (data instanceof ByteBuffer) {
return new JavaNDArray(this, alternativeManager, (ByteBuffer) data, shape, dataType);
}
ByteBuffer bb = ByteBuffer.allocate(size * dataType.getNumOfBytes());
bb.order(ByteOrder.nativeOrder());
BaseNDManager.copyBuffer(data, bb);
return new JavaNDArray(this, alternativeManager, bb, shape, dataType);
}
/** The SystemManager is the root {@link JavaNDManager} of which all others are children. */
private static final class SystemManager extends JavaNDManager implements SystemNDManager {
SystemManager() {
super(null, null);
}
}
}
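A short sketch of creating an array through this manager (hypothetical demo class; the engine is resolved by name since the SystemManager is package-private):

import ai.djl.engine.Engine;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;

public final class JavaNDManagerDemo {
    public static void main(String[] args) {
        // newBaseManager() returns a sub-manager of the system manager (see JavaEngine above).
        try (NDManager manager = Engine.getEngine("Java").newBaseManager()) {
            NDArray array = manager.create(new float[] {1f, 2f, 3f});
            System.out.println(array.getShape()); // (3)
        }
    }
}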
|
0
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java
|
java-sources/ai/djl/java/java/0.20.0/ai/djl/java/engine/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying Java engine. */
package ai.djl.java.engine;
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/classification/SimpleBinaryClassifier.java
|
package ai.djl.jsr381.classification;
import ai.djl.inference.Predictor;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import javax.visrec.ml.ClassificationException;
import javax.visrec.ml.classification.BinaryClassifier;
/** Implementation of a {@link BinaryClassifier} with DJL. */
public class SimpleBinaryClassifier implements BinaryClassifier<float[]> {
private ZooModel<float[], Float> model;
public SimpleBinaryClassifier(ZooModel<float[], Float> model) {
this.model = model;
}
@Override
public Float classify(float[] input) throws ClassificationException {
try (Predictor<float[], Float> predictor = model.newPredictor()) {
return predictor.predict(input);
} catch (TranslateException e) {
throw new ClassificationException("Failed to process output", e);
}
}
}
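A hedged usage sketch; the ZooModel is assumed to come from elsewhere (for example the DjlBinaryClassifierFactory later in this dump):

import ai.djl.repository.zoo.ZooModel;
import javax.visrec.ml.ClassificationException;
import javax.visrec.ml.classification.BinaryClassifier;

final class BinaryClassifierDemo {
    // Returns the probability of the positive class for one feature vector.
    static Float score(ZooModel<float[], Float> model, float[] features)
            throws ClassificationException {
        BinaryClassifier<float[]> classifier = new SimpleBinaryClassifier(model);
        return classifier.classify(features);
    }
}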
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/classification/SimpleImageClassifier.java
|
package ai.djl.jsr381.classification;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.imageio.ImageIO;
import javax.visrec.ml.ClassificationException;
import javax.visrec.ml.classification.ImageClassifier;
/**
* Implementation of an abstract image classifier for {@link BufferedImage} inputs using DJL.
*
* @author Frank Liu
*/
public class SimpleImageClassifier implements ImageClassifier<BufferedImage> {
private ZooModel<BufferedImage, Classifications> model;
private int topK;
public SimpleImageClassifier(ZooModel<BufferedImage, Classifications> model, int topK) {
this.model = model;
this.topK = topK;
}
@Override
public Map<String, Float> classify(File input) throws ClassificationException {
try {
return classify(ImageIO.read(input));
} catch (IOException e) {
throw new ClassificationException("Couldn't transform input into a BufferedImage", e);
}
}
@Override
public Map<String, Float> classify(InputStream input) throws ClassificationException {
try {
return classify(ImageIO.read(input));
} catch (IOException e) {
throw new ClassificationException("Couldn't transform input into a BufferedImage", e);
}
}
@Override
public Map<String, Float> classify(BufferedImage input) throws ClassificationException {
try (Predictor<BufferedImage, Classifications> predictor = model.newPredictor()) {
Classifications classifications = predictor.predict(input);
List<Classifications.Classification> list = classifications.topK(topK);
return list.stream()
.collect(
Collectors.toMap(
Classifications.Classification::getClassName,
x -> (float) x.getProbability()));
} catch (TranslateException e) {
throw new ClassificationException("Failed to process output", e);
}
}
}
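A hedged usage sketch; loading the ZooModel is assumed to happen elsewhere (see DjlImageClassifierFactory below):

import ai.djl.modality.Classifications;
import ai.djl.repository.zoo.ZooModel;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Map;
import javax.visrec.ml.ClassificationException;

final class ImageClassifierDemo {
    // Maps each of the top-5 class names to its probability for one image file.
    static Map<String, Float> top5(ZooModel<BufferedImage, Classifications> model, File image)
            throws ClassificationException {
        return new SimpleImageClassifier(model, 5).classify(image);
    }
}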
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/dataset/CsvDataset.java
|
package ai.djl.jsr381.dataset;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.dataset.Record;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
public class CsvDataset extends RandomAccessDataset {
private List<CSVRecord> records;
private CsvDataset(Builder builder) {
super(builder);
records = builder.records;
}
public static Builder builder() {
return new Builder();
}
@Override
public Record get(NDManager manager, long index) {
CSVRecord record = records.get(Math.toIntExact(index));
int size = record.size();
float[] data = new float[size - 1];
for (int i = 0; i < size - 1; ++i) {
data[i] = Float.parseFloat(record.get(i));
}
NDArray datum = manager.create(data);
NDArray label = manager.create(Float.parseFloat(record.get(size - 1)));
return new Record(new NDList(datum), new NDList(label));
}
@Override
public long availableSize() {
return records.size();
}
public static final class Builder extends BaseBuilder<Builder> {
List<CSVRecord> records;
private File file;
@Override
protected Builder self() {
return this;
}
public Builder setCsvFile(File file) {
this.file = file;
return this;
}
public CsvDataset build() throws IOException {
try (Reader reader = Files.newBufferedReader(file.toPath());
CSVParser csvParser =
new CSVParser(
reader,
CSVFormat.DEFAULT
.withFirstRecordAsHeader()
.withIgnoreHeaderCase()
.withTrim())) {
records = csvParser.getRecords();
}
return new CsvDataset(this);
}
}
}
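A short sketch of building the dataset (get() above reads the last CSV column as the label and all preceding columns as features):

import ai.djl.training.dataset.RandomAccessDataset;
import java.io.File;
import java.io.IOException;

final class CsvDatasetDemo {
    static RandomAccessDataset[] loadSplits(File csvFile) throws IOException {
        CsvDataset dataset =
                CsvDataset.builder()
                        .setCsvFile(csvFile)
                        .setSampling(32, true) // batch size 32, shuffled
                        .build();
        return dataset.randomSplit(8, 2); // 80/20 train/validation split
    }
}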
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/detection/SimpleObjectDetector.java
|
package ai.djl.jsr381.detection;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.visrec.ml.ClassificationException;
import javax.visrec.ml.detection.ObjectDetector;
import javax.visrec.util.BoundingBox;
/** A simple object detector implemented with DJL. */
public class SimpleObjectDetector implements ObjectDetector<BufferedImage> {
private ZooModel<BufferedImage, DetectedObjects> model;
public SimpleObjectDetector(ZooModel<BufferedImage, DetectedObjects> model) {
this.model = model;
}
@Override
public Map<String, List<BoundingBox>> detectObject(BufferedImage image)
throws ClassificationException {
try (Predictor<BufferedImage, DetectedObjects> predictor = model.newPredictor()) {
DetectedObjects detectedObjects = predictor.predict(image);
Map<String, List<BoundingBox>> ret = new ConcurrentHashMap<>();
int imageWidth = image.getWidth();
int imageHeight = image.getHeight();
List<DetectedObjects.DetectedObject> detections = detectedObjects.items();
for (DetectedObjects.DetectedObject detection : detections) {
String className = detection.getClassName();
float probability = (float) detection.getProbability();
Rectangle rect = detection.getBoundingBox().getBounds();
int x = (int) (rect.getX() * imageWidth);
int y = (int) (rect.getY() * imageHeight);
float w = (float) (rect.getWidth() * imageWidth);
float h = (float) (rect.getHeight() * imageHeight);
ret.compute(
className,
(k, list) -> {
if (list == null) {
list = new ArrayList<>();
}
list.add(new BoundingBox(className, probability, x, y, w, h));
return list;
});
}
return ret;
} catch (TranslateException e) {
throw new ClassificationException("Failed to process output", e);
}
}
}
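A hedged usage sketch; the model is assumed to be loaded elsewhere. Note that detectObject above scales the normalized Rectangle into pixel coordinates:

import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.repository.zoo.ZooModel;
import java.awt.image.BufferedImage;
import java.util.List;
import java.util.Map;
import javax.visrec.ml.ClassificationException;
import javax.visrec.util.BoundingBox;

final class ObjectDetectorDemo {
    static Map<String, List<BoundingBox>> detect(
            ZooModel<BufferedImage, DetectedObjects> model, BufferedImage image)
            throws ClassificationException {
        return new SimpleObjectDetector(model).detectObject(image);
    }
}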
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/spi/DjlBinaryClassifierFactory.java
|
package ai.djl.jsr381.spi;
import ai.djl.Model;
import ai.djl.jsr381.classification.SimpleBinaryClassifier;
import ai.djl.jsr381.dataset.CsvDataset;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.core.Linear;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.Trainer;
import ai.djl.training.dataset.Batch;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.BinaryAccuracy;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import javax.visrec.ml.ClassifierCreationException;
import javax.visrec.ml.classification.BinaryClassifier;
import javax.visrec.ml.classification.NeuralNetBinaryClassifier;
import javax.visrec.spi.BinaryClassifierFactory;
public class DjlBinaryClassifierFactory implements BinaryClassifierFactory<float[]> {
@Override
public Class<float[]> getTargetClass() {
return float[].class;
}
@Override
public BinaryClassifier<float[]> create(NeuralNetBinaryClassifier.BuildingBlock<float[]> block)
throws ClassifierCreationException {
int inputSize = block.getInputsNum();
int[] hiddenLayers = block.getHiddenLayers();
int epochs = block.getMaxEpochs();
int batchSize = 32;
SequentialBlock mlp = new SequentialBlock().add(Blocks.batchFlattenBlock(inputSize));
for (int size : hiddenLayers) {
mlp.add(Linear.builder().setOutChannels(size).build()).add(Activation::relu);
}
mlp.add(BatchNorm.builder().build())
.add(Linear.builder().setOutChannels(1).build())
.add(arrays -> new NDList(arrays.singletonOrThrow().flatten()));
Model model = Model.newInstance();
model.setBlock(mlp);
RandomAccessDataset[] dataset;
try {
CsvDataset csv =
CsvDataset.builder()
.setCsvFile(block.getTrainingFile())
.setSampling(batchSize, true)
.build();
dataset = csv.randomSplit(8, 2);
} catch (IOException e) {
throw new ClassifierCreationException("Failed to load dataset.", e);
}
// setup training configuration
DefaultTrainingConfig config =
new DefaultTrainingConfig(Loss.sigmoidBinaryCrossEntropyLoss())
.addTrainingListeners(TrainingListener.Defaults.logging())
.addEvaluator(new BinaryAccuracy());
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
Shape inputShape = new Shape(1, inputSize);
trainer.initialize(inputShape);
for (int i = 0; i < epochs; i++) {
for (Batch batch : trainer.iterateDataset(dataset[0])) {
trainer.trainBatch(batch);
trainer.step();
batch.close();
}
for (Batch batch : trainer.iterateDataset(dataset[1])) {
trainer.validateBatch(batch);
batch.close();
}
// reset training and validation evaluators at end of epoch
trainer.endEpoch();
}
}
return new SimpleBinaryClassifier(new ZooModel<>(model, new BinaryClassifierTranslator()));
}
private static final class BinaryClassifierTranslator implements Translator<float[], Float> {
@Override
public NDList processInput(TranslatorContext ctx, float[] input) {
NDManager manager = ctx.getNDManager();
NDArray array = manager.create(input);
return new NDList(array);
}
@Override
public Float processOutput(TranslatorContext ctx, NDList list) {
return list.singletonOrThrow().getFloat();
}
}
}
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/spi/DjlImageClassifierFactory.java
|
package ai.djl.jsr381.spi;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.jsr381.classification.SimpleImageClassifier;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.transform.CenterCrop;
import ai.djl.modality.cv.transform.Resize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.ImageClassificationTranslator;
import ai.djl.modality.cv.util.NDImageUtils.Flag;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.Pipeline;
import ai.djl.translate.Translator;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.visrec.ml.ClassifierCreationException;
import javax.visrec.ml.classification.ImageClassifier;
import javax.visrec.ml.classification.NeuralNetImageClassifier;
import javax.visrec.spi.ImageClassifierFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DjlImageClassifierFactory implements ImageClassifierFactory<BufferedImage> {
private static final Logger logger = LoggerFactory.getLogger(DjlImageClassifierFactory.class);
@Override
public Class<BufferedImage> getImageClass() {
return BufferedImage.class;
}
@Override
public ImageClassifier<BufferedImage> create(
NeuralNetImageClassifier.BuildingBlock<BufferedImage> block)
throws ClassifierCreationException {
int width = block.getImageWidth();
int height = block.getImageHeight();
Flag flag = width < 50 ? Flag.GRAYSCALE : Flag.COLOR;
File modelFile = block.getModelFile();
if (modelFile != null) {
// load pre-trained model from model zoo
logger.info("Loading pre-trained model ...");
try {
Pipeline pipeline = new Pipeline();
pipeline.add(new CenterCrop()).add(new Resize(width, height)).add(new ToTensor());
Translator<BufferedImage, Classifications> translator =
ImageClassificationTranslator.builder()
.optFlag(flag)
.setPipeline(pipeline)
.setSynsetArtifactName("synset.txt")
.optApplySoftmax(true)
.build();
Model model = Model.newInstance();
model.load(modelFile.toPath());
ZooModel<BufferedImage, Classifications> zooModel =
new ZooModel<>(model, translator);
return new SimpleImageClassifier(zooModel, 5);
} catch (MalformedModelException | IOException e) {
throw new ClassifierCreationException("Failed load model from model zoo.", e);
}
}
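// Training a new classifier from scratch (modelFile == null) is not implemented here.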
return null;
}
}
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/spi/DjlImageFactoryService.java
|
package ai.djl.jsr381.spi;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import javax.imageio.ImageIO;
import javax.visrec.ImageFactory;
import javax.visrec.spi.ImageFactoryService;
/**
* DJL implementation of {@link ImageFactoryService} which serves the implementations of {@link
* ImageFactory}.
*
* @author Frank Liu
*/
public final class DjlImageFactoryService implements ImageFactoryService {
private static final Map<Class<?>, ImageFactory<?>> IMAGE_FACTORIES = new ConcurrentHashMap<>();
static {
IMAGE_FACTORIES.put(BufferedImage.class, new ImageFactoryImpl());
}
/**
* Get the {@link ImageFactory} by image type.
*
* @param imageCls the image type, as a {@link Class} object, that the image factory
* implementation can process.
* @param <T> image type.
* @return the {@link ImageFactory} wrapped in an {@link Optional}; if no factory is registered
* for the type, an empty {@link Optional} is returned.
*/
@Override
@SuppressWarnings("unchecked")
public <T> Optional<ImageFactory<T>> getByImageType(Class<T> imageCls) {
Objects.requireNonNull(imageCls, "imageCls == null");
ImageFactory<?> imageFactory = IMAGE_FACTORIES.get(imageCls);
return Optional.ofNullable((ImageFactory<T>) imageFactory);
}
/** {@link ImageFactory} to provide {@link BufferedImage} as return object. */
public static final class ImageFactoryImpl implements ImageFactory<BufferedImage> {
/** {@inheritDoc} */
@Override
public BufferedImage getImage(File file) throws IOException {
BufferedImage img = ImageIO.read(file);
if (img == null) {
throw new IOException(
"Unable to transform File into BufferedImage due to unknown image encoding");
}
return img;
}
/** {@inheritDoc} */
@Override
public BufferedImage getImage(URL file) throws IOException {
BufferedImage img = ImageIO.read(file);
if (img == null) {
throw new IOException(
"Unable to transform URL into BufferedImage due to unknown image encoding");
}
return img;
}
/** {@inheritDoc} */
@Override
public BufferedImage getImage(InputStream file) throws IOException {
BufferedImage img = ImageIO.read(file);
if (img == null) {
throw new IOException(
"Unable to transform InputStream into BufferedImage due to unknown image encoding");
}
return img;
}
}
}
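A minimal sketch of resolving the BufferedImage factory through this service (the file path is a placeholder):

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.visrec.ImageFactory;

final class ImageFactoryDemo {
    public static void main(String[] args) throws IOException {
        ImageFactory<BufferedImage> factory =
                new DjlImageFactoryService()
                        .getByImageType(BufferedImage.class)
                        .orElseThrow(IllegalStateException::new);
        BufferedImage image = factory.getImage(new File("example.jpg")); // placeholder path
        System.out.println(image.getWidth() + "x" + image.getHeight());
    }
}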
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/spi/DjlImplementationService.java
|
package ai.djl.jsr381.spi;
import javax.visrec.spi.ImplementationService;
/**
* DJL's {@link ImplementationService}.
*
* @author Frank Liu
*/
public class DjlImplementationService extends ImplementationService {
/** {@inheritDoc} */
@Override
public String getName() {
return "DJL";
}
/** {@inheritDoc} */
@Override
public String getVersion() {
return "0.3.0";
}
}
|
0
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381
|
java-sources/ai/djl/jsr381/jsr381/0.4.1/ai/djl/jsr381/spi/DjlServiceProvider.java
|
package ai.djl.jsr381.spi;
import javax.visrec.spi.BuilderService;
import javax.visrec.spi.ImageFactoryService;
import javax.visrec.spi.ImplementationService;
import javax.visrec.spi.ServiceProvider;
/**
* {@link ServiceProvider} implementation with DJL.
*
* @author Frank Liu
*/
public final class DjlServiceProvider extends ServiceProvider {
/** {@inheritDoc} */
@Override
@SuppressWarnings("deprecation")
public BuilderService getBuilderService() {
return null;
}
/** {@inheritDoc} */
@Override
public ImageFactoryService getImageFactoryService() {
return new DjlImageFactoryService();
}
/** {@inheritDoc} */
@Override
public ImplementationService getImplementationService() {
return new DjlImplementationService();
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/engine/LlamaEngine.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.engine;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.llama.jni.LibUtils;
import ai.djl.ndarray.NDManager;
import ai.djl.util.Platform;
import ai.djl.util.passthrough.PassthroughNDManager;
/** The {@code LlamaEngine} is an implementation of the {@link Engine} based on llama.cpp. */
public final class LlamaEngine extends Engine {
public static final String ENGINE_NAME = "Llama";
static final int RANK = 10;
private Engine alternativeEngine;
private boolean initialized;
private LlamaEngine() {
try {
LibUtils.loadLibrary();
} catch (EngineException e) { // NOPMD
throw e;
} catch (Throwable t) {
throw new EngineException("Failed to load llama.cpp native library", t);
}
}
static Engine newInstance() {
return new LlamaEngine();
}
/** {@inheritDoc} */
@Override
public Engine getAlternativeEngine() {
if (!initialized && !Boolean.getBoolean("ai.djl.llama.disable_alternative")) {
Engine engine = Engine.getInstance();
if (engine.getRank() < getRank()) {
// alternativeEngine should not have the same rank as Llama
alternativeEngine = engine;
}
initialized = true;
}
return alternativeEngine;
}
/** {@inheritDoc} */
@Override
public String getEngineName() {
return ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getRank() {
return RANK;
}
/** {@inheritDoc} */
@Override
public String getVersion() {
Platform platform = Platform.detectPlatform("llama");
return platform.getVersion();
}
/** {@inheritDoc} */
@Override
public boolean hasCapability(String capability) {
return false;
}
/** {@inheritDoc} */
@Override
public Model newModel(String name, Device device) {
return new LlamaModel(name, newBaseManager(device));
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager() {
return newBaseManager(null);
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager(Device device) {
return PassthroughNDManager.INSTANCE;
}
/** {@inheritDoc} */
@Override
public String toString() {
return getEngineName() + ':' + getVersion();
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/engine/LlamaEngineProvider.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.engine;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code LlamaEngineProvider} is the Llama implementation of {@link EngineProvider}. */
public class LlamaEngineProvider implements EngineProvider {
/** {@inheritDoc} */
@Override
public String getEngineName() {
return LlamaEngine.ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getEngineRank() {
return LlamaEngine.RANK;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return InstanceHolder.INSTANCE;
}
private static class InstanceHolder {
static final Engine INSTANCE = LlamaEngine.newInstance();
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/engine/LlamaInput.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.engine;
import ai.djl.llama.jni.InputParameters;
import com.google.gson.annotations.SerializedName;
import java.util.Map;
/** A class that holds input data for the Llama model. */
public class LlamaInput {
private String inputs;
private String prefix;
private String suffix;
private Parameters parameters;
/**
* Returns the input prompt.
*
* @return the input prompt
*/
public String getInputs() {
return inputs;
}
/**
* Sets the input prompt.
*
* @param inputs the input prompt
*/
public void setInputs(String inputs) {
this.inputs = inputs;
}
/**
* Returns the prompt prefix.
*
* @return the prompt prefix
*/
public String getPrefix() {
return prefix;
}
/**
* Sets the prompt prefix.
*
* @param prefix the prompt prefix
*/
public void setPrefix(String prefix) {
this.prefix = prefix;
}
/**
* Returns the prompt suffix.
*
* @return the prompt suffix
*/
public String getSuffix() {
return suffix;
}
/**
* Sets the prompt suffix.
*
* @param suffix the prompt suffix
*/
public void setSuffix(String suffix) {
this.suffix = suffix;
}
/**
* Returns the input parameters.
*
* @return the input parameters
*/
public Parameters getParameters() {
if (parameters == null) {
parameters = new Parameters();
}
return parameters;
}
/**
* Sets the input parameters.
*
* @param parameters the input parameters
*/
public void setParameters(Parameters parameters) {
this.parameters = parameters;
}
/** The input parameters class. */
public static final class Parameters {
@SerializedName("max_new_tokens")
private int nPredict;
@SerializedName("number_keep")
private int nKeep;
@SerializedName("number_probabilities")
private int nProbs;
@SerializedName("top_k")
private int topK;
@SerializedName("top_p")
private float topP;
@SerializedName("tfs_z")
private float tfsZ;
@SerializedName("typical_p")
private float typicalP;
@SerializedName("temperature")
private float temperature;
@SerializedName("repeat_penalty")
private float repeatPenalty;
@SerializedName("repeat_last_n")
private int repeatLastN;
@SerializedName("frequency_penalty")
private float frequencyPenalty;
@SerializedName("presence_penalty")
private float presencePenalty;
@SerializedName("penalize_nl")
private boolean penalizeNl;
@SerializedName("ignore_eos")
private boolean ignoreEos;
@SerializedName("mirostat")
private int mirostat;
@SerializedName("mirostat_tau")
private float mirostatTau;
@SerializedName("mirostat_eta")
private float mirostatEta;
@SerializedName("number_beams")
private int nBeams;
@SerializedName("seed")
private int seed;
@SerializedName("logit_bias")
private Map<Integer, Float> logitBias;
@SerializedName("grammar")
private String grammar;
@SerializedName("anti_prompt")
private String[] antiPrompt;
/**
* Sets the max new tokens.
*
* @param maxNewTokens the max new tokens
*/
public void setMaxNewTokens(int maxNewTokens) {
this.nPredict = maxNewTokens;
}
/**
* Sets the number of prompt tokens to keep.
*
* @param nKeep the number of prompt tokens to keep
*/
public void setNumberKeep(int nKeep) {
this.nKeep = nKeep;
}
/**
* Sets the number of probabilities.
*
* @param nProbs the number of probabilities
*/
public void setNumberProbabilities(int nProbs) {
this.nProbs = nProbs;
}
/**
* Sets the top K.
*
* @param topK the top K
*/
public void setTopK(int topK) {
this.topK = topK;
}
/**
* Sets the top P.
*
* @param topP the top P
*/
public void setTopP(float topP) {
this.topP = topP;
}
/**
* Sets the tfs Z.
*
* @param tfsZ the tfs Z
*/
public void setTfsZ(float tfsZ) {
this.tfsZ = tfsZ;
}
/**
* Sets the typical P.
*
* @param typicalP the typical P
*/
public void setTypicalP(float typicalP) {
this.typicalP = typicalP;
}
/**
* Sets the temperature.
*
* @param temperature the temperature
*/
public void setTemperature(float temperature) {
this.temperature = temperature;
}
/**
* Sets the repeat penalty.
*
* @param repeatPenalty the repeat penalty
*/
public void setRepeatPenalty(float repeatPenalty) {
this.repeatPenalty = repeatPenalty;
}
/**
* Sets the repeat last N.
*
* @param repeatLastN the repeat last N
*/
public void setRepeatLastN(int repeatLastN) {
this.repeatLastN = repeatLastN;
}
/**
* Sets the frequency penalty.
*
* @param frequencyPenalty the frequency penalty
*/
public void setFrequencyPenalty(float frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
/**
* Sets the presence penalty.
*
* @param presencePenalty the presence penalty
*/
public void setPresencePenalty(float presencePenalty) {
this.presencePenalty = presencePenalty;
}
/**
* Sets whether to penalize newline tokens.
*
* @param penalizeNl whether to penalize newline tokens
*/
public void setPenalizeNl(boolean penalizeNl) {
this.penalizeNl = penalizeNl;
}
/**
* Sets if ignore EOS.
*
* @param ignoreEos if ignore EOS
*/
public void setIgnoreEos(boolean ignoreEos) {
this.ignoreEos = ignoreEos;
}
/**
* Sets the mirostat.
*
* @param mirostat the mirostat
*/
public void setMirostat(int mirostat) {
this.mirostat = mirostat;
}
/**
* Sets the mirostat TAU.
*
* @param mirostatTau the mirostat TAU
*/
public void setMirostatTau(float mirostatTau) {
this.mirostatTau = mirostatTau;
}
/**
* Sets the mirostat ETA.
*
* @param mirostatEta the mirostat ETA
*/
public void setMirostatEta(float mirostatEta) {
this.mirostatEta = mirostatEta;
}
/**
* Sets the number of beams.
*
* @param nBeams the number of beams
*/
public void setNumberBeams(int nBeams) {
this.nBeams = nBeams;
}
/**
* Sets the seed.
*
* @param seed the seed
*/
public void setSeed(int seed) {
this.seed = seed;
}
/**
* Sets the logit bias.
*
* @param logitBias the logit bias
*/
public void setLogitBias(Map<Integer, Float> logitBias) {
this.logitBias = logitBias;
}
/**
* Sets the grammar template.
*
* @param grammar the grammar template
*/
public void setGrammar(String grammar) {
this.grammar = grammar;
}
/**
* Sets the anti prompt.
*
* @param antiPrompt the anti prompt
*/
public void setAntiPrompt(String[] antiPrompt) {
this.antiPrompt = antiPrompt;
}
/**
* Returns the {@link InputParameters} object.
*
* @return the {@link InputParameters} object
*/
public InputParameters toInputParameters() {
setDefaultValue();
return new InputParameters(
nPredict,
nKeep,
nProbs,
topK,
topP,
tfsZ,
typicalP,
temperature,
repeatPenalty,
repeatLastN,
frequencyPenalty,
presencePenalty,
penalizeNl,
ignoreEos,
mirostat,
mirostatTau,
mirostatEta,
nBeams,
seed,
logitBias,
grammar,
antiPrompt);
}
private void setDefaultValue() {
if (nPredict == 0) {
nPredict = -1;
}
if (topK == 0) {
topK = 40;
}
if (topP == 0) {
topP = 0.95f;
}
if (tfsZ == 0) {
tfsZ = 1f;
}
if (typicalP == 0) {
typicalP = 1f;
}
if (temperature == 0) {
temperature = 0.8f;
}
if (repeatPenalty == 0) {
repeatPenalty = 1.10f;
}
if (repeatLastN == 0) {
repeatLastN = 64;
}
}
}
}
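A short sketch of how this class serializes; the JSON field names follow the @SerializedName annotations above (the demo class is hypothetical):

import ai.djl.util.JsonUtils;

public final class LlamaInputDemo {
    public static void main(String[] args) {
        LlamaInput in = new LlamaInput();
        in.setInputs("What is Deep Java Library?");
        LlamaInput.Parameters params = new LlamaInput.Parameters();
        params.setMaxNewTokens(128);
        params.setTemperature(0.7f);
        in.setParameters(params);
        // Prints e.g. {"inputs":"What is Deep Java Library?",
        //              "parameters":{"max_new_tokens":128,"temperature":0.7,...}}
        System.out.println(JsonUtils.GSON.toJson(in));
    }
}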
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/engine/LlamaModel.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.engine;
import ai.djl.BaseModel;
import ai.djl.Model;
import ai.djl.llama.jni.LlamaLibrary;
import ai.djl.llama.jni.ModelParameters;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.nn.Blocks;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
/** {@code LlamaModel} is the llama.cpp implementation of {@link Model}. */
public class LlamaModel extends BaseModel {
private long handle = -1;
/**
* Constructs a new Model on a given device.
*
* @param name the model name
* @param manager the {@link NDManager} that holds the NDArray
*/
LlamaModel(String name, NDManager manager) {
super(name);
this.manager = manager;
this.manager.setName("llamaModel");
dataType = DataType.FLOAT32;
}
/** {@inheritDoc} */
@Override
public void load(Path modelPath, String prefix, Map<String, ?> options) throws IOException {
setModelDir(modelPath);
wasLoaded = true;
if (block != null) {
throw new UnsupportedOperationException("Llama does not support dynamic blocks");
}
if (prefix == null) {
prefix = modelName;
}
// search for .gguf file with prefix, folder name or "model.gguf"
Path modelFile = findModelFile(prefix, modelDir.toFile().getName(), "model.gguf");
if (modelFile == null) {
throw new FileNotFoundException(".gguf file not found in: " + modelPath);
}
ModelParameters param = new ModelParameters(options);
handle = LlamaLibrary.loadModel(modelFile.toString(), param);
block = Blocks.identityBlock();
}
long getHandle() {
return handle;
}
private Path findModelFile(String... prefixes) {
if (Files.isRegularFile(modelDir)) {
Path file = modelDir;
modelDir = modelDir.getParent();
String fileName = file.toFile().getName();
if (fileName.endsWith(".gguf")) {
modelName = fileName.substring(0, fileName.length() - 5);
} else {
modelName = fileName;
}
return file;
}
for (String prefix : prefixes) {
Path modelFile = modelDir.resolve(prefix);
if (Files.isRegularFile(modelFile)) {
return modelFile;
}
if (!prefix.endsWith(".gguf")) {
modelFile = modelDir.resolve(prefix + ".gguf");
if (Files.isRegularFile(modelFile)) {
return modelFile;
}
}
}
return null;
}
/** {@inheritDoc} */
@Override
public void close() {
if (handle == -1) {
return;
}
LlamaLibrary.delete(handle);
handle = -1;
super.close();
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/engine/LlamaTranslator.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.engine;
import ai.djl.inference.streaming.IteratorBytesSupplier;
import ai.djl.llama.jni.InputParameters;
import ai.djl.llama.jni.LlamaLibrary;
import ai.djl.llama.jni.Token;
import ai.djl.llama.jni.TokenIterator;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.ndarray.BytesSupplier;
import ai.djl.ndarray.NDList;
import ai.djl.translate.NoBatchifyTranslator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.JsonUtils;
import java.util.Iterator;
/** Built-in {@code Translator} that provides preprocessing and postprocessing for llama.cpp. */
public class LlamaTranslator<I, O> implements NoBatchifyTranslator<I, O> {
private long handle;
/** {@inheritDoc} */
@Override
public void prepare(TranslatorContext ctx) {
LlamaModel model = (LlamaModel) ctx.getModel();
handle = model.getHandle();
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, I input) {
if (input instanceof String) {
ctx.setAttachment("out", generate((String) input));
} else if (input instanceof LlamaInput) {
ctx.setAttachment("out", generate((LlamaInput) input));
} else if (input instanceof Input) {
String prompt = ((Input) input).getData().getAsString();
TokenIterator it = generate(prompt);
Output output = new Output();
output.add(new IteratorBytesSupplier(new OutputIterator(it)));
ctx.setAttachment("out", output);
}
return new NDList();
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public O processOutput(TranslatorContext ctx, NDList list) {
return (O) ctx.getAttachment("out");
}
private TokenIterator generate(String input) {
LlamaInput in = JsonUtils.GSON.fromJson(input, LlamaInput.class);
return generate(in);
}
private TokenIterator generate(LlamaInput in) {
InputParameters param = in.getParameters().toInputParameters();
String prefix = in.getPrefix();
String suffix = in.getSuffix();
String inputs = in.getInputs();
if (prefix != null && suffix != null) {
LlamaLibrary.infill(handle, prefix, suffix, param);
} else if (inputs != null && !inputs.isEmpty()) {
LlamaLibrary.generate(handle, inputs, param);
} else {
throw new IllegalArgumentException("Unsupported input format");
}
return new TokenIterator(handle);
}
private static final class OutputIterator implements Iterator<BytesSupplier> {
private TokenIterator it;
public OutputIterator(TokenIterator it) {
this.it = it;
}
/** {@inheritDoc} */
@Override
public boolean hasNext() {
return it.hasNext();
}
/** {@inheritDoc} */
@Override
public BytesSupplier next() {
Token token = it.next();
return BytesSupplier.wrap(JsonUtils.GSON.toJson(token) + "\n");
}
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/engine/LlamaTranslatorFactory.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.engine;
import ai.djl.Model;
import ai.djl.llama.jni.TokenIterator;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.Pair;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/** A {@link TranslatorFactory} that creates a {@link LlamaTranslator} instance. */
public class LlamaTranslatorFactory implements TranslatorFactory, Serializable {
private static final long serialVersionUID = 1L;
private static final Set<Pair<Type, Type>> SUPPORTED_TYPES = new HashSet<>();
static {
SUPPORTED_TYPES.add(new Pair<>(String.class, TokenIterator.class));
SUPPORTED_TYPES.add(new Pair<>(LlamaInput.class, TokenIterator.class));
SUPPORTED_TYPES.add(new Pair<>(Input.class, Output.class));
}
/** {@inheritDoc} */
@Override
public Set<Pair<Type, Type>> getSupportedTypes() {
return SUPPORTED_TYPES;
}
/** {@inheritDoc} */
@Override
public boolean isSupported(Class<?> input, Class<?> output) {
return true;
}
/** {@inheritDoc} */
@Override
public <I, O> Translator<I, O> newInstance(
Class<I> input, Class<O> output, Model model, Map<String, ?> arguments) {
return new LlamaTranslator<>();
}
}
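An end-to-end sketch modeled on DJL's llama examples: load a local GGUF file via Criteria, then stream tokens. The model path is a placeholder, and Token is assumed to expose the generated text via getText():

import ai.djl.inference.Predictor;
import ai.djl.llama.jni.Token;
import ai.djl.llama.jni.TokenIterator;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ZooModel;
import java.nio.file.Paths;

public final class LlamaDemo {
    public static void main(String[] args) throws Exception {
        Criteria<String, TokenIterator> criteria =
                Criteria.builder()
                        .setTypes(String.class, TokenIterator.class)
                        .optModelPath(Paths.get("model.gguf")) // placeholder path
                        .optEngine("Llama")
                        .optTranslatorFactory(new LlamaTranslatorFactory())
                        .build();
        try (ZooModel<String, TokenIterator> model = criteria.loadModel();
                Predictor<String, TokenIterator> predictor = model.newPredictor()) {
            // The String input is parsed as LlamaInput JSON (see LlamaTranslator above).
            TokenIterator it = predictor.predict("{\"inputs\": \"What is Deep Java Library?\"}");
            while (it.hasNext()) {
                Token token = it.next();
                System.out.print(token.getText());
            }
        }
    }
}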
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/engine/package-info.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying Llama Engine. */
package ai.djl.llama.engine;
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/jni/InputParameters.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.jni;
import java.util.Map;
/** A class that holds input parameters. */
@SuppressWarnings({"PMD.UnusedPrivateField", "PMD.UnusedAssignment"})
public class InputParameters {
private int nPredict;
private int nKeep;
private int nProbs;
private int topK;
private float topP;
private float tfsZ;
private float typicalP;
private float temperature;
private float repeatPenalty;
private int repeatLastN;
private float frequencyPenalty;
private float presencePenalty;
private boolean penalizeNl;
private boolean ignoreEos;
private int mirostat;
private float mirostatTau;
private float mirostatEta;
private int nBeams;
private int seed;
private Map<Integer, Float> logitBias;
private String grammar;
private String[] antiPrompt;
/**
* Constructs a new {@code InputParameters} instance.
*
* @param nPredict the max new tokens
* @param nKeep the number of prompt tokens to keep
* @param nProbs the number of probabilities
* @param topK the top K
* @param topP the top P
* @param tfsZ the tfs Z
* @param typicalP the typical P
* @param temperature the temperature
* @param repeatPenalty the repeat penalty
* @param repeatLastN the repeat last N
* @param frequencyPenalty the frequency penalty
* @param presencePenalty the presence penalty
* @param penalizeNl the penalize nl
* @param ignoreEos the ignore EOS
* @param mirostat the mirostat
* @param mirostatTau the mirostat TAU
* @param mirostatEta the mirostat ETA
* @param nBeams the number of beams
* @param seed the seed
* @param logitBias the logit bias
* @param grammar the grammar
* @param antiPrompt the anti prompt
*/
public InputParameters(
int nPredict,
int nKeep,
int nProbs,
int topK,
float topP,
float tfsZ,
float typicalP,
float temperature,
float repeatPenalty,
int repeatLastN,
float frequencyPenalty,
float presencePenalty,
boolean penalizeNl,
boolean ignoreEos,
int mirostat,
float mirostatTau,
float mirostatEta,
int nBeams,
int seed,
Map<Integer, Float> logitBias,
String grammar,
String[] antiPrompt) {
this.nPredict = nPredict;
this.nKeep = nKeep;
this.nProbs = nProbs;
this.topK = topK;
this.topP = topP;
this.tfsZ = tfsZ;
this.typicalP = typicalP;
this.temperature = temperature;
this.repeatPenalty = repeatPenalty;
this.repeatLastN = repeatLastN;
this.frequencyPenalty = frequencyPenalty;
this.presencePenalty = presencePenalty;
this.penalizeNl = penalizeNl;
this.ignoreEos = ignoreEos;
this.mirostat = mirostat;
this.mirostatTau = mirostatTau;
this.mirostatEta = mirostatEta;
this.nBeams = nBeams;
this.seed = seed;
this.logitBias = logitBias;
this.grammar = grammar;
this.antiPrompt = antiPrompt;
}
/**
* Returns the max new tokens.
*
* @return the max new tokens
*/
public int getMaxNewTokens() {
return nPredict;
}
/**
* Returns the number of prompt tokens to keep.
*
* @return the number of prompt tokens to keep
*/
public int getNumberKeep() {
return nKeep;
}
/**
* Returns the number of probabilities.
*
* @return the number of probabilities
*/
public int getNumberProbabilities() {
return nProbs;
}
/**
* Returns the top K.
*
* @return the top K
*/
public int getTopK() {
return topK;
}
/**
* Returns the top P.
*
* @return the top P
*/
public float getTopP() {
return topP;
}
/**
* Returns the tfs Z.
*
* @return the tfs Z
*/
public float getTfsZ() {
return tfsZ;
}
/**
* Returns the typical P.
*
* @return the typical P
*/
public float getTypicalP() {
return typicalP;
}
/**
* Returns the temperature.
*
* @return the temperature
*/
public float getTemperature() {
return temperature;
}
/**
* Returns the repeat penalty.
*
* @return the repeat penalty
*/
public float getRepeatPenalty() {
return repeatPenalty;
}
/**
* Returns the repeat last N.
*
* @return the repeat last N
*/
public int getRepeatLastN() {
return repeatLastN;
}
/**
* Returns the frequency penalty.
*
* @return the frequency penalty
*/
public float getFrequencyPenalty() {
return frequencyPenalty;
}
/**
* Returns the presence penalty.
*
* @return the presence penalty
*/
public float getPresencePenalty() {
return presencePenalty;
}
/**
* Returns whether newline tokens are penalized.
*
* @return {@code true} if newline tokens are penalized
*/
public boolean isPenalizeNl() {
return penalizeNl;
}
/**
* Returns {@code true} if ignore EOS.
*
* @return {@code true} if ignore EOS
*/
public boolean isIgnoreEos() {
return ignoreEos;
}
/**
* Returns the mirostat.
*
* @return the mirostat
*/
public int getMirostat() {
return mirostat;
}
/**
* Returns the mirostat TAU.
*
* @return the mirostat TAU
*/
public float getMirostatTau() {
return mirostatTau;
}
/**
* Returns the mirostat ETA.
*
* @return the mirostat ETA
*/
public float getMirostatEta() {
return mirostatEta;
}
/**
* Returns the number of beams.
*
* @return the number of beams
*/
public int getNumberBeams() {
return nBeams;
}
/**
* Returns the seed.
*
* @return the seed
*/
public int getSeed() {
return seed;
}
/**
* Returns the logit bias.
*
* @return the logit bias
*/
public Map<Integer, Float> getLogitBias() {
return logitBias;
}
/**
* Returns the grammar template.
*
* @return the grammar template
*/
public String getGrammar() {
return grammar;
}
/**
* Returns the anti-prompt.
*
* @return the anti-prompt
*/
public String[] getAntiPrompt() {
return antiPrompt;
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/jni/LibUtils.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.jni;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
/** Utilities for finding the llama.cpp native binary on the system. */
public final class LibUtils {
private static final Logger logger = LoggerFactory.getLogger(LibUtils.class);
private static final String LIB_NAME = System.mapLibraryName("djl_llama");
private static final String LLAMA_NAME = System.mapLibraryName("llama");
private LibUtils() {}
/** Loads llama.cpp native library. */
public static void loadLibrary() {
List<String> libs = new ArrayList<>(3);
libs.add(LLAMA_NAME);
libs.add(LIB_NAME);
if (System.getProperty("os.name").startsWith("Mac")) {
libs.add("ggml-metal.metal");
}
Path dir = copyJniLibraryFromClasspath(libs.toArray(new String[0]));
logger.debug("Loading llama.cpp library from: {}", dir);
for (int i = 0; i < 2; ++i) {
String lib = libs.get(i);
String path = dir.resolve(lib).toString();
logger.debug("Loading native library: {}", path);
String nativeHelper = System.getProperty("ai.djl.llama.native_helper");
if (nativeHelper != null && !nativeHelper.isEmpty()) {
ClassLoaderUtils.nativeLoad(nativeHelper, path);
} else {
System.load(path); // NOPMD
}
}
}
private static Path copyJniLibraryFromClasspath(String... libs) {
Path cacheDir = Utils.getEngineCacheDir("llama");
Platform platform = Platform.detectPlatform("llama");
String classifier = platform.getClassifier();
String version = platform.getVersion();
Path dir = cacheDir.resolve(version + '-' + classifier);
Path path = dir.resolve(LIB_NAME);
logger.debug("Using cache dir: {}", dir);
if (Files.exists(path)) {
return dir.toAbsolutePath();
}
Path tmp = null;
try {
Files.createDirectories(cacheDir);
tmp = Files.createTempDirectory(cacheDir, "tmp");
for (String libName : libs) {
String libPath = "native/lib/" + classifier + "/" + libName;
logger.info("Extracting {} to cache ...", libPath);
try (InputStream is = ClassLoaderUtils.getResourceAsStream(libPath)) {
Path target = tmp.resolve(libName);
Files.copy(is, target, StandardCopyOption.REPLACE_EXISTING);
}
}
Utils.moveQuietly(tmp, dir);
return dir.toAbsolutePath();
} catch (IOException e) {
throw new IllegalStateException("Cannot copy jni files", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/jni/LlamaLibrary.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.jni;
/** Native library for llama.cpp. */
@SuppressWarnings("MissingJavadocMethod")
public final class LlamaLibrary {
private LlamaLibrary() {}
public static native long loadModel(String filePath, ModelParameters param);
public static native void generate(long handle, String prompt, InputParameters param);
public static native void infill(
long handle, String prefix, String suffix, InputParameters param);
public static native Token getNext(long handle, long count, long pos);
public static native float[] embed(long handle, String prompt);
public static native int[] encode(long handle, String prompt);
public static native byte[] decodeBytes(long handle, int[] tokens);
public static native void delete(long handle);
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/jni/ModelParameters.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.jni;
import java.util.Map;
/** A class that holds the llama.cpp model loading parameters. */
@SuppressWarnings("PMD.SingularField")
public final class ModelParameters {
private int nThreads;
private int nCtx;
private int nBatch;
private int nGpuLayers;
private int mainGpu;
private float ropeFreqBase;
private float ropeFreqScale;
private boolean mulMatQ;
private boolean f16Kv;
private boolean logitsAll;
private boolean vocabOnly;
private boolean useMmap;
private boolean useMlock;
private boolean embedding;
private boolean memoryF16;
private boolean memTest;
private boolean numa;
private boolean verbosePrompt;
private float[] tensorSplit;
private String loraAdapter;
private String loraBase;
/**
* Constructs a new {@code ModelParameters} instance.
*
* @param options the model loading options
*/
public ModelParameters(Map<String, ?> options) {
nThreads = intValue(options, "number_threads", Runtime.getRuntime().availableProcessors());
nCtx = intValue(options, "max_context_length", 512);
nBatch = intValue(options, "max_rolling_batch", 512);
nGpuLayers = intValue(options, "number_gpu_layers", -1);
mainGpu = intValue(options, "tensor_parallel_degree", 0);
ropeFreqBase = floatValue(options, "rope_freq_base");
        ropeFreqScale = floatValue(options, "rope_freq_scale");
f16Kv = booleanValue(options, "f16_kv");
mulMatQ = booleanValue(options, "mulmat_q", true);
logitsAll = booleanValue(options, "logits_all");
vocabOnly = booleanValue(options, "vocab_only");
useMmap = booleanValue(options, "use_mmap", true);
useMlock = booleanValue(options, "use_mlock");
embedding = booleanValue(options, "embedding");
memoryF16 = booleanValue(options, "memory_f16", true);
memTest = booleanValue(options, "mem_test");
numa = booleanValue(options, "numa");
verbosePrompt = booleanValue(options, "verbose_prompt");
String val = stringValue(options, "tensor_split");
if (val != null && !val.isEmpty()) {
String[] tokens = val.split(",");
tensorSplit = new float[tokens.length];
for (int i = 0; i < tokens.length; ++i) {
tensorSplit[i] = Float.parseFloat(tokens[i].trim());
}
}
loraAdapter = stringValue(options, "lora_adapter");
        loraBase = stringValue(options, "lora_base");
}
private static int intValue(Map<String, ?> arguments, String key, int def) {
Object value = arguments.get(key);
if (value == null) {
return def;
}
return (int) Double.parseDouble(value.toString());
}
private static float floatValue(Map<String, ?> arguments, String key) {
Object value = arguments.get(key);
if (value == null) {
return 0f;
}
return (float) Double.parseDouble(value.toString());
}
private static boolean booleanValue(Map<String, ?> arguments, String key) {
return booleanValue(arguments, key, false);
}
private static boolean booleanValue(Map<String, ?> arguments, String key, boolean def) {
Object value = arguments.get(key);
if (value == null) {
return def;
}
return Boolean.parseBoolean(value.toString());
}
private static String stringValue(Map<String, ?> arguments, String key) {
Object value = arguments.get(key);
if (value == null) {
return null;
}
return value.toString();
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/jni/Token.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.jni;
import ai.djl.util.JsonUtils;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/** The output token class. */
public final class Token {
private int token;
private String text;
private Map<Integer, Float> probabilities;
transient long count;
transient long pos;
transient boolean hasNext;
/**
* Constructs a new {@code Token} instance.
*
* @param token the token id
     * @param generated the UTF-8 bytes of the token text
     * @param probabilities the token probabilities
     * @param count the generated token count
     * @param pos the token index
     * @param hasNext {@code true} if more tokens follow
*/
public Token(
int token,
byte[] generated,
Map<Integer, Float> probabilities,
long count,
long pos,
boolean hasNext) {
this.token = token;
this.text = new String(generated, StandardCharsets.UTF_8);
this.probabilities = probabilities;
this.count = count;
this.pos = pos;
this.hasNext = hasNext;
}
/**
* Returns the token id.
*
* @return the token id
*/
public int getToken() {
return token;
}
/**
* Returns the token text.
*
* @return the token text
*/
public String getText() {
return text;
}
/**
* Returns the token probabilities.
*
* @return the token probabilities
*/
public Map<Integer, Float> getProbabilities() {
return probabilities;
}
/** {@inheritDoc} */
@Override
public String toString() {
return JsonUtils.GSON.toJson(this) + '\n';
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/jni/TokenIterator.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.jni;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicBoolean;
/** An iterator class that holds generated tokens. */
public class TokenIterator implements Iterator<Token> {
private static final Logger logger = LoggerFactory.getLogger(TokenIterator.class);
private static AtomicBoolean active = new AtomicBoolean();
private long handle;
private long count;
private long pos;
private boolean hasNext;
/**
* Constructs a new {@code TokenIterator} instance.
*
* @param handle the llama.cpp handle
*/
public TokenIterator(long handle) {
this.handle = handle;
hasNext = true;
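        // llama.cpp runs a single generation at a time; creating a new iterator
        // takes over the active slot and invalidates any in-flight generation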
        if (!active.compareAndSet(false, true)) {
            logger.warn("Previous inference has been reset");
        }
}
/** {@inheritDoc} */
@Override
public boolean hasNext() {
return hasNext;
}
/** {@inheritDoc} */
@Override
public Token next() {
if (!hasNext) {
throw new NoSuchElementException();
}
Token token = LlamaLibrary.getNext(handle, count, pos);
count = token.count;
pos = token.pos;
hasNext = token.hasNext;
if (!hasNext) {
active.set(false);
}
return token;
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/jni/package-info.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the native llama.cpp code. */
package ai.djl.llama.jni;
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/zoo/LlamaModelZoo.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.zoo;
import ai.djl.Application;
import ai.djl.repository.Repository;
import ai.djl.repository.zoo.ModelLoader;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.JsonUtils;
import ai.djl.util.Utils;
import com.google.gson.reflect.TypeToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Writer;
import java.lang.reflect.Type;
import java.net.URI;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.zip.GZIPInputStream;
/** LlamaModelZoo is a repository that contains llama.cpp models. */
public class LlamaModelZoo extends ModelZoo {
private static final Logger logger = LoggerFactory.getLogger(LlamaModelZoo.class);
private static final String REPO = "https://mlrepo.djl.ai/";
private static final Repository REPOSITORY = Repository.newInstance("gguf", REPO);
private static final String GROUP_ID = "ai.djl.huggingface.gguf";
private static final long ONE_DAY = Duration.ofDays(1).toMillis();
private volatile boolean initialized; // NOPMD
LlamaModelZoo() {}
/** {@inheritDoc} */
@Override
public String getGroupId() {
return GROUP_ID;
}
/** {@inheritDoc} */
@Override
public Set<String> getSupportedEngines() {
return Collections.singleton("Llama");
}
/** {@inheritDoc} */
@Override
public Collection<ModelLoader> getModelLoaders() {
init();
return super.getModelLoaders();
}
/** {@inheritDoc} */
@Override
public ModelLoader getModelLoader(String name) {
init();
return super.getModelLoader(name);
}
private void init() {
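        // Double-checked locking: populate the model list at most once across threads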
if (!initialized) {
synchronized (LlamaModelZoo.class) {
if (!initialized) {
Application app = Application.NLP.TEXT_GENERATION;
Map<String, ModelDetail> map = listModels(app);
for (Map.Entry<String, ModelDetail> entry : map.entrySet()) {
String artifactId = entry.getKey();
Map<String, Object> gguf = entry.getValue().getGguf();
if (gguf != null) {
for (String key : gguf.keySet()) {
addModel(REPOSITORY.model(app, GROUP_ID, artifactId, "0.0.1", key));
}
}
}
initialized = true;
}
}
}
}
private Map<String, ModelDetail> listModels(Application app) {
try {
String path = "model/" + app.getPath() + "/ai/djl/huggingface/gguf/";
Path dir = Utils.getCacheDir().resolve("cache/repo/" + path);
if (Files.notExists(dir)) {
Files.createDirectories(dir);
} else if (!Files.isDirectory(dir)) {
logger.warn("Failed initialize cache directory: {}", dir);
return Collections.emptyMap();
}
Type type = new TypeToken<Map<String, ModelDetail>>() {}.getType();
Path file = dir.resolve("models.json");
if (Files.exists(file)) {
long lastModified = Files.getLastModifiedTime(file).toMillis();
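                // Reuse the cached index when offline or when it is less than a day old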
if (Utils.isOfflineMode() || System.currentTimeMillis() - lastModified < ONE_DAY) {
try (Reader reader = Files.newBufferedReader(file)) {
return JsonUtils.GSON.fromJson(reader, type);
}
}
}
URL url = URI.create(REPO).resolve(path + "models.json.gz").toURL();
Path tmp = Files.createTempFile(dir, "models", ".tmp");
try (GZIPInputStream gis = new GZIPInputStream(Utils.openUrl(url))) {
String json = Utils.toString(gis);
try (Writer writer = Files.newBufferedWriter(tmp)) {
writer.write(json);
}
Utils.moveQuietly(tmp, file);
return JsonUtils.GSON.fromJson(json, type);
} catch (IOException e) {
logger.warn("Failed to download Huggingface gguf index: {}", app);
if (Files.exists(file)) {
try (Reader reader = Files.newBufferedReader(file)) {
return JsonUtils.GSON.fromJson(reader, type);
}
}
String resource = app.getPath() + "/" + GROUP_ID + ".json";
try (InputStream is = ClassLoaderUtils.getResourceAsStream(resource)) {
String json = Utils.toString(is);
try (Writer writer = Files.newBufferedWriter(tmp)) {
writer.write(json);
}
Utils.moveQuietly(tmp, file);
return JsonUtils.GSON.fromJson(json, type);
}
} finally {
Utils.deleteQuietly(tmp);
}
} catch (IOException e) {
logger.warn("Failed load gguf index file", e);
}
return Collections.emptyMap();
}
private static final class ModelDetail {
private Map<String, Object> gguf;
public Map<String, Object> getGguf() {
return gguf;
}
public void setGguf(Map<String, Object> gguf) {
this.gguf = gguf;
}
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/zoo/LlamaZooProvider.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.llama.zoo;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooProvider;
/**
 * A Huggingface llama.cpp model zoo provider that implements the {@link
* ai.djl.repository.zoo.ZooProvider} interface.
*/
public class LlamaZooProvider implements ZooProvider {
/** {@inheritDoc} */
@Override
public ModelZoo getModelZoo() {
return new LlamaModelZoo();
}
}
|
0
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama
|
java-sources/ai/djl/llama/llama/0.28.0/ai/djl/llama/zoo/package-info.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the built-in {@link ai.djl.llama.zoo.LlamaModelZoo}. */
package ai.djl.llama.zoo;
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/LgbmDataset.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm;
import ai.djl.ml.lightgbm.jni.JniUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrayAdapter;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_p_void;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.util.concurrent.atomic.AtomicReference;
/** A special {@link NDArray} used by LightGBM for training models. */
public class LgbmDataset extends NDArrayAdapter {
private AtomicReference<SWIGTYPE_p_p_void> handle;
// Track Dataset source for inference calls
private SrcType srcType;
private Path srcFile;
private NDArray srcArray;
LgbmDataset(NDManager manager, NDManager alternativeManager, LgbmNDArray array) {
super(
manager,
alternativeManager,
array.getShape(),
array.getDataType(),
NDManager.nextUid());
srcType = SrcType.ARRAY;
srcArray = array;
handle = new AtomicReference<>();
}
LgbmDataset(NDManager manager, NDManager alternativeManager, Path file) {
super(manager, alternativeManager, null, DataType.FLOAT32, NDManager.nextUid());
srcType = SrcType.FILE;
srcFile = file;
handle = new AtomicReference<>();
}
/**
* Gets the native LightGBM Dataset pointer.
*
* @return the pointer
*/
    public SWIGTYPE_p_p_void getHandle() {
        SWIGTYPE_p_p_void pointer = handle.get();
        if (pointer == null) {
            synchronized (this) {
                // Re-check inside the lock so the native dataset is created only once
                pointer = handle.get();
                if (pointer == null) {
                    switch (getSrcType()) {
                        case FILE:
                            handle.set(JniUtils.datasetFromFile(getSrcFile().toString()));
                            break;
                        case ARRAY:
                            handle.set(JniUtils.datasetFromArray(getSrcArrayConverted()));
                            break;
                        default:
                            throw new IllegalArgumentException("Unexpected SrcType");
                    }
                    pointer = handle.get();
                }
            }
        }
        return pointer;
    }
/** {@inheritDoc} */
@Override
    public Shape getShape() {
        if (shape == null) {
            // Use getHandle() so the native dataset is created lazily if needed
            SWIGTYPE_p_p_void pointer = getHandle();
            shape =
                    new Shape(
                            JniUtils.datasetGetRows(pointer),
                            JniUtils.datasetGetCols(pointer));
        }
        return shape;
    }
/**
* Returns the type of source data for the {@link LgbmDataset}.
*
* @return the type of source data for the {@link LgbmDataset}
*/
public SrcType getSrcType() {
return srcType;
}
/**
* Returns the file used to create this (if applicable).
*
* @return the file used to create this (if applicable)
*/
public Path getSrcFile() {
return srcFile;
}
/**
* Returns the array used to create this (if applicable).
*
* @return the array used to create this (if applicable)
*/
public NDArray getSrcArray() {
return srcArray;
}
/**
* Returns the array used to create this (if applicable) converted into an {@link LgbmNDArray}.
*
* @return the array used to create this (if applicable) converted into an {@link LgbmNDArray}
*/
public LgbmNDArray getSrcArrayConverted() {
NDArray a = getSrcArray();
if (a instanceof LgbmNDArray) {
return (LgbmNDArray) a;
} else {
return new LgbmNDArray(
manager, alternativeManager, a.toByteBuffer(), a.getShape(), a.getDataType());
}
}
/** {@inheritDoc} */
@Override
public void intern(NDArray replaced) {
throw new UnsupportedOperationException("Not supported by the LgbmDataset yet");
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detachInternal(getUid());
manager = LgbmNDManager.getSystemManager();
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer(boolean tryDirect) {
throw new UnsupportedOperationException("Not supported by the LgbmDataset yet");
}
/** {@inheritDoc} */
@Override
public void close() {
SWIGTYPE_p_p_void pointer = handle.getAndSet(null);
if (pointer != null) {
JniUtils.freeDataset(pointer);
}
}
/** The type of data used to create the {@link LgbmDataset}. */
public enum SrcType {
FILE,
ARRAY
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/LgbmEngine.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.ml.lightgbm.jni.LibUtils;
import ai.djl.ndarray.NDManager;
import java.io.IOException;
/**
 * The {@code LgbmEngine} is an implementation of the {@link Engine} based on <a
* href="https://github.com/microsoft/LightGBM">LightGBM</a>.
*
* <p>To get an instance of the {@code LgbmEngine} when it is not the default Engine, call {@link
* Engine#getEngine(String)} with the Engine name "LightGBM".
*/
public final class LgbmEngine extends Engine {
public static final String ENGINE_NAME = "LightGBM";
public static final String ENGINE_VERSION = "3.2.110";
static final int RANK = 10;
private Engine alternativeEngine;
private boolean initialized;
private LgbmEngine() {
try {
LibUtils.loadNative();
} catch (IOException e) {
throw new EngineException("Failed to initialize LightGBMEngine", e);
}
}
static Engine newInstance() {
return new LgbmEngine();
}
/** {@inheritDoc} */
@Override
public Engine getAlternativeEngine() {
if (!initialized && !Boolean.getBoolean("ai.djl.lightgbm.disable_alternative")) {
Engine engine = Engine.getInstance();
if (engine.getRank() < getRank()) {
                // alternativeEngine should not have the same rank as LightGBM
alternativeEngine = engine;
}
initialized = true;
}
return alternativeEngine;
}
/** {@inheritDoc} */
@Override
public String getEngineName() {
return ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getRank() {
return RANK;
}
/** {@inheritDoc} */
@Override
public String getVersion() {
return ENGINE_VERSION;
}
/** {@inheritDoc} */
@Override
public boolean hasCapability(String capability) {
return false;
}
/** {@inheritDoc} */
@Override
public Model newModel(String name, Device device) {
return new LgbmModel(name, newBaseManager(device));
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager() {
return newBaseManager(null);
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager(Device device) {
return LgbmNDManager.getSystemManager().newSubManager(device);
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/LgbmEngineProvider.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code LgbmEngineProvider} is the LightGBM implementation of {@link EngineProvider}. */
public class LgbmEngineProvider implements EngineProvider {
/** {@inheritDoc} */
@Override
public String getEngineName() {
return LgbmEngine.ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getEngineRank() {
return LgbmEngine.RANK;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return InstanceHolder.INSTANCE;
}
private static class InstanceHolder {
static final Engine INSTANCE = LgbmEngine.newInstance();
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/LgbmModel.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm;
import ai.djl.BaseModel;
import ai.djl.Model;
import ai.djl.ml.lightgbm.jni.JniUtils;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.translate.ArgumentsUtil;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
/** {@code LgbmModel} is the LightGBM implementation of {@link Model}. */
public class LgbmModel extends BaseModel {
/**
     * Constructs a new {@code LgbmModel} instance.
     *
     * @param modelName the model name
     * @param manager the {@link NDManager} that holds the NDArray
*/
LgbmModel(String modelName, NDManager manager) {
super(modelName);
dataType = DataType.FLOAT32;
this.manager = manager;
manager.setName("LgbmModel");
}
/** {@inheritDoc} */
@Override
public void load(Path modelPath, String prefix, Map<String, ?> options) throws IOException {
setModelDir(modelPath);
wasLoaded = true;
if (block != null) {
throw new UnsupportedOperationException("LightGBM does not support dynamic blocks");
}
Path modelFile = findModelFile(prefix);
if (modelFile == null) {
modelFile = findModelFile(modelDir.toFile().getName());
if (modelFile == null) {
                throw new FileNotFoundException(".txt file not found in: " + modelPath);
}
}
block = JniUtils.loadModel((LgbmNDManager) manager, modelFile.toAbsolutePath().toString());
if (options != null) {
String inferenceType = ArgumentsUtil.stringValue(options, "inference_type");
if (inferenceType != null) {
setInferenceType(inferenceType);
}
}
}
private Path findModelFile(String prefix) {
if (Files.isRegularFile(modelDir)) {
Path file = modelDir;
modelDir = modelDir.getParent();
String fileName = file.toFile().getName();
if (fileName.endsWith(".txt")) {
modelName = fileName.substring(0, fileName.length() - 4);
} else {
modelName = fileName;
}
return file;
}
if (prefix == null) {
prefix = modelName;
}
Path modelFile = modelDir.resolve(prefix);
if (Files.notExists(modelFile) || !Files.isRegularFile(modelFile)) {
if (prefix.endsWith(".txt")) {
return null;
}
modelFile = modelDir.resolve(prefix + ".txt");
if (Files.notExists(modelFile) || !Files.isRegularFile(modelFile)) {
return null;
}
}
return modelFile;
}
/** {@inheritDoc} */
@Override
public void close() {
if (block != null) {
((LgbmSymbolBlock) block).close();
block = null;
}
super.close();
}
/**
* Sets the inference type of the model based on the given string.
*
* <p>Supported inference types include NORMAL, RAW_SCORE, LEAF_INDEX, CONTRIB.
*
* @param inferenceType the string representing the inference type
* @throws IllegalArgumentException if the given inference type is not supported
*/
public void setInferenceType(String inferenceType) {
((LgbmSymbolBlock) block).setInferenceType(inferenceType);
}
/**
* Returns the string representation of the current model inference type.
*
* @return the string representation of the current inference type
     * @throws AssertionError if the current inference type is unknown
*/
public String getInferenceType() {
return ((LgbmSymbolBlock) block).getInferenceType();
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/LgbmNDArray.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrayAdapter;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_double;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_float;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_void;
import com.microsoft.ml.lightgbm.lightgbmlib;
import com.microsoft.ml.lightgbm.lightgbmlibConstants;
import java.nio.ByteBuffer;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.util.concurrent.atomic.AtomicReference;
/** {@code LgbmNDArray} is the LightGBM implementation of {@link NDArray}. */
public class LgbmNDArray extends NDArrayAdapter {
private ByteBuffer data;
private SparseFormat format;
private AtomicReference<SWIGTYPE_p_void> handle;
private int typeConstant;
private AtomicReference<SWIGTYPE_p_float> floatDataRef;
private AtomicReference<SWIGTYPE_p_double> doubleDataRef;
LgbmNDArray(
NDManager manager,
NDManager alternativeManager,
ByteBuffer data,
Shape shape,
DataType dataType) {
super(manager, alternativeManager, shape, dataType, NDManager.nextUid());
this.data = data;
this.format = SparseFormat.DENSE;
manager.attachInternal(uid, this);
handle = new AtomicReference<>();
floatDataRef = new AtomicReference<>();
doubleDataRef = new AtomicReference<>();
}
/** {@inheritDoc} */
@Override
public void returnResource(NDManager manager) {
detach();
this.manager = manager;
manager.attachUncappedInternal(getUid(), this);
}
/**
* Returns the native LightGBM handle to the array.
*
* @return the native LightGBM handle to the array
*/
public SWIGTYPE_p_void getHandle() {
if (handle.get() == null) {
if (shape.dimension() != 2) {
throw new IllegalArgumentException(
"The LightGBM operation can only be performed with a 2-dimensional matrix,"
+ " but was passed an NDArray with "
+ shape.dimension()
+ " dimensions");
}
int size = Math.toIntExact(size());
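            // Copy the JVM-side buffer element by element into a native array that
            // LightGBM can read; the native memory is freed in intern()/close()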
if (getDataType() == DataType.FLOAT32) {
typeConstant = lightgbmlibConstants.C_API_DTYPE_FLOAT32;
FloatBuffer d1 = toByteBuffer().asFloatBuffer();
floatDataRef.set(lightgbmlib.new_floatArray(size));
for (int i = 0; i < size; i++) {
lightgbmlib.floatArray_setitem(floatDataRef.get(), i, d1.get(i));
}
handle.set(lightgbmlib.float_to_voidp_ptr(floatDataRef.get()));
} else if (getDataType() == DataType.FLOAT64) {
typeConstant = lightgbmlibConstants.C_API_DTYPE_FLOAT64;
DoubleBuffer d1 = toByteBuffer().asDoubleBuffer();
doubleDataRef.set(lightgbmlib.new_doubleArray(size));
for (int i = 0; i < size; i++) {
lightgbmlib.doubleArray_setitem(doubleDataRef.get(), i, d1.get(i));
}
handle.set(lightgbmlib.double_to_voidp_ptr(doubleDataRef.get()));
} else {
throw new IllegalArgumentException(
"The LightGBM operation can only be performed with a Float32 or Float64"
+ " array, but was given a "
+ getDataType());
}
}
return handle.get();
}
/**
* Returns the number of data rows (assuming a 2D matrix).
*
* @return the number of data rows (assuming a 2D matrix)
*/
public int getRows() {
return Math.toIntExact(shape.get(0));
}
/**
* Returns the number of data cols (assuming a 2D matrix).
*
* @return the number of data cols (assuming a 2D matrix)
*/
public int getCols() {
return Math.toIntExact(shape.get(1));
}
/**
* Returns the LightGBM type constant of the array.
*
* @return the LightGBM type constant of the array
*/
public int getTypeConstant() {
return typeConstant;
}
/** {@inheritDoc} */
@Override
public SparseFormat getSparseFormat() {
return format;
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer(boolean tryDirect) {
if (data == null) {
throw new UnsupportedOperationException("Cannot obtain value from DMatrix");
}
data.rewind();
return data;
}
/** {@inheritDoc} */
@Override
public void intern(NDArray replaced) {
LgbmNDArray array = (LgbmNDArray) replaced;
SWIGTYPE_p_float floatData = floatDataRef.getAndSet(array.floatDataRef.getAndSet(null));
if (floatData != null) {
lightgbmlib.delete_floatArray(floatData);
}
SWIGTYPE_p_double doubleData = doubleDataRef.getAndSet(array.doubleDataRef.getAndSet(null));
if (doubleData != null) {
lightgbmlib.delete_doubleArray(doubleData);
}
handle.set(array.handle.getAndSet(null));
data = array.data;
format = array.format;
typeConstant = array.typeConstant;
shape = array.shape;
dataType = array.dataType;
replaced.close();
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detachInternal(getUid());
manager = LgbmNDManager.getSystemManager();
}
/** {@inheritDoc} */
@Override
public void close() {
super.close();
SWIGTYPE_p_float floatData = floatDataRef.getAndSet(null);
if (floatData != null) {
lightgbmlib.delete_floatArray(floatData);
}
SWIGTYPE_p_double doubleData = doubleDataRef.getAndSet(null);
if (doubleData != null) {
lightgbmlib.delete_doubleArray(doubleData);
}
handle.set(null);
data = null;
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/LgbmNDManager.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.file.Path;
/** {@code LgbmNDManager} is the LightGBM implementation of {@link NDManager}. */
public class LgbmNDManager extends BaseNDManager {
private static final LgbmNDManager SYSTEM_MANAGER = new SystemManager();
private LgbmNDManager(NDManager parent, Device device) {
super(parent, device);
}
static LgbmNDManager getSystemManager() {
return SYSTEM_MANAGER;
}
/** {@inheritDoc} */
@Override
public ByteBuffer allocateDirect(int capacity) {
return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
}
/** {@inheritDoc} */
@Override
public LgbmNDArray from(NDArray array) {
if (array == null || array instanceof LgbmNDArray) {
return (LgbmNDArray) array;
}
LgbmNDArray result =
(LgbmNDArray) create(array.toByteBuffer(), array.getShape(), array.getDataType());
result.setName(array.getName());
return result;
}
/** {@inheritDoc} */
@Override
public NDManager newSubManager(Device device) {
LgbmNDManager manager = new LgbmNDManager(this, device);
attachInternal(manager.uid, manager);
return manager;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return Engine.getEngine(LgbmEngine.ENGINE_NAME);
}
/** {@inheritDoc} */
@Override
public NDArray create(Buffer data, Shape shape, DataType dataType) {
if (data instanceof ByteBuffer) {
// output only NDArray
return new LgbmNDArray(this, alternativeManager, (ByteBuffer) data, shape, dataType);
} else if (data instanceof FloatBuffer && dataType == DataType.FLOAT32) {
ByteBuffer bb = allocateDirect(data.capacity() * 4);
bb.asFloatBuffer().put((FloatBuffer) data);
bb.rewind();
return new LgbmNDArray(this, alternativeManager, bb, shape, dataType);
} else if (data instanceof DoubleBuffer && dataType == DataType.FLOAT64) {
ByteBuffer bb = allocateDirect(data.capacity() * 8);
bb.asDoubleBuffer().put((DoubleBuffer) data);
bb.rewind();
return new LgbmNDArray(this, alternativeManager, bb, shape, dataType);
}
if (alternativeManager != null) {
return alternativeManager.create(data, shape, dataType);
}
throw new UnsupportedOperationException(
"LgbmNDArray only supports float32 and float64. Please pass either a ByteBuffer, a"
+ " FloatBuffer with Float32, or a DoubleBuffer with Float64.");
}
/** {@inheritDoc} */
@Override
public NDList load(Path path) {
return new NDList(new LgbmDataset(this, null, path));
}
/** {@inheritDoc} */
@Override
public void close() {
super.close();
if (alternativeManager != null) {
alternativeManager.close();
alternativeManager = null;
}
}
/** The SystemManager is the root {@link LgbmNDManager} of which all others are children. */
private static final class SystemManager extends LgbmNDManager implements SystemNDManager {
SystemManager() {
super(null, null);
}
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/LgbmSymbolBlock.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm;
import ai.djl.ml.lightgbm.jni.JniUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.AbstractSymbolBlock;
import ai.djl.nn.ParameterList;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.ParameterStore;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_p_void;
import com.microsoft.ml.lightgbm.lightgbmlibConstants;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicReference;
/** {@code LgbmSymbolBlock} is the LightGBM implementation of {@link SymbolBlock}. */
public class LgbmSymbolBlock extends AbstractSymbolBlock implements AutoCloseable {
private AtomicReference<SWIGTYPE_p_p_void> handle;
private int iterations;
private String uid;
private LgbmNDManager manager;
private int inferenceType;
/**
* Constructs a {@code LgbmSymbolBlock}.
*
* <p>You can create a {@code LgbmSymbolBlock} using {@link
* ai.djl.Model#load(java.nio.file.Path, String)}.
*
* @param manager the manager to use for the block
* @param iterations the number of iterations the model was trained for
* @param handle the Booster handle
*/
@SuppressWarnings("this-escape")
public LgbmSymbolBlock(LgbmNDManager manager, int iterations, SWIGTYPE_p_p_void handle) {
this.handle = new AtomicReference<>(handle);
this.iterations = iterations;
this.manager = manager;
uid = String.valueOf(handle);
manager.attachInternal(uid, this);
this.inferenceType = lightgbmlibConstants.C_API_PREDICT_NORMAL;
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
NDArray array = inputs.singletonOrThrow();
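        // Convert the input inside a temporary sub-manager so intermediate native
        // buffers are released as soon as inference completes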
try (LgbmNDManager sub = (LgbmNDManager) manager.newSubManager()) {
LgbmNDArray lgbmNDArray = sub.from(array);
Pair<Integer, ByteBuffer> result =
JniUtils.inference(getHandle(), iterations, lgbmNDArray, inferenceType);
NDArray ret =
manager.create(
result.getValue(),
new Shape(result.getKey()),
lgbmNDArray.getDataType());
ret.attach(array.getManager());
return new NDList(ret);
}
}
/** {@inheritDoc} */
@Override
public void close() {
SWIGTYPE_p_p_void pointer = handle.getAndSet(null);
if (pointer != null) {
JniUtils.freeModel(pointer);
manager.detachInternal(uid);
manager = null;
}
}
/**
* Gets the native LightGBM Booster pointer.
*
* @return the pointer
*/
public SWIGTYPE_p_p_void getHandle() {
SWIGTYPE_p_p_void pointer = handle.get();
if (pointer == null) {
throw new IllegalStateException("LightGBM model handle has been released!");
}
return pointer;
}
/** {@inheritDoc} */
@Override
public ParameterList getDirectParameters() {
throw new UnsupportedOperationException("Not yet supported");
}
void setInferenceType(String inferenceType) {
switch (inferenceType) {
case "NORMAL":
this.inferenceType = lightgbmlibConstants.C_API_PREDICT_NORMAL;
break;
case "RAW_SCORE":
this.inferenceType = lightgbmlibConstants.C_API_PREDICT_RAW_SCORE;
break;
case "LEAF_INDEX":
this.inferenceType = lightgbmlibConstants.C_API_PREDICT_LEAF_INDEX;
break;
case "CONTRIB":
this.inferenceType = lightgbmlibConstants.C_API_PREDICT_CONTRIB;
break;
default:
throw new IllegalArgumentException(
"Unsupported inference type: "
+ inferenceType
+ ". Supported types include: NORMAL, RAW_SCORE, LEAF_INDEX,"
+ " CONTRIB.");
}
}
String getInferenceType() {
if (inferenceType == lightgbmlibConstants.C_API_PREDICT_NORMAL) {
return "NORMAL";
} else if (inferenceType == lightgbmlibConstants.C_API_PREDICT_RAW_SCORE) {
return "RAW_SCORE";
} else if (inferenceType == lightgbmlibConstants.C_API_PREDICT_LEAF_INDEX) {
return "LEAF_INDEX";
} else if (inferenceType == lightgbmlibConstants.C_API_PREDICT_CONTRIB) {
return "CONTRIB";
} else {
throw new AssertionError("Unexpected inference type: " + inferenceType);
}
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains implementations of interfaces within the DJL API for the LightGBM Engine. */
package ai.djl.ml.lightgbm;
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/jni/JniUtils.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm.jni;
import ai.djl.engine.EngineException;
import ai.djl.ml.lightgbm.LgbmDataset;
import ai.djl.ml.lightgbm.LgbmNDArray;
import ai.djl.ml.lightgbm.LgbmNDManager;
import ai.djl.ml.lightgbm.LgbmSymbolBlock;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.types.DataType;
import ai.djl.util.Pair;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_double;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_int;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_long_long;
import com.microsoft.ml.lightgbm.SWIGTYPE_p_p_void;
import com.microsoft.ml.lightgbm.lightgbmlib;
import com.microsoft.ml.lightgbm.lightgbmlibConstants;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
/** DJL class that has access to LightGBM JNI. */
@SuppressWarnings("MissingJavadocMethod")
public final class JniUtils {
private JniUtils() {}
public static void checkCall(int result) {
if (result != 0) {
throw new EngineException("LightGBM Engine Error: " + lightgbmlib.LGBM_GetLastError());
}
}
public static LgbmSymbolBlock loadModel(LgbmNDManager manager, String path) {
SWIGTYPE_p_p_void handle = lightgbmlib.new_voidpp();
SWIGTYPE_p_int outIterations = lightgbmlib.new_intp();
int result = lightgbmlib.LGBM_BoosterCreateFromModelfile(path, outIterations, handle);
checkCall(result);
int iterations = lightgbmlib.intp_value(outIterations);
lightgbmlib.delete_intp(outIterations);
return new LgbmSymbolBlock(manager, iterations, handle);
}
public static void freeModel(SWIGTYPE_p_p_void handle) {
int result = lightgbmlib.LGBM_BoosterFree(lightgbmlib.voidpp_value(handle));
checkCall(result);
}
public static Pair<Integer, ByteBuffer> inference(
SWIGTYPE_p_p_void model, int iterations, NDArray a, int inferenceType) {
if (a instanceof LgbmDataset) {
LgbmDataset dataset = (LgbmDataset) a;
switch (dataset.getSrcType()) {
case FILE:
throw new IllegalArgumentException(
"LightGBM can only do inference with an Array LightGBMDataset");
case ARRAY:
return inferenceMat(
model, iterations, dataset.getSrcArrayConverted(), inferenceType);
default:
throw new IllegalArgumentException("Unexpected LgbmDataset SrcType");
}
}
if (a instanceof LgbmNDArray) {
return inferenceMat(model, iterations, (LgbmNDArray) a, inferenceType);
}
throw new IllegalArgumentException("LightGBM inference must be called with a LgbmNDArray");
}
public static Pair<Integer, ByteBuffer> inferenceMat(
SWIGTYPE_p_p_void model, int iterations, LgbmNDArray a, int inferenceType) {
SWIGTYPE_p_long_long outLength = lightgbmlib.new_int64_tp();
SWIGTYPE_p_double outBuffer = null;
try {
int bufferLength = calculateBufferLength(model, inferenceType, a.getRows(), iterations);
outBuffer = lightgbmlib.new_doubleArray(bufferLength);
int result =
lightgbmlib.LGBM_BoosterPredictForMat(
lightgbmlib.voidpp_value(model),
a.getHandle(),
a.getTypeConstant(),
a.getRows(),
a.getCols(),
1,
inferenceType,
0,
iterations,
"",
outLength,
outBuffer);
checkCall(result);
int length = Math.toIntExact(lightgbmlib.int64_tp_value(outLength));
if (a.getDataType() == DataType.FLOAT32) {
ByteBuffer bb =
ByteBuffer.allocateDirect(length * 4).order(ByteOrder.nativeOrder());
FloatBuffer wrapped = bb.asFloatBuffer();
for (int i = 0; i < length; i++) {
wrapped.put((float) lightgbmlib.doubleArray_getitem(outBuffer, i));
}
bb.rewind();
return new Pair<>(length, bb);
} else if (a.getDataType() == DataType.FLOAT64) {
ByteBuffer bb =
ByteBuffer.allocateDirect(length * 8).order(ByteOrder.nativeOrder());
DoubleBuffer wrapped = bb.asDoubleBuffer();
for (int i = 0; i < length; i++) {
wrapped.put(lightgbmlib.doubleArray_getitem(outBuffer, i));
}
bb.rewind();
return new Pair<>(length, bb);
} else {
throw new IllegalArgumentException(
"Unexpected data type for LightGBM inference. Expected Float32 or Float64,"
+ " but found "
+ a.getDataType());
}
} catch (EngineException e) {
throw new EngineException("Failed to run inference using LightGBM native engine", e);
} finally {
lightgbmlib.delete_int64_tp(outLength);
if (outBuffer != null) {
lightgbmlib.delete_doubleArray(outBuffer);
}
}
}
private static int calculateBufferLength(
SWIGTYPE_p_p_void model, int inferenceType, int rows, int iterations) {
SWIGTYPE_p_int numClasses = lightgbmlib.new_intp();
SWIGTYPE_p_int numFeatures = lightgbmlib.new_intp();
try {
int outFlag =
lightgbmlib.LGBM_BoosterGetNumClasses(
lightgbmlib.voidpp_value(model), numClasses);
checkCall(outFlag);
int classes = lightgbmlib.intp_value(numClasses);
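            // NORMAL and RAW_SCORE produce one value per class per row; LEAF_INDEX adds a
            // factor for each iteration; CONTRIB produces one SHAP value per feature plus
            // a bias term for each class and row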
if (inferenceType == lightgbmlibConstants.C_API_PREDICT_NORMAL
|| inferenceType == lightgbmlibConstants.C_API_PREDICT_RAW_SCORE) {
return classes * rows;
} else if (inferenceType == lightgbmlibConstants.C_API_PREDICT_LEAF_INDEX) {
return classes * rows * iterations;
} else if (inferenceType == lightgbmlibConstants.C_API_PREDICT_CONTRIB) {
int outFlag2 =
lightgbmlib.LGBM_BoosterGetNumFeature(
lightgbmlib.voidpp_value(model), numFeatures);
checkCall(outFlag2);
int features = lightgbmlib.intp_value(numFeatures);
return classes * rows * (features + 1);
} else {
throw new IllegalArgumentException("Unsupported inference type: " + inferenceType);
}
} finally {
lightgbmlib.delete_intp(numClasses);
lightgbmlib.delete_intp(numFeatures);
}
}
public static SWIGTYPE_p_p_void datasetFromFile(String fileName) {
SWIGTYPE_p_p_void handle = lightgbmlib.new_voidpp();
int result = lightgbmlib.LGBM_DatasetCreateFromFile(fileName, "", null, handle);
checkCall(result);
return handle;
}
public static SWIGTYPE_p_p_void datasetFromArray(LgbmNDArray a) {
SWIGTYPE_p_p_void handle = lightgbmlib.new_voidpp();
int result =
lightgbmlib.LGBM_DatasetCreateFromMat(
a.getHandle(),
a.getTypeConstant(),
a.getRows(),
a.getCols(),
1,
"",
null,
handle);
checkCall(result);
return handle;
}
public static int datasetGetRows(SWIGTYPE_p_p_void handle) {
SWIGTYPE_p_int outp = lightgbmlib.new_intp();
try {
int result = lightgbmlib.LGBM_DatasetGetNumData(lightgbmlib.voidpp_value(handle), outp);
checkCall(result);
return lightgbmlib.intp_value(outp);
} finally {
lightgbmlib.delete_intp(outp);
}
}
public static int datasetGetCols(SWIGTYPE_p_p_void handle) {
SWIGTYPE_p_int outp = lightgbmlib.new_intp();
try {
int result =
lightgbmlib.LGBM_DatasetGetNumFeature(lightgbmlib.voidpp_value(handle), outp);
checkCall(result);
return lightgbmlib.intp_value(outp);
} finally {
lightgbmlib.delete_intp(outp);
}
}
public static void freeDataset(SWIGTYPE_p_p_void handle) {
int result = lightgbmlib.LGBM_DatasetFree(lightgbmlib.voidpp_value(handle));
checkCall(result);
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/jni/LibUtils.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.lightgbm.jni;
import ai.djl.engine.EngineException;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
/** Utilities for the {@link ai.djl.ml.lightgbm.LgbmEngine} to load the native binary. */
public final class LibUtils {
private LibUtils() {}
/**
* Loads the native binary for LightGBM.
*
* @throws IOException if it fails to download the native library
*/
public static synchronized void loadNative() throws IOException {
Platform platform = Platform.fromSystem("lightgbm");
if (!"x86_64".equals(platform.getOsArch())) {
throw new IllegalStateException("Only x86 is supported");
}
if ("linux".equals(platform.getOsPrefix())) {
loadNative("linux/x86_64/lib_lightgbm.so", "lib_lightgbm.so");
loadNative("linux/x86_64/lib_lightgbm_swig.so", "lib_lightgbm_swig.so");
return;
}
if ("osx".equals(platform.getOsPrefix())) {
loadNative("osx/x86_64/lib_lightgbm.dylib", "lib_lightgbm.dylib");
loadNative("osx/x86_64/lib_lightgbm_swig.dylib", "lib_lightgbm_swig.dylib");
return;
}
if ("win".equals(platform.getOsPrefix())) {
loadNative("windows/x86_64/lib_lightgbm.dll", "lib_lightgbm.dll");
loadNative("windows/x86_64/lib_lightgbm_swig.dll", "lib_lightgbm_swig.dll");
return;
}
throw new IllegalStateException("No LightGBM Engine matches your platform");
}
private static void loadNative(String resourcePath, String name) throws IOException {
Path cacheFolder = Utils.getEngineCacheDir("lightgbm");
Path libFile = cacheFolder.resolve(name);
if (!Files.exists(libFile)) {
Files.createDirectories(cacheFolder);
resourcePath = "com/microsoft/ml/lightgbm/" + resourcePath;
Path tmp = Files.createTempFile(cacheFolder, "lightgbm-", "tmp");
try {
try (InputStream is = ClassLoaderUtils.getResourceAsStream(resourcePath)) {
Files.copy(is, tmp, StandardCopyOption.REPLACE_EXISTING);
}
Utils.moveQuietly(tmp, libFile);
} finally {
Utils.deleteQuietly(tmp);
}
}
try {
System.load(libFile.toString());
} catch (UnsatisfiedLinkError err) {
throw new EngineException("Cannot load library: " + name, err);
}
}
}
|
0
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm
|
java-sources/ai/djl/ml/lightgbm/lightgbm/0.34.0/ai/djl/ml/lightgbm/jni/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains helper classes to access the LightGBM JNI. */
package ai.djl.ml.lightgbm.jni;
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml/xgboost/XgbEngine.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.xgboost;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.engine.StandardCapabilities;
import ai.djl.ndarray.NDManager;
import ml.dmlc.xgboost4j.java.JniUtils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * The {@code XgbEngine} is an implementation of the {@link Engine} based on <a
* href="https://github.com/dmlc/xgboost">XGBoost</a>.
*
* <p>To get an instance of the {@code XgbEngine} when it is not the default Engine, call {@link
* Engine#getEngine(String)} with the Engine name "XGBoost".
*/
public final class XgbEngine extends Engine {
public static final String ENGINE_NAME = "XGBoost";
static final int RANK = 10;
private Engine alternativeEngine;
private boolean initialized;
private XgbEngine() {}
static Engine newInstance() {
JniUtils.checkCall(0); // Load the native
return new XgbEngine();
}
/** {@inheritDoc} */
@Override
public Engine getAlternativeEngine() {
if (!initialized && !Boolean.getBoolean("ai.djl.xgboost.disable_alternative")) {
Engine engine = Engine.getInstance();
if (engine.getRank() < getRank()) {
// alternativeEngine should not have the same rank as XGBoost
alternativeEngine = engine;
}
initialized = true;
}
return alternativeEngine;
}
/** {@inheritDoc} */
@Override
public String getEngineName() {
return ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getRank() {
return RANK;
}
/** {@inheritDoc} */
@Override
public String getVersion() {
try (InputStream is =
XgbEngine.class.getResourceAsStream("/xgboost4j-version.properties")) {
Properties prop = new Properties();
prop.load(is);
return prop.getProperty("version");
} catch (IOException e) {
throw new AssertionError("Failed to load xgboost4j-version.properties", e);
}
}
/** {@inheritDoc} */
@Override
public boolean hasCapability(String capability) {
if (StandardCapabilities.CUDA.equals(capability)) {
try {
Class.forName("ml.dmlc.xgboost4j.gpu.java.CudfColumn");
return true;
} catch (ClassNotFoundException ignore) {
return false;
}
}
return false;
}
/** {@inheritDoc} */
@Override
public Model newModel(String name, Device device) {
return new XgbModel(name, newBaseManager(device));
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager() {
return newBaseManager(null);
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager(Device device) {
return XgbNDManager.getSystemManager().newSubManager(device);
}
/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(200);
sb.append(getEngineName()).append(':').append(getVersion()).append(", capabilities: [\n");
if (hasCapability(StandardCapabilities.CUDA)) {
    sb.append("\t").append(StandardCapabilities.CUDA).append(",\n"); // NOPMD
}
sb.append(']');
return sb.toString();
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml/xgboost/XgbEngineProvider.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.xgboost;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code XgbEngineProvider} is the XGBoost implementation of {@link EngineProvider}. */
public class XgbEngineProvider implements EngineProvider {
/** {@inheritDoc} */
@Override
public String getEngineName() {
return XgbEngine.ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getEngineRank() {
return XgbEngine.RANK;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return InstanceHolder.INSTANCE;
}
private static class InstanceHolder {
static final Engine INSTANCE = XgbEngine.newInstance();
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml/xgboost/XgbModel.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.xgboost;
import ai.djl.BaseModel;
import ai.djl.Model;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ml.dmlc.xgboost4j.java.JniUtils;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
/** {@code XgbModel} is the XGBoost implementation of {@link Model}. */
public class XgbModel extends BaseModel {
/**
 * Constructs a new {@code XgbModel}.
 *
 * @param modelName the model name
 * @param manager the {@link NDManager} that holds the NDArray
*/
XgbModel(String modelName, NDManager manager) {
super(modelName);
dataType = DataType.FLOAT32;
this.manager = manager;
manager.setName("XgbModel");
}
/** {@inheritDoc} */
@Override
public void load(Path modelPath, String prefix, Map<String, ?> options) throws IOException {
setModelDir(modelPath);
wasLoaded = true;
if (block != null) {
throw new UnsupportedOperationException("XGBoost does not support dynamic blocks");
}
Path modelFile = findModelFile(prefix);
if (modelFile == null) {
modelFile = findModelFile(modelDir.toFile().getName());
if (modelFile == null) {
throw new FileNotFoundException("No .json or .xgb model file found in: " + modelPath);
}
}
block = JniUtils.loadModel((XgbNDManager) manager, modelFile.toAbsolutePath().toString());
// set extra options
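// e.g. supplied through Criteria.Builder.optOption(...); the values shown are illustrative:
//   "Mode" -> "output_margin", "TreeLimit" -> "100"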
if (options != null) {
if (options.containsKey("Mode")) {
((XgbSymbolBlock) block)
.setMode(
XgbSymbolBlock.Mode.valueOf(
((String) options.get("Mode")).toUpperCase(Locale.ROOT)));
}
if (options.containsKey("TreeLimit")) {
((XgbSymbolBlock) block)
.setTreeLimit(Integer.parseInt((String) options.get("TreeLimit")));
}
}
}
private Path findModelFile(String prefix) {
if (Files.isRegularFile(modelDir)) {
Path file = modelDir;
modelDir = modelDir.getParent();
String fileName = file.toFile().getName();
if (fileName.endsWith(".json")) {
modelName = fileName.substring(0, fileName.length() - 5);
} else if (fileName.endsWith(".xgb")) {
modelName = fileName.substring(0, fileName.length() - 4);
} else {
modelName = fileName;
}
return file;
}
if (prefix == null) {
prefix = modelName;
}
Path modelFile = modelDir.resolve(prefix);
if (Files.notExists(modelFile) || !Files.isRegularFile(modelFile)) {
if (prefix.endsWith(".json") || prefix.endsWith(".xgb")) {
return null;
}
modelFile = modelDir.resolve(prefix + ".json");
if (Files.isRegularFile(modelFile)) {
return modelFile;
}
modelFile = modelDir.resolve(prefix + ".xgb");
if (Files.isRegularFile(modelFile)) {
return modelFile;
}
modelFile = modelDir.resolve("model.xgb");
if (Files.isRegularFile(modelFile)) {
return modelFile;
}
return null;
}
return modelFile;
}
/** {@inheritDoc} */
@Override
public void close() {
if (block != null) {
((XgbSymbolBlock) block).close();
block = null;
}
super.close();
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml/xgboost/XgbNDArray.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.xgboost;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrayAdapter;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ml.dmlc.xgboost4j.java.JniUtils;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicLong;
/** {@code XgbNDArray} is the XGBoost implementation of {@link NDArray}. */
public class XgbNDArray extends NDArrayAdapter {
private AtomicLong handle;
private ByteBuffer data;
private SparseFormat format;
XgbNDArray(
NDManager manager,
NDManager alternativeManager,
long handle,
Shape shape,
SparseFormat format) {
super(manager, alternativeManager, shape, DataType.FLOAT32, String.valueOf(handle));
this.handle = new AtomicLong(handle);
this.format = format;
manager.attachInternal(uid, this);
}
XgbNDArray(
NDManager manager,
NDManager alternativeManager,
ByteBuffer data,
Shape shape,
DataType dataType) {
super(manager, alternativeManager, shape, dataType, NDManager.nextUid());
this.data = data;
this.format = SparseFormat.DENSE;
manager.attachInternal(uid, this);
}
/**
 * Returns the native XGBoost DMatrix pointer.
*
* @return the pointer
*/
public long getHandle() {
if (handle == null) {
throw new UnsupportedOperationException(
"XgbNDArray only support float32 and shape must be in two dimension.");
}
return handle.get();
}
/** {@inheritDoc} */
@Override
public SparseFormat getSparseFormat() {
return format;
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer(boolean tryDirect) {
if (data == null) {
throw new UnsupportedOperationException("Cannot obtain value from DMatrix");
}
data.rewind();
return data;
}
/** {@inheritDoc} */
@Override
public void intern(NDArray replaced) {
if (!(replaced instanceof XgbNDArray)) {
throw new IllegalArgumentException(
"The replaced NDArray must be an instance of XgbNDArray.");
}
XgbNDArray array = (XgbNDArray) replaced;
if (isReleased()) {
throw new IllegalArgumentException("This array is already closed");
}
if (replaced.isReleased()) {
throw new IllegalArgumentException("This target array is already closed");
}
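// free this array's native DMatrix before adopting the replacement's resources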
if (handle != null && handle.get() != 0L) {
long pointer = handle.getAndSet(0L);
JniUtils.deleteDMatrix(pointer);
}
if (alternativeArray != null) {
alternativeArray.close();
}
data = array.data;
handle = array.handle;
format = array.format;
alternativeArray = array.alternativeArray;
array.handle = null;
array.alternativeArray = null;
array.close();
}
/** {@inheritDoc} */
@Override
public void returnResource(NDManager manager) {
detach();
this.manager = manager;
manager.attachUncappedInternal(getUid(), this);
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detachInternal(getUid());
manager = XgbNDManager.getSystemManager();
}
/** {@inheritDoc} */
@Override
public void close() {
super.close();
if (handle != null && handle.get() != 0L) {
long pointer = handle.getAndSet(0L);
JniUtils.deleteDMatrix(pointer);
}
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml/xgboost/XgbNDManager.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.xgboost;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.util.JsonUtils;
import com.google.gson.JsonArray;
import ml.dmlc.xgboost4j.java.ColumnBatch;
import ml.dmlc.xgboost4j.java.JniUtils;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Arrays;
/**
 * {@code XgbNDManager} is the XGBoost implementation of {@link NDManager}.
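 *
 * <p>A minimal usage sketch (shape and values are illustrative; float32 data with a
 * two-dimensional shape is backed by a native DMatrix):
 *
 * <pre>{@code
 * NDManager manager = Engine.getEngine("XGBoost").newBaseManager();
 * FloatBuffer data = FloatBuffer.wrap(new float[] {1f, 2f, 3f, 4f});
 * NDArray array = manager.create(data, new Shape(2, 2), DataType.FLOAT32);
 * }</pre>
 */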
public class XgbNDManager extends BaseNDManager {
private static final XgbNDManager SYSTEM_MANAGER = new SystemManager();
private float missingValue = Float.NaN;
private int nthread = 1;
private XgbNDManager(NDManager parent, Device device) {
super(parent, device);
}
static XgbNDManager getSystemManager() {
return SYSTEM_MANAGER;
}
/**
* Sets the default missing value.
*
* @param missingValue the default missing value
*/
public void setMissingValue(float missingValue) {
this.missingValue = missingValue;
}
/**
* Sets the default number of threads.
*
* @param nthread the default number of threads
*/
public void setNthread(int nthread) {
this.nthread = nthread;
}
/** {@inheritDoc} */
@Override
public ByteBuffer allocateDirect(int capacity) {
return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
}
/** {@inheritDoc} */
@Override
public XgbNDArray from(NDArray array) {
if (array == null || array instanceof XgbNDArray) {
return (XgbNDArray) array;
}
XgbNDArray result =
(XgbNDArray) create(array.toByteBuffer(), array.getShape(), array.getDataType());
result.setName(array.getName());
return result;
}
/** {@inheritDoc} */
@Override
public NDManager newSubManager(Device device) {
XgbNDManager manager = new XgbNDManager(this, device);
attachInternal(manager.uid, manager);
return manager;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return Engine.getEngine(XgbEngine.ENGINE_NAME);
}
/**
* Creates {@link XgbNDArray} from column array interface.
*
 * @param columnBatch the XGBoost ColumnBatch that provides the CUDA array interface of the
 *     feature columns
 * @param missing the missing value
 * @param nthread the number of threads
* @return a new instance of {@link NDArray}
*/
public NDArray create(ColumnBatch columnBatch, float missing, int nthread) {
String json = columnBatch.getFeatureArrayInterface();
JsonArray array = JsonUtils.GSON.fromJson(json, JsonArray.class);
JsonArray shapeJson = array.get(0).getAsJsonObject().get("shape").getAsJsonArray();
long[] shapes = new long[shapeJson.size()];
for (int i = 0; i < shapes.length; ++i) {
shapes[i] = shapeJson.get(i).getAsLong();
}
Shape shape = new Shape(shapes);
long handle = JniUtils.createDMatrix(columnBatch, missing, nthread);
return new XgbNDArray(this, alternativeManager, handle, shape, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray create(Buffer data, Shape shape, DataType dataType) {
if (shape.dimension() != 2) {
if (data instanceof ByteBuffer) {
// output only NDArray
return new XgbNDArray(this, alternativeManager, (ByteBuffer) data, shape, dataType);
}
if (alternativeManager != null) {
return alternativeManager.create(data, shape, dataType);
}
throw new UnsupportedOperationException("XgbNDArray shape must be in two dimension.");
}
if (dataType != DataType.FLOAT32) {
if (data instanceof ByteBuffer) {
// output only NDArray
return new XgbNDArray(this, alternativeManager, (ByteBuffer) data, shape, dataType);
}
if (alternativeManager != null) {
return alternativeManager.create(data, shape, dataType);
}
throw new UnsupportedOperationException("XgbNDArray only supports float32.");
}
if (data instanceof ByteBuffer) {
if (data.isDirect()) {
// TODO: allow user to set missing value
long handle = JniUtils.createDMatrix(data, shape, missingValue);
return new XgbNDArray(this, alternativeManager, handle, shape, SparseFormat.DENSE);
}
data = ((ByteBuffer) data).asFloatBuffer();
}
DataType inputType = DataType.fromBuffer(data);
if (inputType != DataType.FLOAT32) {
throw new UnsupportedOperationException(
"Only Float32 data type supported, actual " + inputType);
}
int size = Math.toIntExact(shape.size() * DataType.FLOAT32.getNumOfBytes());
ByteBuffer buf = allocateDirect(size);
buf.asFloatBuffer().put((FloatBuffer) data);
buf.rewind();
long handle = JniUtils.createDMatrix(buf, shape, missingValue);
return new XgbNDArray(this, alternativeManager, handle, shape, SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray createCSR(Buffer buffer, long[] indptr, long[] indices, Shape shape) {
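// CSR layout: indptr has (rows + 1) entries; indices and buffer hold the column
// index and float32 value of each non-zero element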
if (shape.dimension() != 2) {
throw new UnsupportedOperationException("Shape must be in two dimension");
}
int[] intIndices = Arrays.stream(indices).mapToInt(Math::toIntExact).toArray();
float[] data = new float[buffer.remaining()];
((FloatBuffer) buffer).get(data);
long handle = JniUtils.createDMatrixCSR(indptr, intIndices, data, missingValue, nthread);
return new XgbNDArray(this, alternativeManager, handle, shape, SparseFormat.CSR);
}
/** {@inheritDoc} */
@Override
public void close() {
super.close();
if (alternativeManager != null) {
alternativeManager.close();
alternativeManager = null;
}
}
/** The SystemManager is the root {@link XgbNDManager} of which all others are children. */
private static final class SystemManager extends XgbNDManager implements SystemNDManager {
SystemManager() {
super(null, null);
}
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml/xgboost/XgbSymbolBlock.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.ml.xgboost;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.AbstractSymbolBlock;
import ai.djl.nn.ParameterList;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
import ml.dmlc.xgboost4j.java.JniUtils;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicReference;
/** {@code XgbSymbolBlock} is the XGBoost implementation of {@link SymbolBlock}. */
public class XgbSymbolBlock extends AbstractSymbolBlock implements AutoCloseable {
private AtomicReference<Long> handle;
private String uid;
private XgbNDManager manager;
private Mode mode;
private int treeLimit;
/**
 * Constructs an {@code XgbSymbolBlock}.
 *
 * <p>You can create an {@code XgbSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
 * String)}.
*
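 * <p>For example (a sketch; the model name and path are illustrative):
 *
 * <pre>{@code
 * Model model = Model.newInstance("my-model", "XGBoost");
 * model.load(Paths.get("/path/to/model"));
 * }</pre>
 *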
* @param manager the manager to use for the block
* @param handle the Booster handle
*/
@SuppressWarnings("this-escape")
public XgbSymbolBlock(XgbNDManager manager, long handle) {
this.handle = new AtomicReference<>(handle);
this.manager = manager;
uid = String.valueOf(handle);
manager.attachInternal(uid, this);
mode = Mode.DEFAULT;
treeLimit = 0;
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
NDArray array = inputs.singletonOrThrow();
try (XgbNDManager sub = (XgbNDManager) manager.newSubManager()) {
XgbNDArray xgbNDArray = sub.from(array);
// TODO: return DirectBuffer from JNI to avoid copy
float[] result = JniUtils.inference(this, xgbNDArray, treeLimit, mode);
ByteBuffer buf = manager.allocateDirect(result.length * 4);
buf.asFloatBuffer().put(result);
buf.rewind();
NDArray ret = manager.create(buf, new Shape(result.length), DataType.FLOAT32);
ret.attach(array.getManager());
return new NDList(ret);
}
}
/** {@inheritDoc} */
@Override
public void close() {
Long pointer = handle.getAndSet(null);
if (pointer != null) {
JniUtils.deleteModel(pointer);
manager.detachInternal(uid);
manager = null;
}
}
/**
* Gets the native XGBoost Booster pointer.
*
* @return the pointer
*/
public Long getHandle() {
Long reference = handle.get();
if (reference == null) {
throw new IllegalStateException("XGBoost model handle has been released!");
}
return reference;
}
void setMode(Mode mode) {
this.mode = mode;
}
void setTreeLimit(int treeLimit) {
this.treeLimit = treeLimit;
}
/** {@inheritDoc} */
@Override
public ParameterList getDirectParameters() {
throw new UnsupportedOperationException("Not yet supported");
}
/** The mode of inference for OptionMask. */
public enum Mode {
DEFAULT(0),
OUTPUT_MARGIN(1),
LEAF(2),
CONTRIB(4);
private int value;
Mode(int value) {
this.value = value;
}
/**
* Gets the value of the mode.
*
* @return the value in number
*/
public int getValue() {
return value;
}
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ai/djl/ml/xgboost/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains implementations of interfaces within the DJL API for the XGBoost Engine. */
package ai.djl.ml.xgboost;
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/com/sun
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/com/sun/jna/PointerProxy.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.sun.jna;
/** {@code PointerProxy} is a Pointer wrapper that can access the peer value of the pointer. */
public class PointerProxy extends Pointer {
/**
 * Creates a proxy from a native pointer. Don't use this unless you know what you're doing.
*
* @param ptr the target pointer
*/
public PointerProxy(Pointer ptr) {
super(ptr.peer);
}
/**
 * Gets the peer value from the {@link Pointer}.
*
* @return peer value in long
*/
public long getPeer() {
return peer;
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/com/sun
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/com/sun/jna/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains JNA changes on the existing Pointer. */
package com.sun.jna;
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ml/dmlc/xgboost4j
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ml/dmlc/xgboost4j/java/JniUtils.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ml.dmlc.xgboost4j.java;
import ai.djl.engine.EngineException;
import ai.djl.ml.xgboost.XgbNDArray;
import ai.djl.ml.xgboost.XgbNDManager;
import ai.djl.ml.xgboost.XgbSymbolBlock;
import ai.djl.ndarray.types.Shape;
import com.sun.jna.Native;
import com.sun.jna.PointerProxy;
import java.nio.Buffer;
/** DJL class that has access to XGBoost JNI. */
@SuppressWarnings("MissingJavadocMethod")
public final class JniUtils {
private JniUtils() {}
public static void checkCall(int ret) {
try {
XGBoostJNI.checkCall(ret);
} catch (XGBoostError e) {
throw new EngineException("XGBoost Engine error: ", e);
}
}
public static XgbSymbolBlock loadModel(XgbNDManager manager, String modelPath) {
// TODO: add Matrix handle option
long handle = createBoosterHandle(null);
checkCall(XGBoostJNI.XGBoosterLoadModel(handle, modelPath));
return new XgbSymbolBlock(manager, handle);
}
public static long createDMatrix(Buffer buf, Shape shape, float missing) {
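// buf is expected to be a direct buffer of float32 values in row-major (rows x cols) order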
long[] handles = new long[1];
int row = (int) shape.get(0);
int col = (int) shape.get(1);
long handle = new PointerProxy(Native.getDirectBufferPointer(buf)).getPeer();
checkCall(XGBoostJNI.XGDMatrixCreateFromMatRef(handle, row, col, missing, handles));
return handles[0];
}
public static long createDMatrix(ColumnBatch columnBatch, float missing, int nthread) {
long[] handles = new long[1];
String json = columnBatch.getFeatureArrayInterface();
if (json == null || json.isEmpty()) {
throw new IllegalArgumentException(
"Expecting non-empty feature columns' array interface");
}
checkCall(
XGBoostJNI.XGDMatrixCreateFromArrayInterfaceColumns(
json, missing, nthread, handles));
return handles[0];
}
public static long createDMatrixCSR(
long[] indptr, int[] indices, float[] array, float missing, int nthread) {
long[] handles = new long[1];
checkCall(
XGBoostJNI.XGDMatrixCreateFromCSR(
indptr, indices, array, 0, missing, nthread, handles));
return handles[0];
}
public static void deleteDMatrix(long handle) {
checkCall(XGBoostJNI.XGDMatrixFree(handle));
}
public static float[] inference(
XgbSymbolBlock block, XgbNDArray array, int treeLimit, XgbSymbolBlock.Mode mode) {
float[][] output = new float[1][];
checkCall(
XGBoostJNI.XGBoosterPredict(
block.getHandle(), array.getHandle(), treeLimit, mode.getValue(), output));
return output[0];
}
public static void deleteModel(long handle) {
checkCall(XGBoostJNI.XGBoosterFree(handle));
}
private static long createBoosterHandle(long[] matrixHandles) {
long[] handles = new long[1];
checkCall(XGBoostJNI.XGBoosterCreate(matrixHandles, handles));
return handles[0];
}
}
|
0
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ml/dmlc/xgboost4j
|
java-sources/ai/djl/ml/xgboost/xgboost/0.34.0/ml/dmlc/xgboost4j/java/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains Helper class to access XGBoost JNI. */
package ml.dmlc.xgboost4j.java;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/BasicModelZoo.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo;
import ai.djl.Application.CV;
import ai.djl.repository.RemoteRepository;
import ai.djl.repository.Repository;
import ai.djl.repository.zoo.ModelZoo;
import java.util.HashSet;
import java.util.Set;
/**
 * {@code BasicModelZoo} is a repository that contains the built-in models for DJL.
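 *
 * <p>A lookup sketch (the artifact id and input/output types shown are illustrative):
 *
 * <pre>{@code
 * Criteria<Image, Classifications> criteria =
 *         Criteria.builder()
 *                 .optApplication(Application.CV.IMAGE_CLASSIFICATION)
 *                 .setTypes(Image.class, Classifications.class)
 *                 .optGroupId(BasicModelZoo.GROUP_ID)
 *                 .optArtifactId("mlp")
 *                 .build();
 * }</pre>
 */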
public class BasicModelZoo extends ModelZoo {
private static final Repository REPOSITORY = new RemoteRepository("zoo", DJL_REPO_URL);
public static final String GROUP_ID = "ai.djl.zoo";
BasicModelZoo() {
addModel(REPOSITORY.model(CV.IMAGE_CLASSIFICATION, GROUP_ID, "mlp", "0.0.3"));
addModel(REPOSITORY.model(CV.IMAGE_CLASSIFICATION, GROUP_ID, "resnet", "0.0.2"));
addModel(REPOSITORY.model(CV.OBJECT_DETECTION, GROUP_ID, "ssd", "0.0.2"));
}
/** {@inheritDoc} */
@Override
public String getGroupId() {
return GROUP_ID;
}
/** {@inheritDoc} */
@Override
public Set<String> getSupportedEngines() {
Set<String> set = new HashSet<>();
set.add("MXNet");
set.add("PyTorch");
// TODO Currently WIP in supporting these two engines in the basic model zoo
// set.add("TensorFlow");
return set;
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/BasicZooProvider.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooProvider;
/**
 * An imperative model zoo provider that implements the {@link ai.djl.repository.zoo.ZooProvider}
 * interface.
*/
public class BasicZooProvider implements ZooProvider {
/** {@inheritDoc} */
@Override
public ModelZoo getModelZoo() {
return new BasicModelZoo();
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the built-in {@link ai.djl.basicmodelzoo.BasicModelZoo}. */
package ai.djl.basicmodelzoo;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/basic/Mlp.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.basic;
import ai.djl.ndarray.NDList;
import ai.djl.nn.Activation;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.core.Linear;
import java.util.function.Function;
/**
 * A Multilayer Perceptron (MLP) neural network.
*
* <p>A multilayer perceptron (MLP) is a feedforward artificial neural network that generates a set
* of outputs from a set of inputs. An MLP is characterized by several layers of input nodes
 * connected as a directed graph between the input and output layers. An MLP uses backpropagation
 * for
* training the network.
*
* <p>MLP is widely used for solving problems that require supervised learning as well as research
* into computational neuroscience and parallel distributed processing. Applications include speech
* recognition, image recognition and machine translation.
*
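 * <p>For example, a sketch of an MLP for 28x28 images with 10 output classes (sizes are
 * illustrative):
 *
 * <pre>{@code
 * Block mlp = new Mlp(28 * 28, 10, new int[] {128, 64});
 * }</pre>
 *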
* @see <a href="https://d2l.djl.ai/chapter_multilayer-perceptrons/mlp.html">The D2L chapters on
* MLPs</a>
*/
public class Mlp extends SequentialBlock {
/**
 * Creates an MLP neural network using ReLU.
*
* @param input the size of the input vector
* @param output the size of the output vector
* @param hidden the sizes of all of the hidden layers
*/
public Mlp(int input, int output, int[] hidden) {
this(input, output, hidden, Activation::relu);
}
/**
 * Creates an MLP neural network.
*
* @param input the size of the input vector
* @param output the size of the output vector
* @param hidden the sizes of all of the hidden layers
* @param activation the activation function to use
*/
@SuppressWarnings("this-escape")
public Mlp(int input, int output, int[] hidden, Function<NDList, NDList> activation) {
add(Blocks.batchFlattenBlock(input));
for (int hiddenLayerSize : hidden) {
add(Linear.builder().setUnits(hiddenLayerSize).build());
add(activation);
}
add(Linear.builder().setUnits(output).build());
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/basic/MlpBlockFactory.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.basic;
import ai.djl.Model;
import ai.djl.nn.Block;
import ai.djl.nn.BlockFactory;
import ai.djl.translate.ArgumentsUtil;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/** A {@link BlockFactory} class that creates MLP block. */
public class MlpBlockFactory implements BlockFactory {
private static final long serialVersionUID = 1L;
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public Block newBlock(Model model, Path modelPath, Map<String, ?> arguments) {
int width = ArgumentsUtil.intValue(arguments, "width", 28);
int height = ArgumentsUtil.intValue(arguments, "height", 28);
int output = ArgumentsUtil.intValue(arguments, "output", 10);
int input = width * height;
Object hiddenValue = arguments.get("hidden");
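// "hidden" may arrive as a JSON list (e.g. [256.0, 128.0]) or as a
// comma-separated string (e.g. "256,128"); both forms are handled below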
int[] hidden;
if (hiddenValue == null) {
hidden = new int[] {128, 64};
} else if (hiddenValue instanceof List) {
hidden = ((List<Double>) hiddenValue).stream().mapToInt(Double::intValue).toArray();
} else {
String[] v = ((String) hiddenValue).split(",");
hidden = Arrays.stream(v).mapToInt(Integer::parseInt).toArray();
}
return new Mlp(input, output, hidden);
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/basic/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the basic built-in models. */
package ai.djl.basicmodelzoo.basic;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the computer vision section ({@link ai.djl.Application.CV}) of the {@link
* ai.djl.basicmodelzoo.BasicModelZoo}.
*/
package ai.djl.basicmodelzoo.cv;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/AlexNet.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.core.Linear;
import ai.djl.nn.norm.Dropout;
import ai.djl.nn.pooling.Pool;
/**
 * {@code AlexNet} contains a generic implementation of AlexNet adapted from the <a
 * href="https://github.com/pytorch/vision/blob/master/torchvision/models/alexnet.py">torchvision
 * implementation</a>.
 *
 * <p>AlexNet model from the <a href="https://arxiv.org/abs/1404.5997">"One weird trick..."</a>
 * paper.
*
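 * <p>A construction sketch (the output size shown is the default, for illustration):
 *
 * <pre>{@code
 * Block alexNet = AlexNet.builder().setOutSize(10).build();
 * }</pre>
 *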
* @see <a href="https://d2l.djl.ai/chapter_convolutional-modern/alexnet.html">The D2L chapter on
* AlexNet</a>
*/
public final class AlexNet {
private AlexNet() {}
/**
 * Creates an AlexNet network block with the help of the AlexNet Builder.
 *
 * @param builder the {@link AlexNet.Builder} with the necessary arguments
 * @return an AlexNet block
*/
public static Block alexNet(Builder builder) {
return new SequentialBlock()
.add(
Conv2d.builder()
.setKernelShape(new Shape(11, 11))
.optStride(new Shape(4, 4))
.setFilters(builder.numChannels[0])
.build())
.add(Activation::relu)
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2)))
// Make the convolution window smaller, set padding to 2 for consistent
// height and width across the input and output, and increase the
// number of output channels
.add(
Conv2d.builder()
.setKernelShape(new Shape(5, 5))
.optPadding(new Shape(2, 2))
.setFilters(builder.numChannels[1])
.build())
.add(Activation::relu)
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2)))
// Use three successive convolutional layers and a smaller convolution
// window. Except for the final convolutional layer, the number of
// output channels is further increased. Pooling layers are not used to
// reduce the height and width of input after the first two
// convolutional layers
.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.setFilters(builder.numChannels[2])
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.setFilters(builder.numChannels[3])
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.setFilters(builder.numChannels[4])
.build())
.add(Activation::relu)
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2)))
// Here, the number of outputs of the fully connected layer is several
// times larger than that in LeNet. Use the dropout layer to mitigate
// overfitting
.add(Blocks.batchFlattenBlock())
.add(Linear.builder().setUnits(builder.numChannels[5]).build())
.add(Activation::relu)
.add(Dropout.builder().optRate(builder.dropOutRate).build())
.add(Linear.builder().setUnits(builder.numChannels[6]).build())
.add(Activation::relu)
.add(Dropout.builder().optRate(builder.dropOutRate).build())
// Output layer. The number of classes is 10 by default, instead of
// 1000 as in the paper
.add(Linear.builder().setUnits(builder.outSize).build());
}
/**
 * Creates a builder to build an {@link AlexNet}.
*
* @return a new builder
*/
public static AlexNet.Builder builder() {
return new AlexNet.Builder();
}
/** The Builder to construct an {@link AlexNet} object. */
public static final class Builder {
float dropOutRate = 0.5f;
int numLayers = 7;
int[] numChannels = {96, 256, 384, 384, 256, 4096, 4096};
long outSize = 10;
Builder() {}
/**
* Sets the dropout rate in the network.
*
* @param dropOutRate the dropout rate
* @return this {@code Builder}
*/
public AlexNet.Builder setDropOutRate(float dropOutRate) {
this.dropOutRate = dropOutRate;
return this;
}
/**
* Sets the number of channels for the AlexNet blocks.
*
* @param numChannels the number of channels for every AlexNet block.
* @return this {@code Builder}
*/
public AlexNet.Builder setNumChannels(int[] numChannels) {
if (numChannels.length != numLayers) {
throw new IllegalArgumentException(
"number of channels should be equal to " + numLayers);
}
this.numChannels = numChannels;
return this;
}
/**
* Sets the size of the output.
*
* @param outSize the output size
* @return this {@code Builder}
*/
public AlexNet.Builder setOutSize(long outSize) {
this.outSize = outSize;
return this;
}
/**
 * Builds an {@link AlexNet} block.
*
* @return the {@link AlexNet} block
*/
public Block build() {
return alexNet(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/GoogLeNet.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.ParallelBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.core.Linear;
import ai.djl.nn.pooling.Pool;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* GoogLeNet uses a stack of a total of 9 inception blocks and global average pooling to generate
 * its estimates. Maximum pooling between inception blocks reduces the dimensionality. The first
 * part is identical to AlexNet and LeNet, the stack of blocks is inherited from VGG, and the
 * global average pooling avoids a stack of fully-connected layers at the end.
*
* <p>GoogLeNet paper from Christian Szegedy, Wei Liu, Yangqing Jia, Pierre Sermanet, Scott Reed,
 * Dragomir Anguelov, Dumitru Erhan, Vincent Vanhoucke, Andrew Rabinovich: <a
 * href="https://arxiv.org/abs/1409.4842">“Going Deeper with Convolutions”</a>
*
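 * <p>A construction sketch (the output size shown is the default, for illustration):
 *
 * <pre>{@code
 * Block googLeNet = GoogLeNet.builder().setOutSize(10).build();
 * }</pre>
 *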
* @see <a href="https://d2l.djl.ai/chapter_convolutional-modern/googlenet.html">The D2L chapter on
* GoogLeNet</a>
*/
public final class GoogLeNet {
private GoogLeNet() {}
/**
* Creates a GoogLeNet network block with the help of the GoogLeNet Builder.
*
* @param builder the {@link GoogLeNet.Builder} with the necessary arguments.
* @return a GoogLeNet block.
*/
public static Block googLeNet(Builder builder) {
GoogLeNet googLeNet = new GoogLeNet();
// creation of block1
SequentialBlock block1 = new SequentialBlock();
block1.add(
Conv2d.builder()
.setKernelShape(new Shape(7, 7))
.optPadding(new Shape(3, 3))
.optStride(new Shape(2, 2))
.setFilters(64)
.build())
.add(Activation::relu)
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(1, 1)));
// creation of block2
SequentialBlock block2 = new SequentialBlock();
block2.add(Conv2d.builder().setFilters(64).setKernelShape(new Shape(1, 1)).build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setFilters(192)
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.build())
.add(Activation::relu)
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(1, 1)));
// creation of block3
SequentialBlock block3 = new SequentialBlock();
block3.add(googLeNet.inceptionBlock(64, new int[] {96, 128}, new int[] {16, 32}, 32))
.add(googLeNet.inceptionBlock(128, new int[] {128, 192}, new int[] {32, 96}, 64))
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(1, 1)));
// creation of block4
SequentialBlock block4 = new SequentialBlock();
block4.add(googLeNet.inceptionBlock(192, new int[] {96, 208}, new int[] {16, 48}, 64))
.add(googLeNet.inceptionBlock(160, new int[] {112, 224}, new int[] {24, 64}, 64))
.add(googLeNet.inceptionBlock(128, new int[] {128, 256}, new int[] {24, 64}, 64))
.add(googLeNet.inceptionBlock(112, new int[] {144, 288}, new int[] {32, 64}, 64))
.add(googLeNet.inceptionBlock(256, new int[] {160, 320}, new int[] {32, 128}, 128))
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(1, 1)));
// creation of block5
SequentialBlock block5 = new SequentialBlock();
block5.add(googLeNet.inceptionBlock(256, new int[] {160, 320}, new int[] {32, 128}, 128))
.add(googLeNet.inceptionBlock(384, new int[] {192, 384}, new int[] {48, 128}, 128))
.add(Pool.globalAvgPool2dBlock());
return new SequentialBlock()
.addAll(
block1,
block2,
block3,
block4,
block5,
Linear.builder().setUnits(builder.outSize).build());
}
// c1 - c4 are the number of output channels for each layer in the path
/**
* Creates a constituent inception block that becomes a part of the whole GoogLeNet model.
*
* @param c1 number of channels for the first path of sequential block.
* @param c2 array of channels for the second path of sequential block.
* @param c3 array of channels for the third path of sequential block.
* @param c4 number of channels for the fourth path of sequential block.
* @return a parallel block combining all 4 paths of sequential blocks.
*/
public ParallelBlock inceptionBlock(int c1, int[] c2, int[] c3, int c4) {
// Path 1 is a single 1 x 1 convolutional layer
SequentialBlock p1 =
new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(c1)
.setKernelShape(new Shape(1, 1))
.build())
.add(Activation::relu);
// Path 2 is a 1 x 1 convolutional layer followed by a 3 x 3
// convolutional layer
SequentialBlock p2 =
new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(c2[0])
.setKernelShape(new Shape(1, 1))
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setFilters(c2[1])
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.build())
.add(Activation::relu);
// Path 3 is a 1 x 1 convolutional layer followed by a 5 x 5
// convolutional layer
SequentialBlock p3 =
new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(c3[0])
.setKernelShape(new Shape(1, 1))
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setFilters(c3[1])
.setKernelShape(new Shape(5, 5))
.optPadding(new Shape(2, 2))
.build())
.add(Activation::relu);
// Path 4 is a 3 x 3 maximum pooling layer followed by a 1 x 1
// convolutional layer
SequentialBlock p4 =
new SequentialBlock()
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(1, 1), new Shape(1, 1)))
.add(
Conv2d.builder()
.setFilters(c4)
.setKernelShape(new Shape(1, 1))
.build())
.add(Activation::relu);
// Concatenate the outputs on the channel dimension
return new ParallelBlock(
list -> {
List<NDArray> concatenatedList =
list.stream().map(NDList::head).collect(Collectors.toList());
return new NDList(NDArrays.concat(new NDList(concatenatedList), 1));
},
Arrays.asList(p1, p2, p3, p4));
}
/**
* Creates a builder to build a {@link GoogLeNet}.
*
* @return a new builder
*/
public static GoogLeNet.Builder builder() {
return new GoogLeNet.Builder();
}
/** The Builder to construct a {@link GoogLeNet} object. */
public static final class Builder {
long outSize = 10;
Builder() {}
/**
* Sets the size of the output.
*
* @param outSize the output size
* @return this {@code Builder}
*/
public Builder setOutSize(long outSize) {
this.outSize = outSize;
return this;
}
/**
* Builds a {@link GoogLeNet} block.
*
* @return the {@link GoogLeNet} block
*/
public Block build() {
return googLeNet(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/LeNet.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.core.Linear;
import ai.djl.nn.pooling.Pool;
/**
 * The model was introduced by (and named for) Yann LeCun for the purpose of recognizing
 * handwritten digits in images: <a href="http://yann.lecun.com/exdb/lenet/">LeNet-5</a>.
*
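 * <p>A construction sketch (the channel sizes shown are the defaults):
 *
 * <pre>{@code
 * Block leNet =
 *         LeNet.builder().setNumChannels(new int[] {6, 16, 120, 84}).setOutSize(10).build();
 * }</pre>
 *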
* @see <a href="https://d2l.djl.ai/chapter_convolutional-neural-networks/lenet.html">The D2L
* chapter on LeNet</a>
*/
public final class LeNet {
private LeNet() {}
/**
* Creates a LeNet network block with the help of the LeNet Builder.
*
* @param builder the {@link LeNet.Builder} with the necessary arguments.
* @return a LeNet block.
*/
public static Block leNet(Builder builder) {
return new SequentialBlock()
.add(
Conv2d.builder()
.setKernelShape(new Shape(5, 5))
.optPadding(new Shape(2, 2))
.optBias(false)
.setFilters(builder.numChannels[0])
.build())
.add(Activation::sigmoid)
.add(Pool.avgPool2dBlock(new Shape(5, 5), new Shape(2, 2), new Shape(2, 2)))
.add(
Conv2d.builder()
.setKernelShape(new Shape(5, 5))
.setFilters(builder.numChannels[1])
.build())
.add(Activation::sigmoid)
.add(Pool.avgPool2dBlock(new Shape(5, 5), new Shape(2, 2), new Shape(2, 2)))
// Blocks.batchFlattenBlock() will transform the input of the shape (batch
// size, channel,
// height, width) into the input of the shape (batch size,
// channel * height * width)
.add(Blocks.batchFlattenBlock())
.add(Linear.builder().setUnits(builder.numChannels[2]).build())
.add(Activation::sigmoid)
.add(Linear.builder().setUnits(builder.numChannels[3]).build())
.add(Activation::sigmoid)
.add(Linear.builder().setUnits(builder.outSize).build());
}
/**
* Creates a builder to build a {@link LeNet}.
*
* @return a new builder
*/
public static LeNet.Builder builder() {
return new LeNet.Builder();
}
/** The Builder to construct a {@link LeNet} object. */
public static final class Builder {
int numLayers = 4;
int[] numChannels = {6, 16, 120, 84};
long outSize = 10;
Builder() {}
/**
* Sets the number of channels for the LeNet blocks.
*
* @param numChannels the number of channels for every LeNet block.
* @return this {@code Builder}
*/
public LeNet.Builder setNumChannels(int[] numChannels) {
if (numChannels.length != numLayers) {
throw new IllegalArgumentException(
"number of channels can be equal to " + numLayers);
}
this.numChannels = numChannels;
return this;
}
/**
* Sets the size of the output.
*
* @param outSize the output size
* @return this {@code Builder}
*/
public Builder setOutSize(long outSize) {
this.outSize = outSize;
return this;
}
/**
* Builds a {@link LeNet} block.
*
* @return the {@link LeNet} block
*/
public Block build() {
return leNet(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/MobileNetV1.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.core.Linear;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.nn.pooling.Pool;
/**
 * {@code MobileNetV1} contains a generic implementation of MobileNet adapted from <a
 * href="https://github.com/weiaicunzai/pytorch-cifar100/blob/master/models/mobilenet.py">
 * weiaicunzai's pytorch-cifar100 implementation</a>.
 *
 * <p>See <a href="https://arxiv.org/pdf/1704.04861.pdf">the MobileNet paper</a> for more
 * information about MobileNet.
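 *
 * <p>A construction sketch (the values shown are the defaults, for illustration):
 *
 * <pre>{@code
 * Block net = MobileNetV1.builder().optWidthMultiplier(1f).setOutSize(10).build();
 * }</pre>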
*/
public final class MobileNetV1 {
static final int[] FILTERS = {32, 64, 128, 128, 256, 256, 512, 512, 1024, 1024};
private MobileNetV1() {}
/**
* Builds a {@link Block} that represent a depthWise-pointWise Unit used in the implementation
* of the MobileNet Model.
*
 * @param inputChannels the number of input channels, used for the depthWise kernel
 * @param outputChannels the number of output channels, used for the pointWise kernel
 * @param stride the stride of the depthWise kernel
 * @param builder the builder, used to obtain batchNormMomentum
* @return a {@link Block} that represent a depthWise-pointWise Unit
*/
public static Block depthSeparableConv2d(
int inputChannels, int outputChannels, int stride, Builder builder) {
// depthWise does not include bias
SequentialBlock depthWise = new SequentialBlock();
depthWise
.add(
Conv2d.builder()
.setKernelShape(
new Shape(3, 3)) // the kernel size of depthWise is always 3
.optBias(false)
.optPadding(new Shape(1, 1)) // padding = same
.optStride(new Shape(stride, stride)) // stride is either 2 or 1
.optGroups(
inputChannels) // depthWise with 1 filter per input channel
.setFilters(inputChannels)
.build())
.add( // add a batchNorm
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(builder.batchNormMomentum)
.build())
.add(Activation.reluBlock());
SequentialBlock pointWise = new SequentialBlock();
pointWise
.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1)) // no padding or stride
.setFilters(outputChannels)
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(builder.batchNormMomentum)
.build())
.add(Activation.reluBlock());
return depthWise.add(pointWise); // two blocks are merged together
}
/**
* Creates a new {@link Block} of {@link MobileNetV1} with the arguments from the given {@link
* Builder}.
*
* @param builder the {@link Builder} with the necessary arguments
* @return a {@link Block} that represents the required MobileNet model
*/
public static Block mobilenet(Builder builder) {
// no bias in MobileNet
SequentialBlock mobileNet = new SequentialBlock();
mobileNet
.add(
// conv1
new SequentialBlock()
.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.optBias(false)
.optStride(new Shape(2, 2))
.optPadding(new Shape(1, 1)) // padding = 'same'
.setFilters(
(int)
(FILTERS[0]
* builder.widthMultiplier))
.build())
.add(
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(builder.batchNormMomentum)
.build())
.add(Activation.reluBlock()))
// separable conv1
.add(
depthSeparableConv2d(
(int) (FILTERS[0] * builder.widthMultiplier),
(int) (FILTERS[1] * builder.widthMultiplier),
1,
builder))
// separable conv2
.add(
depthSeparableConv2d(
(int) (FILTERS[1] * builder.widthMultiplier),
(int) (FILTERS[2] * builder.widthMultiplier),
2,
builder))
// separable conv3
.add(
depthSeparableConv2d(
(int) (FILTERS[2] * builder.widthMultiplier),
(int) (FILTERS[3] * builder.widthMultiplier),
1,
builder))
// separable conv4
.add(
depthSeparableConv2d(
(int) (FILTERS[3] * builder.widthMultiplier),
(int) (FILTERS[4] * builder.widthMultiplier),
2,
builder))
// separable conv5
.add(
depthSeparableConv2d(
(int) (FILTERS[4] * builder.widthMultiplier),
(int) (FILTERS[5] * builder.widthMultiplier),
1,
builder))
// separable conv6
.add(
depthSeparableConv2d(
(int) (FILTERS[5] * builder.widthMultiplier),
(int) (FILTERS[6] * builder.widthMultiplier),
2,
builder))
// separable conv7*5
.add(
depthSeparableConv2d(
(int) (FILTERS[6] * builder.widthMultiplier),
(int) (FILTERS[7] * builder.widthMultiplier),
1,
builder))
.add(
depthSeparableConv2d(
(int) (FILTERS[6] * builder.widthMultiplier),
(int) (FILTERS[7] * builder.widthMultiplier),
1,
builder))
.add(
depthSeparableConv2d(
(int) (FILTERS[6] * builder.widthMultiplier),
(int) (FILTERS[7] * builder.widthMultiplier),
1,
builder))
.add(
depthSeparableConv2d(
(int) (FILTERS[6] * builder.widthMultiplier),
(int) (FILTERS[7] * builder.widthMultiplier),
1,
builder))
.add(
depthSeparableConv2d(
(int) (FILTERS[6] * builder.widthMultiplier),
(int) (FILTERS[7] * builder.widthMultiplier),
1,
builder))
// separable conv8
.add(
depthSeparableConv2d(
(int) (FILTERS[7] * builder.widthMultiplier),
(int) (FILTERS[8] * builder.widthMultiplier),
2,
builder))
// separable conv9
.add(
depthSeparableConv2d(
(int) (FILTERS[8] * builder.widthMultiplier),
(int) (FILTERS[9] * builder.widthMultiplier),
1,
                                builder)) // note: this may deviate from the paper's table
// AveragePool
.add(Pool.globalAvgPool2dBlock())
// FC
.add(Linear.builder().setUnits(builder.outSize).build());
return mobileNet;
}
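    // Illustrative sketch (not part of the original source): typical builder usage;
    // the width multiplier and output size are assumed example values.
    //
    //   Block mobileNetV1 = MobileNetV1.builder()
    //           .optWidthMultiplier(0.5f) // scales every entry of FILTERS
    //           .setOutSize(10)           // e.g. CIFAR-10
    //           .build();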
/**
* Creates a builder to build a {@link MobileNetV1}.
*
* @return a new builder
*/
public static Builder builder() {
return new MobileNetV1.Builder();
}
/** The Builder to construct a {@link MobileNetV1} object. */
public static final class Builder {
float batchNormMomentum = 0.9f;
        float widthMultiplier = 1f; // width multiplier (also named alpha) defined in the paper
long outSize = 10; // 10 as default for basic datasets like cifar-10 or mnist
Builder() {}
/**
* Sets the widthMultiplier of MobileNet.
*
* @param widthMultiplier the widthMultiplier of MobileNet
* @return this {@code Builder}
*/
public Builder optWidthMultiplier(float widthMultiplier) {
this.widthMultiplier = widthMultiplier;
return this;
}
/**
* Sets the momentum of batchNorm layer.
*
* @param batchNormMomentum the momentum
* @return this {@code Builder}
*/
public Builder optBatchNormMomentum(float batchNormMomentum) {
this.batchNormMomentum = batchNormMomentum;
return this;
}
/**
* Sets the size of the output.
*
* @param outSize the output size
* @return this {@code Builder}
*/
public Builder setOutSize(long outSize) {
this.outSize = outSize;
return this;
}
/**
* Builds a {@link MobileNetV1} block.
*
* @return the {@link MobileNetV1} block
*/
public Block build() {
return mobilenet(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/MobileNetV2.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.ParallelBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.nn.pooling.Pool;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * {@code MobileNetV2} contains a generic implementation of MobileNetV2 adapted from
 * https://github.com/weiaicunzai/pytorch-cifar100/blob/master/models/mobilenetv2.py (Original
 * author weiaicunzai).
 *
 * <p>See https://arxiv.org/pdf/1801.04381.pdf for more information about MobileNetV2.
 */
public final class MobileNetV2 {
public static final int FILTERLENGTH = 9;
public static final int REPEATLENGTH = 9;
public static final int STRIDELENGTH = 9;
public static final int MULTILENGTH = 7;
private MobileNetV2() {}
    /**
     * Builds a {@link Block} that represents an inverted residual unit used in the implementation
     * of the MobileNetV2 model.
     *
     * @param inputChannels the number of input channels of the block
     * @param outputChannels the number of output channels of the block
     * @param stride the stride of the depthWise kernel
     * @param t the expansion factor (t) applied by the first pointWise block
     * @param batchNormMomentum the momentum of the batchNorm layers
     * @return a {@link Block} that represents an inverted residual unit
     */
public static Block linearBottleNeck(
int inputChannels, int outputChannels, int stride, int t, float batchNormMomentum) {
SequentialBlock block = new SequentialBlock();
block.add(
Conv2d.builder() // PointWise
.setFilters(inputChannels * t)
.setKernelShape(new Shape(1, 1))
.optBias(false)
.build())
// add a batchNorm
.add(BatchNorm.builder().optEpsilon(2E-5f).optMomentum(batchNormMomentum).build())
// add a relu
.add(Activation.relu6Block())
.add(
Conv2d.builder() // DepthWise
.setKernelShape(new Shape(3, 3))
.setFilters(inputChannels * t)
.optStride(new Shape(stride, stride))
.optPadding(new Shape(1, 1))
.optGroups(inputChannels * t)
.optBias(false)
.build())
.add(BatchNorm.builder().optEpsilon(2E-5f).optMomentum(batchNormMomentum).build())
.add(Activation.relu6Block())
.add(
Conv2d.builder() // PointWise
.setFilters(outputChannels)
.setKernelShape(new Shape(1, 1))
.optBias(false)
.build())
.add(BatchNorm.builder().optEpsilon(2E-5f).optMomentum(batchNormMomentum).build());
// if dimMatch,then add x
if (stride == 1 && inputChannels == outputChannels) {
return new ParallelBlock(
list ->
new NDList(
NDArrays.add(
list.get(0).singletonOrThrow(),
list.get(1).singletonOrThrow())),
Arrays.asList(block, Blocks.identityBlock()));
}
return block; // No relu6Block here
}
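    // Illustrative sketch (not part of the original source): the residual shortcut is
    // only attached when stride == 1 and the channel counts match; the arguments below
    // are assumed example values.
    //
    //   Block withShortcut = linearBottleNeck(32, 32, 1, 6, 0.9f); // ParallelBlock with identity
    //   Block plain = linearBottleNeck(32, 64, 2, 6, 0.9f); // plain SequentialBlock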
    /**
     * Builds a {@link Block} that represents multiple repeats of an inverted residual unit.
     *
     * @param repeat the number of times the inverted residual unit is repeated
     * @param inputChannels the number of input channels of the block
     * @param outputChannels the number of output channels of the block
     * @param stride the stride of the first inverted residual unit; subsequent units use stride 1
     * @param t the expansion factor (t) of the pointWise kernel
     * @param batchNormMomentum the momentum of the batchNorm layers
     * @return a {@link Block} that represents several repeated inverted residual units
     */
public static Block makeStage(
int repeat,
int inputChannels,
int outputChannels,
int stride,
int t,
float batchNormMomentum) {
SequentialBlock layers = new SequentialBlock();
layers.add(linearBottleNeck(inputChannels, outputChannels, stride, t, batchNormMomentum));
for (int i = 0; i < repeat - 1; i++) {
layers.add(linearBottleNeck(outputChannels, outputChannels, 1, t, batchNormMomentum));
}
return layers;
}
/**
* Creates a new {@link Block} of {@link MobileNetV2} with the arguments from the given {@link
* Builder}.
*
* @param builder the {@link Builder} with the necessary arguments
* @return a {@link Block} that represents the required MobileNetV2 model
*/
public static Block mobilenetV2(Builder builder) {
SequentialBlock mobileNet = new SequentialBlock();
SequentialBlock pre = new SequentialBlock();
for (int i = 0; i < builder.repeatTimes[0]; i++) { // add as a sequence
pre.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters(builder.filters[0])
.optStride(new Shape(builder.strides[0], builder.strides[0]))
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(builder.batchNormMomentum)
.build())
.add(Activation.relu6Block());
}
List<Block> bottleNecks = new ArrayList<>();
for (int i = 0; i < MULTILENGTH; i++) {
bottleNecks.add(
makeStage(
builder.repeatTimes[i + 1],
builder.filters[i],
builder.filters[i + 1],
builder.strides[i + 1],
builder.multiTimes[i],
builder.batchNormMomentum));
}
SequentialBlock conv1 = new SequentialBlock();
for (int i = 0; i < builder.repeatTimes[8]; i++) {
conv1.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters(builder.filters[8])
.optStride(new Shape(builder.strides[8], builder.strides[8]))
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(builder.batchNormMomentum)
.build())
.add(Activation.relu6Block());
}
Block conv2 =
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters((int) builder.outSize)
.build();
return mobileNet
.add(pre)
.addAll(bottleNecks)
.add(conv1)
.add(Pool.globalAvgPool2dBlock())
.addSingleton(
array -> array.reshape(array.getShape().get(0), builder.filters[8], 1, 1))
// reshape for conv1*1
.add(conv2)
// reshape for output
.addSingleton(array -> array.reshape(array.getShape().get(0), builder.outSize));
}
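    // Illustrative sketch (not part of the original source): building the network with
    // the paper's default stage configuration; the output size is an assumed example.
    //
    //   Block mobileNetV2 = MobileNetV2.builder()
    //           .setOutSize(100) // e.g. CIFAR-100
    //           .build();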
/**
* Creates a builder to build a {@link MobileNetV2}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** The Builder to construct a {@link MobileNetV2} object. */
public static final class Builder {
float batchNormMomentum = 0.9f;
long outSize = 10; // 10 as default for basic datasets like cifar-10 or mnist
int[] repeatTimes = {
1, 1, 2, 3, 4, 3, 3, 1, 1
}; // repeatTimes(n) of each block defined in the paper
int[] filters = {
32, 16, 24, 32, 64, 96, 160, 320, 1280
}; // filters(c) of each Block defined in the paper
int[] strides = {
2, 1, 2, 2, 2, 1, 2, 1, 1
}; // strides(s) of each block defined in the paper
int[] multiTimes = {
1, 6, 6, 6, 6, 6, 6
}; // multipleTimes(t) of each linearBottleneck defined in the paper
Builder() {}
/**
* Sets the momentum of batchNorm layer.
*
* @param batchNormMomentum the momentum
* @return this {@code Builder}
*/
public Builder optBatchNormMomentum(float batchNormMomentum) {
this.batchNormMomentum = batchNormMomentum;
return this;
}
/**
* Sets the size of the output.
*
* @param outSize the output size
* @return this {@code Builder}
*/
public Builder setOutSize(long outSize) {
this.outSize = outSize;
return this;
}
/**
* Sets the filters(the value c defined in the paper) of customized MobileNetV2.
*
* @param filters the customized filter
* @return this {@code Builder}
*/
public Builder optFilters(int[] filters) {
if (filters.length != FILTERLENGTH) {
throw new IllegalArgumentException(
String.format(
"optFilters requires filters of length %d, but was given filters of"
+ " length %d instead",
FILTERLENGTH, filters.length));
}
this.filters = filters;
return this;
}
/**
* Sets the repeatTimes(the value n defined in the paper) of each block of MobileNetV2.
*
* @param repeatTimes the customized repeatTimes
* @return this {@code Builder}
*/
public Builder optRepeatTimes(int[] repeatTimes) {
if (repeatTimes.length != REPEATLENGTH) {
throw new IllegalArgumentException(
String.format(
"optRepeatTimes requires repeatTimes of length %d, but was given"
+ " repeatTimes of length %d instead",
REPEATLENGTH, repeatTimes.length));
}
this.repeatTimes = repeatTimes;
return this;
}
/**
* Sets the strides(the value s defined in the paper) of each block of MobileNetV2.
*
* @param strides the customized strides
* @return this {@code Builder}
*/
public Builder optStrides(int[] strides) {
if (strides.length != STRIDELENGTH) {
throw new IllegalArgumentException(
String.format(
"optStrides requires strides of length %d, but was given strides of"
+ " length %d instead",
STRIDELENGTH, strides.length));
}
this.strides = strides;
return this;
}
/**
* Sets the multiTimes(the value t defined in the paper) of each bottleNeck of MobileNetV2.
*
* @param multiTimes the customized multiTimes
* @return this {@code Builder}
*/
public Builder optMultiTimes(int[] multiTimes) {
if (multiTimes.length != MULTILENGTH) {
throw new IllegalArgumentException(
String.format(
"optMultiTimes requires multiTimes of length %d, but was given"
+ " multiTimes of length %d instead",
MULTILENGTH, multiTimes.length));
}
this.multiTimes = multiTimes;
return this;
}
/**
* Builds a {@link MobileNetV2} block.
*
* @return the {@link MobileNetV2} block
*/
public Block build() {
return mobilenetV2(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/NiN.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.norm.Dropout;
import ai.djl.nn.pooling.Pool;
/**
 * NiN uses convolutional layers with window shapes of 11×11, 5×5, and 3×3, and the corresponding
 * numbers of output channels are the same as in AlexNet. Each NiN block is followed by a maximum
 * pooling layer with a stride of 2 and a window shape of 3×3.
*
* <p>The conventional convolutional layer uses linear filters followed by a nonlinear activation
* function to scan the input.
*
* <p>NiN model from the "Network In Network" http://arxiv.org/abs/1312.4400 paper.
*
* @see <a href="https://d2l.djl.ai/chapter_convolutional-modern/nin.html">The D2L chapter on
* NiN</a>
*/
public final class NiN {
private NiN() {}
/**
* The NiN block consists of one convolutional layer followed by two 1×1 convolutional layers
* that act as per-pixel fully-connected layers with ReLU activations. The convolution width of
* the first layer is typically set by the user. The subsequent widths are fixed to 1×1.
*
* @param builder the {@link NiN.Builder} with the necessary arguments.
* @return a NiN block.
*/
public static Block niN(Builder builder) {
NiN nin = new NiN();
return new SequentialBlock()
.add(
nin.niNBlock(
builder.numChannels[0],
new Shape(11, 11),
new Shape(4, 4),
new Shape(0, 0)))
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2)))
.add(
nin.niNBlock(
builder.numChannels[1],
new Shape(5, 5),
new Shape(1, 1),
new Shape(2, 2)))
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2)))
.add(
nin.niNBlock(
builder.numChannels[2],
new Shape(3, 3),
new Shape(1, 1),
new Shape(1, 1)))
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2)))
.add(Dropout.builder().optRate(builder.dropOutRate).build())
.add(
nin.niNBlock(
builder.numChannels[3],
new Shape(3, 3),
new Shape(1, 1),
new Shape(1, 1)))
// The global average pooling layer automatically sets the window shape
// to the height and width of the input
.add(Pool.globalAvgPool2dBlock())
// Transform the four-dimensional output into two-dimensional output
// with a shape of (batch size, 10)
.add(Blocks.batchFlattenBlock());
}
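    // Illustrative sketch (not part of the original source): the last NiN block already
    // emits one channel per class, so the final numChannels entry is the class count;
    // the values below mirror the builder defaults.
    //
    //   Block nin = NiN.builder()
    //           .setNumChannels(new int[] {96, 256, 384, 10}) // last entry = class count
    //           .setDropOutRate(0.5f)
    //           .build();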
/**
* Creates a builder to build a {@link NiN}.
*
* @return a new builder
*/
public static NiN.Builder builder() {
return new Builder();
}
/**
* Creates a constituent NiN block that becomes a part of the whole NiN model.
*
     * @param numChannels the number of channels in a NiN block
     * @param kernelShape the kernel shape of the first convolutional layer in a NiN block
     * @param strideShape the stride shape of the first convolutional layer in a NiN block
     * @param paddingShape the padding shape of the first convolutional layer in a NiN block
     * @return a constituent NiN block
*/
public SequentialBlock niNBlock(
int numChannels, Shape kernelShape, Shape strideShape, Shape paddingShape) {
return new SequentialBlock()
.add(
Conv2d.builder()
.setKernelShape(kernelShape)
.optStride(strideShape)
.optPadding(paddingShape)
.setFilters(numChannels)
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters(numChannels)
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters(numChannels)
.build())
.add(Activation::relu);
}
/** The Builder to construct a {@link NiN} object. */
public static final class Builder {
int numLayers = 4;
int[] numChannels = {96, 256, 384, 10};
float dropOutRate = 0.5f;
Builder() {}
/**
* Sets the dropout rate in the network.
*
* @param dropOutRate the dropout rate
* @return this {@code Builder}
*/
public NiN.Builder setDropOutRate(float dropOutRate) {
this.dropOutRate = dropOutRate;
return this;
}
/**
* Sets the number of channels for the niN blocks.
*
* @param numChannels the number of channels for every niN block.
* @return this {@code Builder}
*/
public NiN.Builder setNumChannels(int[] numChannels) {
if (numChannels.length != numLayers) {
throw new IllegalArgumentException(
"number of channels can be equal to " + numLayers);
}
this.numChannels = numChannels;
return this;
}
/**
* Builds a {@link NiN} block.
*
* @return the {@link NiN} block
*/
public Block build() {
return niN(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/ResNetV1.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.ParallelBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.core.Linear;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.nn.pooling.Pool;
import java.util.Arrays;
/**
* {@code ResNetV1} contains a generic implementation of ResNet adapted from
* https://github.com/tornadomeet/ResNet/blob/master/symbol_resnet.py (Original author Wei Wu) by
* Antti-Pekka Hynninen.
*
* <p>Implementing the original resnet ILSVRC 2015 winning network from Kaiming He, Xiangyu Zhang,
* Shaoqing Ren, Jian Sun. "Deep Residual Learning for Image Recognition"
*
* @see <a href="https://d2l.djl.ai/chapter_convolutional-modern/resnet.html">The D2L chapter on
* ResNet</a>
*/
public final class ResNetV1 {
private ResNetV1() {}
/**
* Builds a {@link Block} that represents a residual unit used in the implementation of the
* Resnet model.
*
* @param numFilters the number of output channels
* @param stride the stride of the convolution in each dimension
* @param dimMatch whether the number of channels between input and output has to remain the
* same
* @param bottleneck whether to use bottleneck architecture
* @param batchNormMomentum the momentum to be used for {@link BatchNorm}
* @return a {@link Block} that represents a residual unit
*/
public static Block residualUnit(
int numFilters,
final Shape stride,
final boolean dimMatch,
boolean bottleneck,
float batchNormMomentum) {
SequentialBlock resUnit = new SequentialBlock();
if (bottleneck) {
resUnit.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters(numFilters / 4)
.optStride(stride)
.optPadding(new Shape(0, 0))
.optBias(true)
.build())
.add(
BatchNorm.builder()
.optEpsilon(1e-5f)
.optMomentum(batchNormMomentum)
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.setFilters(numFilters / 4)
.optStride(new Shape(1, 1))
.optPadding(new Shape(1, 1))
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(batchNormMomentum)
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters(numFilters)
.optStride(new Shape(1, 1))
.optPadding(new Shape(0, 0))
.optBias(true)
.build())
.add(
BatchNorm.builder()
.optEpsilon(1E-5f)
.optMomentum(batchNormMomentum)
.build());
} else {
resUnit.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.setFilters(numFilters)
.optStride(stride)
.optPadding(new Shape(1, 1))
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(1E-5f)
.optMomentum(batchNormMomentum)
.build())
.add(Activation::relu)
.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.setFilters(numFilters)
.optStride(new Shape(1, 1))
.optPadding(new Shape(1, 1))
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(1E-5f)
.optMomentum(batchNormMomentum)
.build());
}
SequentialBlock shortcut = new SequentialBlock();
if (dimMatch) {
shortcut.add(Blocks.identityBlock());
} else {
shortcut.add(
Conv2d.builder()
.setKernelShape(new Shape(1, 1))
.setFilters(numFilters)
.optStride(stride)
.optPadding(new Shape(0, 0))
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(1E-5f)
.optMomentum(batchNormMomentum)
.build());
}
return new ParallelBlock(
list -> {
NDList unit = list.get(0);
NDList parallel = list.get(1);
return new NDList(
unit.singletonOrThrow()
.add(parallel.singletonOrThrow())
.getNDArrayInternal()
.relu());
},
Arrays.asList(resUnit, shortcut));
}
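    // Illustrative sketch (not part of the original source): with dimMatch == false the
    // shortcut is a strided 1x1 convolution, so both parallel branches still produce
    // matching shapes before the addition; the arguments are assumed example values.
    //
    //   Block unit = residualUnit(64, new Shape(2, 2), false, false, 0.9f);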
/**
* Creates a new {@link Block} of {@code ResNetV1} with the arguments from the given {@link
* Builder}.
*
* @param builder the {@link Builder} with the necessary arguments
* @return a {@link Block} that represents the required ResNet model
*/
public static SequentialBlock resnet(Builder builder) {
int numStages = builder.units.length;
long height = builder.imageShape.get(1);
SequentialBlock resNet = new SequentialBlock();
if (height <= 32) {
resNet.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.setFilters(builder.filters[0])
.optStride(new Shape(1, 1))
.optPadding(new Shape(1, 1))
.optBias(false)
.build());
} else {
resNet.add(
Conv2d.builder()
.setKernelShape(new Shape(7, 7))
.setFilters(builder.filters[0])
.optStride(new Shape(2, 2))
.optPadding(new Shape(3, 3))
.optBias(false)
.build())
.add(
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(builder.batchNormMomentum)
.build())
.add(Activation.reluBlock())
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(1, 1)));
}
Shape resStride = new Shape(1, 1);
for (int i = 0; i < numStages; i++) {
resNet.add(
residualUnit(
builder.filters[i + 1],
resStride,
false,
builder.bottleneck,
builder.batchNormMomentum));
for (int j = 0; j < builder.units[i] - 1; j++) {
resNet.add(
residualUnit(
builder.filters[i + 1],
new Shape(1, 1),
true,
builder.bottleneck,
builder.batchNormMomentum));
}
if (i == 0) {
resStride = new Shape(2, 2);
}
}
return resNet.add(Pool.globalAvgPool2dBlock())
.add(Blocks.batchFlattenBlock())
.add(Linear.builder().setUnits(builder.outSize).build())
.add(Blocks.batchFlattenBlock());
}
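    // Illustrative sketch (not part of the original source): typical builder usage;
    // numLayers must be one of the depths handled in Builder.build() below, and the
    // other values are assumed examples.
    //
    //   Block resnet50 = ResNetV1.builder()
    //           .setNumLayers(50)
    //           .setOutSize(1000)
    //           .setImageShape(new Shape(3, 224, 224))
    //           .build();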
/**
* Creates a builder to build a {@link ResNetV1}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** The Builder to construct a {@link ResNetV1} object. */
public static final class Builder {
int numLayers;
int numStages;
long outSize;
float batchNormMomentum = 0.9f;
Shape imageShape;
boolean bottleneck;
int[] units;
int[] filters;
Builder() {}
/**
* Sets the number of layers in the network.
*
* @param numLayers the number of layers
* @return this {@code Builder}
*/
public Builder setNumLayers(int numLayers) {
this.numLayers = numLayers;
return this;
}
/**
* Sets the size of the output.
*
* @param outSize the output size
* @return this {@code Builder}
*/
public Builder setOutSize(long outSize) {
this.outSize = outSize;
return this;
}
/**
* Sets the momentum of batchNorm layer.
*
* @param batchNormMomentum the momentum
* @return this {@code Builder}
*/
public Builder optBatchNormMomentum(float batchNormMomentum) {
this.batchNormMomentum = batchNormMomentum;
return this;
}
/**
* Sets the shape of the image.
*
* @param imageShape the shape of the image
* @return this {@code Builder}
*/
public Builder setImageShape(Shape imageShape) {
this.imageShape = imageShape;
return this;
}
/**
* Builds a {@link ResNetV1} block.
*
* @return the {@link ResNetV1} block
*/
public SequentialBlock build() {
if (imageShape == null) {
throw new IllegalArgumentException("Must set imageShape");
}
long height = imageShape.get(1);
if (height <= 28) {
numStages = 3;
int perUnit;
if ((numLayers - 2) % 9 == 0 && numLayers >= 164) {
perUnit = (numLayers - 2) / 9;
filters = new int[] {16, 64, 128, 256};
bottleneck = true;
} else if ((numLayers - 2) % 6 == 0 && numLayers < 164) {
perUnit = (numLayers - 2) / 6;
filters = new int[] {16, 16, 32, 64};
bottleneck = false;
} else {
throw new IllegalArgumentException(
"no experiments done on num_layers "
+ numLayers
+ ", you can do it yourself");
}
units = new int[numStages];
for (int i = 0; i < numStages; i++) {
units[i] = perUnit;
}
} else {
numStages = 4;
if (numLayers >= 50) {
filters = new int[] {64, 256, 512, 1024, 2048};
bottleneck = true;
} else {
filters = new int[] {64, 64, 128, 256, 512};
                    bottleneck = false; // ResNet-18/34 use basic (non-bottleneck) units
}
if (numLayers == 18) {
units = new int[] {2, 2, 2, 2};
} else if (numLayers == 34) {
units = new int[] {3, 4, 6, 3};
} else if (numLayers == 50) {
units = new int[] {3, 4, 6, 3};
} else if (numLayers == 101) {
units = new int[] {3, 4, 23, 3};
} else if (numLayers == 152) {
units = new int[] {3, 8, 36, 3};
} else if (numLayers == 200) {
units = new int[] {3, 24, 36, 3};
} else if (numLayers == 269) {
units = new int[] {3, 30, 48, 8};
} else {
throw new IllegalArgumentException(
"no experiments done on num_layers "
+ numLayers
+ ", you can do it yourself");
}
}
return resnet(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/ResnetBlockFactory.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.Model;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.BlockFactory;
import ai.djl.translate.ArgumentsUtil;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
/** A {@link BlockFactory} class that creates {@link ResNetV1} block. */
public class ResnetBlockFactory implements BlockFactory {
private static final long serialVersionUID = 1L;
/** {@inheritDoc} */
@Override
public Block newBlock(Model model, Path modelPath, Map<String, ?> arguments) {
int numLayers = ArgumentsUtil.intValue(arguments, "numLayers");
long outSize = ArgumentsUtil.longValue(arguments, "outSize");
@SuppressWarnings("unchecked")
Shape shape =
new Shape(
((List<Double>) arguments.get("imageShape"))
.stream().mapToLong(Double::longValue).toArray());
ResNetV1.Builder blockBuilder =
ResNetV1.builder().setNumLayers(numLayers).setOutSize(outSize).setImageShape(shape);
if (arguments.containsKey("batchNormMomentum")) {
float batchNormMomentum = ArgumentsUtil.floatValue(arguments, "batchNormMomentum");
blockBuilder.optBatchNormMomentum(batchNormMomentum);
}
return blockBuilder.build();
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/SqueezeNet.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.ParallelBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.norm.Dropout;
import ai.djl.nn.pooling.Pool;
import java.util.Arrays;
/**
 * {@code SqueezeNet} contains a generic implementation of SqueezeNet adapted from the [torchvision
 * implementation](https://github.com/pytorch/vision/blob/master/torchvision/models/squeezenet.py).
 *
 * <p>SqueezeNet is an efficient network for image classification that combines good performance
 * with a tiny model size, making it a good choice for applications that run on mobile or edge
 * devices. This implements the original SqueezeNet from Forrest N. Iandola, Song Han, Matthew W.
 * Moskewicz, Khalid Ashraf, William J. Dally, Kurt Keutzer. "SQUEEZENET: ALEXNET-LEVEL ACCURACY
 * WITH 50X FEWER PARAMETERS AND 0.5MB MODEL SIZE"
*/
public final class SqueezeNet {
private SqueezeNet() {}
static Block fire(int squeezePlanes, int expand1x1Planes, int expand3x3Planes) {
SequentialBlock squeezeWithActivation =
new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(squeezePlanes)
.setKernelShape(new Shape(1, 1))
.build())
.add(Activation::relu);
SequentialBlock expand1x1 =
new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(expand1x1Planes)
.setKernelShape(new Shape(1, 1))
.build())
.add(Activation::relu);
SequentialBlock expand3x3 =
new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(expand3x3Planes)
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.build())
.add(Activation::relu);
return new SequentialBlock()
.add(squeezeWithActivation)
.add(
new ParallelBlock(
list ->
new NDList(
NDArrays.concat(
list.get(0).addAll(list.get(1)), 1)),
Arrays.asList(expand1x1, expand3x3)));
}
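    // Illustrative sketch (not part of the original source): a fire module's output
    // channel count is expand1x1Planes + expand3x3Planes, because the two expand
    // branches are concatenated along the channel axis (axis 1).
    //
    //   Block fireModule = fire(16, 64, 64); // squeeze to 16, expand to 64 + 64 = 128 channels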
/**
     * Constructs SqueezeNet v1.1.
*
* @param outSize the number of output classes
* @return squeezenet {@link Block}
*/
public static Block squeezenet(int outSize) {
return new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(64)
.setKernelShape(new Shape(3, 3))
.optStride(new Shape(2, 2))
.build())
.add(Activation::relu)
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(0, 0), true))
.add(fire(16, 64, 64))
.add(fire(16, 64, 64))
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(0, 0), true))
.add(fire(32, 128, 128))
.add(fire(32, 128, 128))
.add(Pool.maxPool2dBlock(new Shape(3, 3), new Shape(2, 2), new Shape(0, 0), true))
.add(fire(48, 192, 192))
.add(fire(48, 192, 192))
.add(fire(64, 256, 256))
.add(fire(64, 256, 256))
// Classifier
.add(Dropout.builder().optRate(0.5f).build())
.add(Conv2d.builder().setFilters(outSize).setKernelShape(new Shape(1, 1)).build())
.add(Activation::relu)
.add(Pool.globalAvgPool2dBlock())
.add(Blocks.batchFlattenBlock());
}
    }
}
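// Illustrative sketch (not part of the original source): the network is fully
// convolutional, so the class count simply sets the filters of the final 1x1 conv.
//
//   Block squeezeNet = SqueezeNet.squeezenet(1000); // e.g. ImageNet's 1000 classes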
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/VGG.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.classification;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.core.Linear;
import ai.djl.nn.norm.Dropout;
import ai.djl.nn.pooling.Pool;
/**
* VGG model from the "Very Deep Convolutional Networks for Large-Scale Image Recognition"
* https://arxiv.org/abs/1409.1556 paper.
*
* @see <a href="https://d2l.djl.ai/chapter_convolutional-modern/vgg.html">The D2L chapter on
* VGG</a>
*/
public final class VGG {
private VGG() {}
/**
* Creates a VGG block with the help of the VGG Builder.
*
* @param builder the {@link VGG.Builder} with the necessary arguments
* @return a VGG block.
*/
public static Block vgg(Builder builder) {
SequentialBlock block = new SequentialBlock();
VGG vgg = new VGG();
// The convolutional layer part
for (int[] arr : builder.convArch) {
block.add(vgg.vggBlock(arr[0], arr[1]));
}
// The fully connected layer part
block.add(Blocks.batchFlattenBlock())
.add(Linear.builder().setUnits(4096).build())
.add(Activation::relu)
.add(Dropout.builder().optRate(0.5f).build())
.add(Linear.builder().setUnits(4096).build())
.add(Activation::relu)
.add(Dropout.builder().optRate(0.5f).build())
.add(Linear.builder().setUnits(builder.outSize).build());
return block;
}
/**
* Creates a constituent VGG block that becomes a part of the whole VGG model.
*
     * @param numConvs the number of convolutional layers in the block
     * @param numChannels the number of filters in each convolutional layer of the block
     * @return a constituent VGG block
*/
public SequentialBlock vggBlock(int numConvs, int numChannels) {
SequentialBlock tempBlock = new SequentialBlock();
for (int i = 0; i < numConvs; i++) {
tempBlock
.add(
Conv2d.builder()
.setFilters(numChannels)
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.build())
.add(Activation::relu);
}
tempBlock.add(Pool.maxPool2dBlock(new Shape(2, 2), new Shape(2, 2)));
return tempBlock;
}
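    // Illustrative sketch (not part of the original source): the VGG-16 variant
    // expressed as a convArch; 2 + 2 + 3 + 3 + 3 convolutions plus the 3 fully
    // connected layers give the 16 layers.
    //
    //   Block vgg16 = VGG.builder()
    //           .setNumLayers(16)
    //           .setConvArch(new int[][] {{2, 64}, {2, 128}, {3, 256}, {3, 512}, {3, 512}})
    //           .setOutSize(1000)
    //           .build();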
/**
* Creates a builder to build a {@link VGG}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** The Builder to construct a {@link VGG} object. */
public static final class Builder {
int numLayers = 11;
int[][] convArch = {{1, 64}, {1, 128}, {2, 256}, {2, 512}, {2, 512}};
long outSize = 10;
/**
         * Sets the number of layers. It is equal to the total sum of numConvs in convArch plus 3.
         *
         * @param numLayers the number of layers in the network; the default is 11
* @return this {@code Builder}
*/
public VGG.Builder setNumLayers(int numLayers) {
this.numLayers = numLayers;
return this;
}
/**
* Sets the number of blocks according to the user. It can be of multiple types, VGG-11,
* VGG-13, VGG-16, VGG-19.
*
* @param convArch 2-D array consisting of number of convolutions and the number of
* channels.
* @return this {@code Builder}
*/
public VGG.Builder setConvArch(int[][] convArch) {
int numConvs = 0;
for (int[] layer : convArch) {
numConvs += layer[0];
}
if (numConvs != (numLayers - 3)) {
throw new IllegalArgumentException(
"total sum of channels in the array "
+ "should be equal to the ( numLayers - 3 )");
}
this.convArch = convArch;
return this;
}
/**
* Sets the size of the output.
*
* @param outSize the output size
* @return this {@code Builder}
*/
public Builder setOutSize(long outSize) {
this.outSize = outSize;
return this;
}
/**
* Builds a {@link VGG} block.
*
* @return the {@link VGG} block
*/
public Block build() {
return vgg(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/classification/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the {@link ai.djl.Application.CV#IMAGE_CLASSIFICATION} models in the {@link
* ai.djl.basicmodelzoo.BasicModelZoo}.
*/
package ai.djl.basicmodelzoo.cv.classification;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection/ssd/SingleShotDetection.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.object_detection.ssd;
import ai.djl.MalformedModelException;
import ai.djl.modality.cv.MultiBoxPrior;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.AbstractBlock;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.LambdaBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.nn.pooling.Pool;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* {@code SingleShotDetection} is an implementation of {@link Block} that implements a Single Shot
* Detection (SSD) model for object detection.
*/
public final class SingleShotDetection extends AbstractBlock {
private static final byte VERSION = 2;
private List<Block> features;
private List<Block> classPredictionBlocks;
private List<Block> anchorPredictionBlocks;
private List<MultiBoxPrior> multiBoxPriors;
private int numClasses;
private SingleShotDetection(Builder builder) {
super(VERSION);
features = builder.features;
features.forEach((block) -> addChildBlock(block.getClass().getSimpleName(), block));
numClasses = builder.numClasses;
classPredictionBlocks = builder.classPredictionBlocks;
classPredictionBlocks.forEach(
(block) -> addChildBlock(block.getClass().getSimpleName(), block));
anchorPredictionBlocks = builder.anchorPredictionBlocks;
anchorPredictionBlocks.forEach(
(block) -> addChildBlock(block.getClass().getSimpleName(), block));
multiBoxPriors = builder.multiBoxPriors;
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
NDList networkOutput = inputs;
NDArray[] anchorsOutputs = new NDArray[features.size()];
NDArray[] classOutputs = new NDArray[features.size()];
NDArray[] boundingBoxOutputs = new NDArray[features.size()];
for (int i = 0; i < features.size(); i++) {
networkOutput = features.get(i).forward(parameterStore, networkOutput, training);
MultiBoxPrior multiBoxPrior = multiBoxPriors.get(i);
anchorsOutputs[i] = multiBoxPrior.generateAnchorBoxes(networkOutput.singletonOrThrow());
classOutputs[i] =
classPredictionBlocks
.get(i)
.forward(parameterStore, networkOutput, training)
.singletonOrThrow();
boundingBoxOutputs[i] =
anchorPredictionBlocks
.get(i)
.forward(parameterStore, networkOutput, training)
.singletonOrThrow();
}
NDArray anchors = NDArrays.concat(new NDList(anchorsOutputs), 1);
NDArray classPredictions = concatPredictions(new NDList(classOutputs));
NDArray boundingBoxPredictions = concatPredictions(new NDList(boundingBoxOutputs));
classPredictions = classPredictions.reshape(classPredictions.size(0), -1, numClasses + 1);
return new NDList(anchors, classPredictions, boundingBoxPredictions);
}
private NDArray concatPredictions(NDList output) {
// transpose and batch flatten
NDArray[] flattenOutput =
output.stream()
.map(array -> array.transpose(0, 2, 3, 1).reshape(array.size(0), -1))
.toArray(NDArray[]::new);
return NDArrays.concat(new NDList(flattenOutput), 1);
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes) {
try (NDManager manager = NDManager.newBaseManager()) {
// TODO: output shape is wrong
Shape[] childInputShapes = inputShapes;
Shape[] anchorShapes = new Shape[features.size()];
Shape[] classPredictionShapes = new Shape[features.size()];
Shape[] anchorPredictionShapes = new Shape[features.size()];
for (int i = 0; i < features.size(); i++) {
childInputShapes = features.get(i).getOutputShapes(childInputShapes);
anchorShapes[i] =
multiBoxPriors
.get(i)
.generateAnchorBoxes(manager.ones(childInputShapes[0]))
.getShape();
classPredictionShapes[i] =
classPredictionBlocks.get(i).getOutputShapes(childInputShapes)[0];
anchorPredictionShapes[i] =
anchorPredictionBlocks.get(i).getOutputShapes(childInputShapes)[0];
}
Shape anchorOutputShape = new Shape();
for (Shape shape : anchorShapes) {
anchorOutputShape = concatShape(anchorOutputShape, shape, 1);
}
NDList classPredictions = new NDList();
for (Shape shape : classPredictionShapes) {
classPredictions.add(manager.ones(shape));
}
NDArray classPredictionOutput = concatPredictions(classPredictions);
Shape classPredictionOutputShape =
classPredictionOutput
.reshape(classPredictionOutput.size(0), -1, numClasses + 1)
.getShape();
NDList anchorPredictions = new NDList();
for (Shape shape : anchorPredictionShapes) {
anchorPredictions.add(manager.ones(shape));
}
Shape anchorPredictionOutputShape = concatPredictions(anchorPredictions).getShape();
return new Shape[] {
anchorOutputShape, classPredictionOutputShape, anchorPredictionOutputShape
};
}
}
private Shape concatShape(Shape shape, Shape concat, int axis) {
if (shape.dimension() == 0) {
return concat;
}
if (shape.dimension() != concat.dimension()) {
throw new IllegalArgumentException("Shapes must have same dimensions");
}
long[] dimensions = new long[shape.dimension()];
for (int i = 0; i < shape.dimension(); i++) {
if (axis == i) {
dimensions[i] = shape.get(i) + concat.get(i);
} else {
if (shape.get(i) != concat.get(i)) {
throw new UnsupportedOperationException(
"These shapes cannot be concatenated along axis " + i);
}
dimensions[i] = shape.get(i);
}
}
return new Shape(dimensions);
}
/** {@inheritDoc} */
@Override
public void initialize(NDManager manager, DataType dataType, Shape... inputShapes) {
beforeInitialize(inputShapes);
Shape[] shapes = inputShapes;
for (int i = 0; i < features.size(); i++) {
features.get(i).initialize(manager, dataType, shapes);
shapes = features.get(i).getOutputShapes(shapes);
classPredictionBlocks.get(i).initialize(manager, dataType, shapes);
anchorPredictionBlocks.get(i).initialize(manager, dataType, shapes);
}
}
/** {@inheritDoc} */
@Override
public void loadMetadata(byte loadVersion, DataInputStream is)
throws IOException, MalformedModelException {
if (loadVersion == version) {
readInputShapes(is);
} else if (loadVersion != 1) {
throw new MalformedModelException("Unsupported encoding version: " + loadVersion);
}
}
    /**
     * Creates a {@link Block} that halves the spatial dimensions of its input feature map.
     *
     * @param numFilters the number of filters
     * @return a {@link Block} that halves the spatial dimensions of its input feature map
     */
public static SequentialBlock getDownSamplingBlock(int numFilters) {
SequentialBlock sequentialBlock = new SequentialBlock();
for (int i = 0; i < 2; i++) {
sequentialBlock
.add(
Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.setFilters(numFilters)
.optPadding(new Shape(1, 1))
.build())
.add(BatchNorm.builder().build())
.add(Activation::relu);
}
sequentialBlock.add(Pool.maxPool2dBlock(new Shape(2, 2), new Shape(2, 2), new Shape(0, 0)));
return sequentialBlock;
}
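    // Illustrative sketch (not part of the original source): the two 3x3 convolutions
    // (padding 1) keep the spatial size, and the final 2x2 max pool with stride 2
    // halves it, e.g. an assumed input (1, 3, 32, 32) becomes (1, 128, 16, 16).
    //
    //   SequentialBlock down = getDownSamplingBlock(128);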
/**
* Creates a class prediction block used in an SSD.
*
* @param numAnchors the number of anchors
* @param numClasses the number of classes
* @return a class prediction block used in an SSD
*/
public static Conv2d getClassPredictionBlock(int numAnchors, int numClasses) {
return Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.setFilters((numClasses + 1) * numAnchors)
.optPadding(new Shape(1, 1))
.build();
}
    /**
     * Creates an anchor prediction block used in an SSD.
     *
     * @param numAnchors the number of anchors
     * @return an anchor prediction block used in an SSD
     */
public static Conv2d getAnchorPredictionBlock(int numAnchors) {
return Conv2d.builder()
.setKernelShape(new Shape(3, 3))
.setFilters(4 * numAnchors)
.optPadding(new Shape(1, 1))
.build();
}
/**
* Creates a builder to build a {@link SingleShotDetection}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** The Builder to construct a {@link SingleShotDetection}. */
public static class Builder {
private Block network;
private int numFeatures = -1;
private List<Block> features;
private List<List<Float>> sizes;
private List<List<Float>> ratios;
private List<Block> classPredictionBlocks = new ArrayList<>();
private List<Block> anchorPredictionBlocks = new ArrayList<>();
private List<MultiBoxPrior> multiBoxPriors = new ArrayList<>();
private int numClasses;
private boolean globalPool = true;
Builder() {}
/**
* Sets the list of sizes of generated anchor boxes.
*
         * @param sizes the sizes of the generated anchor boxes, one list per feature map
* @return Returns this Builder
*/
public Builder setSizes(List<List<Float>> sizes) {
this.sizes = sizes;
return this;
}
/**
* Sets the list of aspect ratios of generated anchor boxes.
*
         * @param ratios the aspect ratios of the generated anchor boxes, one list per feature map
* @return Returns this Builder
*/
public Builder setRatios(List<List<Float>> ratios) {
this.ratios = ratios;
return this;
}
/**
* Sets the number of classes of objects to be detected.
*
* @param numClasses number of classes
* @return Returns this Builder
*/
public Builder setNumClasses(int numClasses) {
this.numClasses = numClasses;
return this;
}
/**
* Sets the base network for the SSD framework.
*
* @param network Base network
* @return Returns this Builder
*/
public Builder setBaseNetwork(Block network) {
this.network = network;
return this;
}
/**
* Sets the number of down sampling blocks to be applied. Down-sampling blocks are applied
         * to the base network successively, and feature maps are drawn from each of the blocks.
* This value is ignored if features is also set.
*
* @param numFeatures Number of down sampling blocks to be applied
* @return Returns this Builder
*/
public Builder setNumFeatures(int numFeatures) {
this.numFeatures = numFeatures;
return this;
}
/**
         * Sets the {@code Conv2d} blocks to be appended to the network to obtain a multi-output
         * network.
*
* @param features List of {@code Conv2d} blocks to be appended
* @return Returns this Builder
*/
public Builder optFeatures(List<Block> features) {
this.features = features;
return this;
}
/**
* Sets the boolean whether to attach a global average pooling layer as the last output
* layer.
*
* @param globalPool Whether to attach a global average pooling layer as the last output
* layer
* @return Returns this Builder
*/
public Builder optGlobalPool(boolean globalPool) {
this.globalPool = globalPool;
return this;
}
/**
* Builds a {@link SingleShotDetection} block.
*
* @return the {@link SingleShotDetection} block
*/
public SingleShotDetection build() {
if (features == null && numFeatures < 0) {
throw new IllegalArgumentException("Either numFeatures or features must be set");
} else if (features == null) {
features = new ArrayList<>();
features.add(network);
for (int i = 0; i < numFeatures; i++) {
features.add(getDownSamplingBlock(128));
}
}
if (globalPool) {
features.add(
LambdaBlock.singleton(
array -> {
NDArray result = Pool.globalAvgPool2d(array);
// result shape: (N, C) MXNet multi-box takes (N, C, 1, 1)
return result.reshape(result.getShape().add(1, 1));
}));
}
int numberOfFeatureMaps = features.size();
if (sizes.size() != ratios.size() || sizes.size() != numberOfFeatureMaps) {
throw new IllegalArgumentException(
"Sizes and ratios must be of size: " + numberOfFeatureMaps);
}
for (int i = 0; i < numberOfFeatureMaps; i++) {
List<Float> size = sizes.get(i);
List<Float> ratio = ratios.get(i);
int numAnchors = size.size() + ratio.size() - 1;
classPredictionBlocks.add(getClassPredictionBlock(numAnchors, numClasses));
anchorPredictionBlocks.add(getAnchorPredictionBlock(numAnchors));
multiBoxPriors.add(MultiBoxPrior.builder().setSizes(size).setRatios(ratio).build());
}
return new SingleShotDetection(this);
}
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection/ssd/SsdBlockFactory.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.object_detection.ssd;
import ai.djl.Model;
import ai.djl.nn.Block;
import ai.djl.nn.BlockFactory;
import ai.djl.nn.SequentialBlock;
import ai.djl.translate.ArgumentsUtil;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/** A {@link BlockFactory} class that creates {@link SingleShotDetection} block. */
public class SsdBlockFactory implements BlockFactory {
private static final long serialVersionUID = 1L;
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public Block newBlock(Model model, Path modelPath, Map<String, ?> arguments) {
int numClasses = ArgumentsUtil.intValue(arguments, "outSize");
int numFeatures = ArgumentsUtil.intValue(arguments, "numFeatures");
boolean globalPool = ArgumentsUtil.booleanValue(arguments, "globalPool");
int[] numFilters =
((List<Double>) arguments.get("numFilters"))
.stream().mapToInt(Double::intValue).toArray();
List<Float> ratio =
((List<Double>) arguments.get("ratios"))
.stream().map(Double::floatValue).collect(Collectors.toList());
List<List<Float>> sizes =
((List<List<Double>>) arguments.get("sizes"))
.stream()
.map(
size ->
size.stream()
.map(Double::floatValue)
.collect(Collectors.toList()))
.collect(Collectors.toList());
List<List<Float>> ratios = new ArrayList<>();
for (int i = 0; i < 5; i++) {
ratios.add(ratio);
}
SequentialBlock baseBlock = new SequentialBlock();
for (int numFilter : numFilters) {
baseBlock.add(SingleShotDetection.getDownSamplingBlock(numFilter));
}
return SingleShotDetection.builder()
.setNumClasses(numClasses)
.setNumFeatures(numFeatures)
.optGlobalPool(globalPool)
.setRatios(ratios)
.setSizes(sizes)
.setBaseNetwork(baseBlock)
.build();
}
}
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection/ssd/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the {@link ai.djl.Application.CV#OBJECT_DETECTION} models in the {@link
* ai.djl.basicmodelzoo.BasicModelZoo}.
*/
package ai.djl.basicmodelzoo.cv.object_detection.ssd;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection/yolo/YOLOV3.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.cv.object_detection.yolo;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.AbstractBlock;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.ParallelBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.convolutional.Conv2d;
import ai.djl.nn.core.Linear;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.nn.pooling.Pool;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* {@code YOLOV3} contains a generic implementation of <a
* href="https://github.com/bubbliiiing/yolo3-pytorch">yolov3</a> (Original author bubbliiiing).
*
 * <p>YOLOv3 is a fast and accurate model for object detection tasks.
 *
 * @see <a href="https://arxiv.org/abs/1804.02767">the YOLOv3 paper</a> for more information about
 *     yolov3
*/
public final class YOLOV3 extends AbstractBlock {
private static final byte VERSION = 1;
private SequentialBlock darkNet53; // backBone of YOLOv3
private Block lastLayer0;
private Block layer0Output;
private Block lastLayer1Conv;
private Block lastLayer1UpSample;
private Block lastLayer1;
private Block layer1Output;
private Block lastLayer2Conv;
private Block lastLayer2UpSample;
private Block lastLayer2;
private Block layer2Output;
static final int[] REPEATS = {
1, 2, 8, 8, 4
}; // the repeat times of the darkNet53 residual units
static final int[] FILTERS = {
32, 64, 128, 256, 512, 1024
}; // the filters of darkNet53 residual units
private YOLOV3(Builder builder) {
super(VERSION);
darkNet53 = addChildBlock("darkNet53", darkNet53(builder, true));
lastLayer0 =
addChildBlock(
"lastLayer0",
makeLastLayers(
FILTERS[4],
FILTERS[5],
builder.batchNormMomentum,
builder.leakyAlpha));
layer0Output =
addChildBlock(
"layer0Output",
makeOutputLayers(
FILTERS[5],
3 * (builder.numClasses + 5),
builder.batchNormMomentum,
builder.leakyAlpha));
lastLayer1Conv =
addChildBlock(
"lastLayer1Conv",
convolutionBlock(256, 1, builder.batchNormMomentum, builder.leakyAlpha));
lastLayer1UpSample = addChildBlock("lastLayer1UpSample", upSampleBlockNearest());
lastLayer1 =
addChildBlock(
"lastLayer1",
makeLastLayers(
FILTERS[3],
FILTERS[4],
builder.batchNormMomentum,
builder.leakyAlpha));
layer1Output =
addChildBlock(
"layer1Output",
makeOutputLayers(
FILTERS[4],
3 * (builder.numClasses + 5),
builder.batchNormMomentum,
builder.leakyAlpha));
lastLayer2Conv =
addChildBlock(
"lastLayer2Conv",
convolutionBlock(128, 1, builder.batchNormMomentum, builder.leakyAlpha));
lastLayer2UpSample = addChildBlock("lastLayer2UpSample", upSampleBlockNearest());
lastLayer2 =
addChildBlock(
"lastLayer2",
makeLastLayers(
FILTERS[2],
FILTERS[3],
builder.batchNormMomentum,
builder.leakyAlpha));
layer2Output =
addChildBlock(
"layer2Output",
makeOutputLayers(
FILTERS[3],
3 * (builder.numClasses + 5),
builder.batchNormMomentum,
builder.leakyAlpha));
}
    /**
     * Builds a {@link Block} that represents an upSample layer (nearest-neighbor mode) for yolov3.
     *
     * @return a {@link Block} that represents an upSample layer for yolov3
     */
public static Block upSampleBlockNearest() {
// transpose + upSample + transpose
return new SequentialBlock()
.addSingleton(array -> array.transpose(0, 2, 3, 1))
.addSingleton(
array ->
NDImageUtils.resize(
array,
(int) (array.getShape().get(1) * 2),
(int) (array.getShape().get(2) * 2),
Image.Interpolation.NEAREST))
.addSingleton(array -> array.transpose(0, 3, 1, 2));
}
/**
* Builds a {@link Block} that represents a conv-bn-leakyRelu unit for darkNet53.
*
* @param filters the number of filters for conv
* @param kernel the kernel size for conv
* @param batchNormMomentum the momentum for batchNorm layer
* @param leakyAlpha the alpha for leakyRelu activation
* @return a {@link Block} that represents a conv-bn-leakyRelu unit for darkNet53
*/
public static Block convolutionBlock(
int filters, int kernel, float batchNormMomentum, float leakyAlpha) {
int pad = 0;
if (kernel > 0) {
pad = (kernel - 1) >> 1;
}
return new SequentialBlock()
.add(
Conv2d.builder()
.setFilters(filters)
.setKernelShape(new Shape(kernel, kernel))
.optPadding(new Shape(pad, pad))
.build())
.add(BatchNorm.builder().optEpsilon(2E-5f).optMomentum(batchNormMomentum).build())
.add(Activation.leakyReluBlock(leakyAlpha));
}
/**
* Builds a {@link Block} that represents the feature head in yolov3.
*
* @param filtersIn the number of input filters
* @param filtersOut the number of output filters
* @param batchNormMomentum the momentum of batchNorm layer
* @param leakyAlpha the alpha value for leakyRelu activation
* @return a {@link Block} that represents the feature head in yolov3.
*/
public static Block makeLastLayers(
int filtersIn, int filtersOut, float batchNormMomentum, float leakyAlpha) {
return new SequentialBlock()
.add(convolutionBlock(filtersIn, 1, batchNormMomentum, leakyAlpha))
.add(convolutionBlock(filtersOut, 3, batchNormMomentum, leakyAlpha))
.add(convolutionBlock(filtersIn, 1, batchNormMomentum, leakyAlpha))
.add(convolutionBlock(filtersOut, 3, batchNormMomentum, leakyAlpha))
.add(convolutionBlock(filtersIn, 1, batchNormMomentum, leakyAlpha));
}
/**
* Builds a {@link Block} that represents the output layer of yolov3.
*
* @param filtersOut the number of output filters
* @param outClass the number of output classes
* @param batchNormMomentum the momentum for batchNorm layer
* @param leakyAlpha the alpha for leakyRelu activation
* @return a {@link Block} that represents the output layer of yolov3.
*/
public static Block makeOutputLayers(
int filtersOut, int outClass, float batchNormMomentum, float leakyAlpha) {
return new SequentialBlock()
.add(convolutionBlock(filtersOut, 3, batchNormMomentum, leakyAlpha))
.add(Conv2d.builder().setFilters(outClass).setKernelShape(new Shape(1, 1)).build());
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
NDList xList = darkNet53.forward(parameterStore, inputs, training);
NDArray x0 = xList.get(7);
NDArray x1 = xList.get(6);
NDArray x2 = xList.get(5);
// the first feature layer
NDList out0Branch = lastLayer0.forward(parameterStore, new NDList(x0), training);
NDList out0 = layer0Output.forward(parameterStore, out0Branch, training);
NDList x1In = lastLayer1Conv.forward(parameterStore, out0Branch, training);
x1In = lastLayer1UpSample.forward(parameterStore, x1In, training);
x1In = new NDList(x1In.singletonOrThrow().concat(x1, 1));
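        // the second feature layer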
NDList out1Branch = lastLayer1.forward(parameterStore, x1In, training);
NDList out1 = layer1Output.forward(parameterStore, out1Branch, training);
NDList x2In = lastLayer2Conv.forward(parameterStore, out1Branch, training);
x2In = lastLayer2UpSample.forward(parameterStore, x2In, training);
x2In = new NDList(x2In.singletonOrThrow().concat(x2, 1));
// the third feature layer
NDList out2 = lastLayer2.forward(parameterStore, x2In, training);
out2 = layer2Output.forward(parameterStore, out2, training);
// Outputs
return new NDList(
out0.singletonOrThrow(), out1.singletonOrThrow(), out2.singletonOrThrow());
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes) {
Shape[] current = inputShapes;
Shape[] outputs = new Shape[3];
Shape[] darkNetOutputs = new Shape[8];
int index = 0;
for (String name : children.keys()) {
Block block = children.get(name);
if (name.contains("darkNet")) {
darkNetOutputs = block.getOutputShapes(current);
current = new Shape[] {darkNetOutputs[7]};
} else if (name.contains("lastLayer")) {
if ("05lastLayer1UpSample".equals(name)) {
current = block.getOutputShapes(current);
current =
new Shape[] {
new Shape(
current[0].get(0),
current[0].get(1) + darkNetOutputs[6].get(1),
current[0].get(2),
current[0].get(3))
};
} else if ("09lastLayer2UpSample".equals(name)) {
current = block.getOutputShapes(current);
current =
new Shape[] {
new Shape(
current[0].get(0),
current[0].get(1) + darkNetOutputs[5].get(1),
current[0].get(2),
current[0].get(3))
};
} else {
current = block.getOutputShapes(current);
}
} else if (!name.contains("Output")) {
current = block.getOutputShapes(current);
} else { // name.contains("Output")
Shape[] output = block.getOutputShapes(current);
outputs[index++] = output[0];
}
}
return outputs;
}
/** {@inheritDoc} */
@Override
public void initializeChildBlocks(NDManager manager, DataType dataType, Shape... inputShapes) {
Shape[] current = inputShapes;
Shape[] darkNetOutputs = new Shape[8];
for (String name : children.keys()) {
Block block = children.get(name);
block.initialize(manager, dataType, current);
if (name.contains("darkNet")) {
darkNetOutputs = block.getOutputShapes(current);
current = new Shape[] {darkNetOutputs[7]};
} else if (name.contains("lastLayer")) {
if ("05lastLayer1UpSample".equals(name)) {
current = block.getOutputShapes(current);
current =
new Shape[] {
new Shape(
current[0].get(0),
current[0].get(1) + darkNetOutputs[6].get(1),
current[0].get(2),
current[0].get(3))
};
} else if ("09lastLayer2UpSample".equals(name)) {
current = block.getOutputShapes(current);
current =
new Shape[] {
new Shape(
current[0].get(0),
current[0].get(1) + darkNetOutputs[5].get(1),
current[0].get(2),
current[0].get(3))
};
} else {
current = block.getOutputShapes(current);
}
} else if (!name.contains("Output")) {
current = block.getOutputShapes(current);
} else { // name.contains("Output")
block.getOutputShapes(current);
}
}
}
/**
     * Builds a {@link Block} that represents a basic residual block unit used in DarkNet53.
*
* @param filters the output filter of the Convolutional Layer
* @param batchNormMomentum the momentum used for computing batchNorm
* @param leakyAlpha the alpha used in LeakyRelu Function
* @return a basic residual block unit used in DarkNet53
*/
public static Block basicBlock(int filters, float batchNormMomentum, float leakyAlpha) {
SequentialBlock block = new SequentialBlock();
block.add(Conv2d.builder().setFilters(filters / 2).setKernelShape(new Shape(1, 1)).build())
.add(BatchNorm.builder().optEpsilon(2E-5f).optMomentum(batchNormMomentum).build())
.add(Activation.leakyReluBlock(leakyAlpha))
.add(
Conv2d.builder()
.setFilters(filters)
.setKernelShape(new Shape(3, 3))
.optPadding(new Shape(1, 1))
.build())
.add(BatchNorm.builder().optEpsilon(2E-5f).optMomentum(batchNormMomentum).build())
.add(Activation.leakyReluBlock(leakyAlpha));
return new ParallelBlock(
list ->
new NDList(
NDArrays.add(
list.get(0).singletonOrThrow(),
list.get(1).singletonOrThrow())),
Arrays.asList(block, Blocks.identityBlock()));
}
/**
* Creates repeated Residual Blocks used in DarkNet53.
*
* @param filters the output filters of the final Convolutional Layer
* @param repeats the repeat times of a residual unit
* @param batchNormMomentum the momentum used for computing batchNorm
* @param leakyAlpha the alpha used in LeakyRelu Function
* @return several repeats of a residual block
*/
public static Block makeLayer(
int filters, int repeats, float batchNormMomentum, float leakyAlpha) {
List<Block> layer = new ArrayList<>();
SequentialBlock convolutionalLayer = new SequentialBlock();
convolutionalLayer
.add(
Conv2d.builder()
.setFilters(filters)
.setKernelShape(new Shape(3, 3))
.optStride(new Shape(2, 2))
.optPadding(new Shape(1, 1))
.build())
.add(BatchNorm.builder().optEpsilon(2E-5f).optMomentum(batchNormMomentum).build())
.add(Activation.leakyReluBlock(leakyAlpha));
for (int i = 0; i < repeats; i++) {
layer.add(basicBlock(filters, batchNormMomentum, leakyAlpha));
}
return new SequentialBlock().add(convolutionalLayer).addAll(layer);
}
private static SequentialBlock darkNet53(Builder builder, boolean setReturnIntermediate) {
SequentialBlock darkNet53 = new SequentialBlock();
        darkNet53.setReturnIntermediate(setReturnIntermediate); // return intermediate results
darkNet53
.add(
Conv2d.builder()
.setFilters(FILTERS[0])
.optPadding(new Shape(1, 1))
.setKernelShape(new Shape(3, 3))
.build())
.add(
BatchNorm.builder()
.optEpsilon(2E-5f)
.optMomentum(builder.batchNormMomentum)
.build())
.add(Activation.leakyReluBlock(builder.leakyAlpha))
.add(
makeLayer(
FILTERS[1],
REPEATS[0],
builder.batchNormMomentum,
builder.leakyAlpha))
.add(
makeLayer(
FILTERS[2],
REPEATS[1],
builder.batchNormMomentum,
builder.leakyAlpha))
.add(
makeLayer(
FILTERS[3],
REPEATS[2],
builder.batchNormMomentum,
builder.leakyAlpha))
.add(
makeLayer(
FILTERS[4],
REPEATS[3],
builder.batchNormMomentum,
builder.leakyAlpha))
.add(
makeLayer(
FILTERS[5],
REPEATS[4],
builder.batchNormMomentum,
builder.leakyAlpha));
return darkNet53;
}
/**
* Creates a builder to build a {@link YOLOV3}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** The Builder to construct a {@link YOLOV3} object. */
public static final class Builder {
int numClasses = 20;
float batchNormMomentum = 0.9f;
float leakyAlpha = 0.1f;
int darkNetOutSize = 10;
/**
* Sets the number of classes for yolov3.
*
* @param numClasses the number of classes
* @return this {@code Builder}
*/
public Builder setNumClasses(int numClasses) {
this.numClasses = numClasses;
return this;
}
/**
* Sets the momentum for batchNorm layer.
*
* @param batchNormMomentum the momentum for batchNorm layer
* @return this {@code Builder}
*/
public Builder optBatchNormMomentum(float batchNormMomentum) {
this.batchNormMomentum = batchNormMomentum;
return this;
}
/**
* Sets the alpha for leakyRelu activation.
*
* @param leakyAlpha the alpha for leakyRelu activation
* @return this {@code Builder}
*/
public Builder optLeakyAlpha(float leakyAlpha) {
this.leakyAlpha = leakyAlpha;
return this;
}
/**
         * Sets the output size of darkNet for testing.
         *
         * @param darkNetOutSize the output size of darkNet
* @return this {@code Builder}
*/
public Builder optDarkNetOutSize(int darkNetOutSize) {
this.darkNetOutSize = darkNetOutSize;
return this;
}
/**
* Builds a {@link YOLOV3} block.
*
* @return a {@link YOLOV3} block
*/
public Block build() {
return new YOLOV3(this);
}
/**
* Builds a {@link Block} that represents the backbone of yolov3, which is called DarkNet53.
* This can be used for testing and transfer learning.
*
* @return a {@link Block} that represents darkNet53
*/
public Block buildDarkNet() {
Block block = darkNet53(this, false);
return new SequentialBlock()
.add(block)
.add(Pool.globalAvgPool2dBlock())
.add(Linear.builder().setUnits(darkNetOutSize).build());
}
}
}
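/*
 * A minimal usage sketch, added for illustration and not part of the original source. The batch
 * size and the 416x416 input resolution are assumptions (any resolution divisible by 32 works).
 *
 *   try (NDManager manager = NDManager.newBaseManager()) {
 *       Block yolo = YOLOV3.builder().setNumClasses(20).build();
 *       yolo.initialize(manager, DataType.FLOAT32, new Shape(1, 3, 416, 416));
 *       // a forward pass returns three NDArrays, one feature map per detection scale
 *   }
 */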
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/cv/object_detection/yolo/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the {@link ai.djl.Application.CV#OBJECT_DETECTION} models in the {@link
* ai.djl.basicmodelzoo.BasicModelZoo}.
*/
package ai.djl.basicmodelzoo.cv.object_detection.yolo;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/nlp/SimpleTextDecoder.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.nlp;
import ai.djl.modality.nlp.Decoder;
import ai.djl.modality.nlp.embedding.TrainableTextEmbedding;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.core.Linear;
import ai.djl.nn.recurrent.RecurrentBlock;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
/**
* {@code SimpleTextDecoder} implements a {@link Decoder} that employs a {@link RecurrentBlock} to
* decode text input.
*/
public class SimpleTextDecoder extends Decoder {
private static final byte VERSION = 1;
/**
     * Constructs a new instance of {@code SimpleTextDecoder} with the given {@link RecurrentBlock}.
* Use this constructor if you are planning to use pre-trained embeddings that don't need
* further training.
*
* @param recurrentBlock the recurrent block to be used to decode
* @param vocabSize the size of the {@link ai.djl.modality.nlp.Vocabulary}
*/
public SimpleTextDecoder(RecurrentBlock recurrentBlock, int vocabSize) {
this(null, recurrentBlock, vocabSize);
}
/**
     * Constructs a new instance of {@code SimpleTextDecoder} with the given {@link RecurrentBlock}
     * and {@link TrainableTextEmbedding}. Use this constructor if you are planning to use
     * pre-trained or fresh embeddings that need further training.
*
* @param trainableTextEmbedding the {@link TrainableTextEmbedding} to train embeddings with
* @param recurrentBlock the recurrent block to be used to decode
* @param vocabSize the size of the {@link ai.djl.modality.nlp.Vocabulary}
*/
public SimpleTextDecoder(
TrainableTextEmbedding trainableTextEmbedding,
RecurrentBlock recurrentBlock,
long vocabSize) {
super(VERSION, getBlock(trainableTextEmbedding, recurrentBlock, vocabSize));
}
private static Block getBlock(
TrainableTextEmbedding trainableTextEmbedding,
RecurrentBlock recurrentBlock,
long vocabSize) {
SequentialBlock sequentialBlock = new SequentialBlock();
sequentialBlock
.add(trainableTextEmbedding)
.add(recurrentBlock)
.add(Linear.builder().setUnits(vocabSize).build());
return sequentialBlock;
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
if (training) {
return block.forward(parameterStore, inputs, true, params);
}
Shape inputShape = inputs.get(0).getShape();
if (inputShape.get(1) != 1) {
throw new IllegalArgumentException("Input sequence length must be 1 during prediction");
}
NDList output = new NDList();
for (int i = 0; i < 10; i++) {
inputs = block.forward(parameterStore, inputs, false);
inputs = new NDList(inputs.head().argMax(2));
output.add(inputs.head().transpose(1, 0));
}
return new NDList(NDArrays.stack(output).transpose(2, 1, 0));
}
}
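/*
 * A construction sketch, added for illustration and not part of the original source. The LSTM
 * configuration and the vocabulary size of 10000 are assumptions.
 *
 *   RecurrentBlock lstm =
 *           LSTM.builder().setNumLayers(2).setStateSize(256).optBatchFirst(true).build();
 *   Decoder decoder = new SimpleTextDecoder(lstm, 10000);
 */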
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/nlp/SimpleTextEncoder.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.nlp;
import ai.djl.modality.nlp.Encoder;
import ai.djl.modality.nlp.embedding.TrainableTextEmbedding;
import ai.djl.ndarray.NDList;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.recurrent.RecurrentBlock;
/**
* {@code SimpleTextEncoder} implements a {@link Encoder} that employs a {@link RecurrentBlock} to
* encode text input.
*/
public class SimpleTextEncoder extends Encoder {
private static final byte VERSION = 1;
/**
     * Constructs a new instance of {@code SimpleTextEncoder} with the given {@link RecurrentBlock}.
* Use this constructor if you are planning to use pre-trained embeddings that don't need
* further training.
*
* @param recurrentBlock the recurrent block to be used to encode
*/
public SimpleTextEncoder(RecurrentBlock recurrentBlock) {
super(VERSION, recurrentBlock);
}
/**
     * Constructs a new instance of {@code SimpleTextEncoder} with the given {@link RecurrentBlock}
* and {@link TrainableTextEmbedding}. Use this constructor if you are planning to use
* pre-trained or fresh embeddings that need further training.
*
* @param trainableTextEmbedding the {@link TrainableTextEmbedding} to train embeddings with
* @param recurrentBlock the recurrent block to be used to encode
*/
public SimpleTextEncoder(
TrainableTextEmbedding trainableTextEmbedding, RecurrentBlock recurrentBlock) {
super(VERSION, new SequentialBlock().add(trainableTextEmbedding).add(recurrentBlock));
}
/** {@inheritDoc} */
@Override
public NDList getStates(NDList encoderOutput) {
NDList ret = new NDList(encoderOutput.get(1));
if (encoderOutput.size() == 3) {
ret.add(encoderOutput.get(2));
}
return ret;
}
}
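/*
 * A construction sketch, added for illustration and not part of the original source. The LSTM
 * settings are assumptions; the block is configured to return its state so that getStates can
 * extract it from the encoder output.
 *
 *   RecurrentBlock lstm =
 *           LSTM.builder()
 *                   .setNumLayers(2)
 *                   .setStateSize(256)
 *                   .optBatchFirst(true)
 *                   .optReturnState(true)
 *                   .build();
 *   Encoder encoder = new SimpleTextEncoder(lstm);
 */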
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/nlp/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the natural language processing section ({@link ai.djl.Application.NLP}) of
* the {@link ai.djl.basicmodelzoo.BasicModelZoo}.
*/
package ai.djl.basicmodelzoo.nlp;
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/tabular/TabNet.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicmodelzoo.tabular;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.AbstractBlock;
import ai.djl.nn.Activation;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.LambdaBlock;
import ai.djl.nn.ParallelBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.core.Linear;
import ai.djl.nn.core.SparseMax;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.nn.norm.GhostBatchNorm;
import ai.djl.training.ParameterStore;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * {@code TabNet} contains a generic implementation of TabNet adapted from <a
 * href="https://towardsdatascience.com/implementing-tabnet-in-pytorch-fc977c383279">this PyTorch
 * implementation</a> (Original author Samrat Thapa).
 *
 * <p>TabNet is a neural architecture for tabular datasets developed by the research team at Google
 * Cloud AI. It was able to achieve state-of-the-art results on several datasets in both regression
 * and classification problems. Another desirable feature of TabNet is interpretability: contrary to
 * most deep learning models, which act like black boxes, TabNet lets us interpret which features
 * the model selects.
 *
 * @see <a href="https://arxiv.org/pdf/1908.07442.pdf">the TabNet paper</a> for more information
 *     about TabNet
*/
public final class TabNet extends AbstractBlock {
private static final byte VERSION = 1;
private Block firstStep;
private List<Block> steps;
private Block fullyConnected;
private Block batchNorm;
private int numD;
private int numA;
/**
* Creates a {@link TabNet} instance with given builder.
*
* @param builder the builder to create TabNet
*/
private TabNet(Builder builder) {
super(VERSION);
batchNorm =
addChildBlock(
"batchNorm",
BatchNorm.builder().optMomentum(builder.batchNormMomentum).build());
List<Block> sharedBlocks = new ArrayList<>();
for (int i = 0; i < builder.numShared; i++) {
sharedBlocks.add(
addChildBlock(
"sharedfc" + i,
Linear.builder().setUnits(2L * (builder.numA + builder.numD)).build()));
}
firstStep =
addChildBlock(
"featureTransformer",
featureTransformer(
sharedBlocks,
builder.numD + builder.numA,
builder.numIndependent,
builder.virtualBatchSize,
builder.batchNormMomentum));
steps = new ArrayList<>();
for (int i = 0; i < builder.numSteps - 1; i++) {
steps.add(
addChildBlock(
"steps" + (i + 1),
new DecisionStep(
builder.inputDim,
builder.numD,
builder.numA,
sharedBlocks,
builder.numIndependent,
builder.virtualBatchSize,
builder.batchNormMomentum)));
}
fullyConnected =
addChildBlock(
"fullyConnected", Linear.builder().setUnits(builder.finalOutDim).build());
this.numD = builder.numD;
this.numA = builder.numA;
}
/**
     * Applies the tabNetGLU activation (which is mostly used in TabNet) on the input {@link
     * NDArray}.
     *
     * @param array the input {@link NDArray}
     * @param units half the number of resultant features
     * @return the {@link NDArray} after applying the tabNetGLU function
*/
public static NDArray tabNetGLU(NDArray array, int units) {
return array.get(":,:{}", units).mul(Activation.sigmoid(array.get(":, {}:", units)));
}
/**
     * Applies the tabNetGLU activation (which is mostly used in TabNet) on the input singleton
     * {@link NDList}.
     *
     * @param arrays the input singleton {@link NDList}
     * @param units half the number of resultant features
     * @return the singleton {@link NDList} after applying the tabNetGLU function
*/
public static NDList tabNetGLU(NDList arrays, int units) {
return new NDList(tabNetGLU(arrays.singletonOrThrow(), units));
}
/**
* Creates a {@link LambdaBlock} that applies the {@link #tabNetGLU(NDArray, int)} activation
* function in its forward function.
*
     * @param units half the number of features
* @return {@link LambdaBlock} that applies the {@link #tabNetGLU(NDArray, int)} activation
* function
*/
public static Block tabNetGLUBlock(int units) {
return new LambdaBlock(arrays -> tabNetGLU(arrays, units), "tabNetGLU");
}
/**
     * Creates a FC-BN-GLU block used in TabNet. In order to do GLU, we double the dimension of the
     * input features to the GLU using a fully connected layer.
*
* @param sharedBlock the shared fully connected layer
* @param outDim the output feature dimension
* @param virtualBatchSize the virtualBatchSize
* @param batchNormMomentum the momentum used for ghost batchNorm layer
* @return a FC-BN-GLU block
*/
public static Block gluBlock(
Block sharedBlock, int outDim, int virtualBatchSize, float batchNormMomentum) {
SequentialBlock featureBlock = new SequentialBlock();
int units = 2 * outDim;
if (sharedBlock == null) {
featureBlock.add(Linear.builder().setUnits(units).build());
} else {
featureBlock.add(sharedBlock);
}
featureBlock
.add(
GhostBatchNorm.builder()
.optVirtualBatchSize(virtualBatchSize)
.optMomentum(batchNormMomentum)
.build())
.add(tabNetGLUBlock(outDim));
return featureBlock;
}
/**
* Creates a featureTransformer Block. The feature transformer is where all the selected
* features are processed to generate the final output.
*
* @param sharedBlocks the sharedBlocks of feature transformer
* @param outDim the output dimension of feature transformer
* @param numIndependent the number of independent blocks of feature transformer
* @param virtualBatchSize the virtual batch size for ghost batch norm
* @param batchNormMomentum the momentum for batch norm layer
* @return a feature transformer
*/
public static Block featureTransformer(
List<Block> sharedBlocks,
int outDim,
int numIndependent,
int virtualBatchSize,
float batchNormMomentum) {
List<Block> allBlocks = new ArrayList<>();
if (!sharedBlocks.isEmpty()) {
for (Block sharedBlock : sharedBlocks) {
allBlocks.add(gluBlock(sharedBlock, outDim, virtualBatchSize, batchNormMomentum));
}
}
for (int i = 0; i < numIndependent; i++) {
allBlocks.add(gluBlock(null, outDim, virtualBatchSize, batchNormMomentum));
}
SequentialBlock featureBlocks = new SequentialBlock();
int startIndex = 0;
if (!sharedBlocks.isEmpty()) {
startIndex = 1;
featureBlocks.add(allBlocks.get(0));
}
for (int i = startIndex; i < allBlocks.size(); i++) {
featureBlocks.add(
new ParallelBlock(
ndLists -> {
NDList unit = ndLists.get(0);
NDList parallel = ndLists.get(1);
return new NDList(
NDArrays.add(
unit.singletonOrThrow(),
parallel.singletonOrThrow())
.mul(Math.sqrt(0.5)));
},
Arrays.asList(allBlocks.get(i), Blocks.identityBlock())));
}
return featureBlocks;
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
NDManager manager = inputs.getManager();
NDArray input = inputs.singletonOrThrow();
input = input.reshape(input.size(0), input.size() / input.size(0)); // batch flatten
NDArray x =
batchNorm.forward(parameterStore, new NDList(input), training).singletonOrThrow();
NDArray xa =
firstStep
.forward(parameterStore, new NDList(x), training)
.singletonOrThrow()
.get(":," + this.numD + ":");
NDArray sparseLoss = null;
NDArray out = null;
NDArray priors = manager.ones(x.getShape());
for (Block step : steps) {
NDList tempRes = step.forward(parameterStore, new NDList(x, xa, priors), training);
NDArray xte = tempRes.get(0);
NDArray loss = tempRes.get(1);
if (out == null) {
out = Activation.relu(xte.get(":,:" + this.numD));
} else {
out = out.add(Activation.relu(xte.get(":,:" + this.numD)));
}
xa = xte.get(":," + this.numD + ":");
sparseLoss = sparseLoss == null ? loss : sparseLoss.add(loss);
}
NDArray finalOutput =
fullyConnected
.forward(parameterStore, new NDList(out), training)
.singletonOrThrow();
return new NDList(finalOutput, sparseLoss);
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes) {
Shape[] shapes = inputShapes;
Shape[] xShapes = batchNorm.getOutputShapes(shapes);
Shape[] xaShapes = firstStep.getOutputShapes(xShapes); // input shape for xa
xaShapes[0] = Shape.update(xaShapes[0], xaShapes[0].dimension() - 1, this.numA);
shapes =
new Shape[] {
xShapes[0], xaShapes[0], xShapes[0]
}; // shape of priors should be the same as x
Shape outputShape = new Shape();
Shape lossShape = new Shape();
for (Block step : steps) {
Shape[] outputShapes = step.getOutputShapes(shapes);
outputShape = Shape.update(outputShapes[0], outputShapes[0].dimension() - 1, numD);
lossShape = outputShapes[1];
}
outputShape = fullyConnected.getOutputShapes(new Shape[] {outputShape})[0];
return new Shape[] {outputShape, lossShape};
}
/** {@inheritDoc} */
@Override
protected void initializeChildBlocks(
NDManager manager, DataType dataType, Shape... inputShapes) {
Shape[] shapes = inputShapes;
batchNorm.initialize(manager, dataType, shapes);
Shape[] xShapes = batchNorm.getOutputShapes(shapes);
firstStep.initialize(manager, dataType, xShapes);
Shape[] xaShapes = firstStep.getOutputShapes(xShapes); // input shape for xa
        xaShapes[0] = Shape.update(xaShapes[0], xaShapes[0].dimension() - 1, this.numA);
shapes =
new Shape[] {
xShapes[0], xaShapes[0], xShapes[0]
}; // shape of priors should be the same as x
Shape outputShape = new Shape();
for (Block step : steps) {
step.initialize(manager, dataType, shapes);
Shape[] outputShapes = step.getOutputShapes(shapes);
outputShape = Shape.update(outputShapes[0], outputShapes[0].dimension() - 1, numD);
}
fullyConnected.initialize(manager, dataType, outputShape);
}
/**
* Creates a builder to build a {@link TabNet}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/**
     * AttentionTransformer is where the TabNet model learns the relationships between relevant
     * features and decides which features to pass on to the feature transformer of the current
     * decision step.
*/
public static final class AttentionTransformer extends AbstractBlock {
private static final Byte VERSION = 1;
private Block fullyConnected;
private Block batchNorm;
private Block sparseMax;
/**
* Creates an attentionTransformer Block with given parameters.
*
* @param inputDim the input Dimension of the TabNet
* @param virtualBatchSize the virtual batch size for ghost batchNorm
* @param batchNormMomentum the momentum for batchNorm layer
*/
private AttentionTransformer(int inputDim, int virtualBatchSize, float batchNormMomentum) {
super(VERSION);
fullyConnected =
addChildBlock("fullyConnected", Linear.builder().setUnits(inputDim).build());
batchNorm =
addChildBlock(
"ghostBatchNorm",
GhostBatchNorm.builder()
.optVirtualBatchSize(virtualBatchSize)
.optMomentum(batchNormMomentum)
.build());
sparseMax = addChildBlock("sparseMax", new SparseMax());
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
NDArray x = inputs.get(0);
NDArray priors = inputs.get(1);
NDList x1 = fullyConnected.forward(parameterStore, new NDList(x), training);
NDList x2 = batchNorm.forward(parameterStore, x1, training);
return sparseMax.forward(
parameterStore, new NDList(x2.singletonOrThrow().mul(priors)), training);
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes) {
Shape[] shapes = {inputShapes[0]};
for (Pair<String, Block> child : getChildren()) {
shapes = child.getValue().getOutputShapes(shapes);
}
return shapes;
}
/** {@inheritDoc} */
@Override
protected void initializeChildBlocks(
NDManager manager, DataType dataType, Shape... inputShapes) {
Shape[] shapes = {inputShapes[0]};
for (Block child : getChildren().values()) {
child.initialize(manager, dataType, shapes);
shapes = child.getOutputShapes(shapes);
}
}
}
    /** DecisionStep simply combines the featureTransformer and the attentionTransformer. */
public static final class DecisionStep extends AbstractBlock {
private static final Byte VERSION = 1;
private Block featureTransformer;
private Block attentionTransformer;
/**
* Creates a {@link DecisionStep} with given parameters.
*
         * @param inputDim the input dimension for the attentionTransformer
         * @param numD the number of dimensions for the decision output
         * @param numA the number of dimensions for the attentionTransformer
* @param shared the shared fullyConnected layers
* @param nInd the number of independent fullyConnected layers
* @param virtualBatchSize the virtual batch size
* @param batchNormMomentum the momentum for batchNorm layer
*/
public DecisionStep(
int inputDim,
int numD,
int numA,
List<Block> shared,
int nInd,
int virtualBatchSize,
float batchNormMomentum) {
super(VERSION);
this.featureTransformer =
addChildBlock(
"featureTransformer",
featureTransformer(
shared,
numD + numA,
nInd,
virtualBatchSize,
batchNormMomentum));
this.attentionTransformer =
addChildBlock(
"attentionTransformer",
new AttentionTransformer(
inputDim, virtualBatchSize, batchNormMomentum));
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
NDArray x = inputs.get(0);
NDArray a = inputs.get(1);
NDArray priors = inputs.get(2);
NDList mask =
attentionTransformer.forward(parameterStore, new NDList(a, priors), training);
NDArray sparseLoss =
mask.singletonOrThrow()
.mul(-1)
.mul(NDArrays.add(mask.singletonOrThrow(), 1e-10).log());
NDList x1 = featureTransformer.forward(parameterStore, new NDList(x), training);
return new NDList(x1.singletonOrThrow(), sparseLoss);
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes) {
Shape[] xShape = {inputShapes[0]};
Shape[] aShape = {inputShapes[1], inputShapes[2]};
Shape[] x1Shape = featureTransformer.getOutputShapes(xShape);
Shape[] lossShape = attentionTransformer.getOutputShapes(aShape);
return new Shape[] {x1Shape[0], lossShape[0]};
}
/** {@inheritDoc} */
@Override
protected void initializeChildBlocks(
NDManager manager, DataType dataType, Shape... inputShapes) {
Shape[] xShape = {inputShapes[0]};
Shape[] aShape = {inputShapes[1], inputShapes[2]};
this.attentionTransformer.initialize(manager, dataType, aShape);
this.featureTransformer.initialize(manager, dataType, xShape);
}
}
/** The Builder to construct a {@link TabNet} object. */
public static class Builder {
int inputDim = 128;
int finalOutDim = 10;
int numD = 64;
int numA = 64;
int numShared = 2;
int numIndependent = 2;
int numSteps = 5;
int virtualBatchSize = 128;
float batchNormMomentum = 0.9f;
/**
* Sets the input dimension of TabNet.
*
* @param inputDim the input dimension
* @return this {@code Builder}
*/
public Builder setInputDim(int inputDim) {
this.inputDim = inputDim;
return this;
}
/**
* Sets the output dimension for TabNet.
*
* @param outDim the output dimension
* @return this {@code Builder}
*/
public Builder setOutDim(int outDim) {
this.finalOutDim = outDim;
return this;
}
/**
         * Sets the number of dimensions for the decision output.
         *
         * @param numD the number of dimensions for the decision output
* @return this {@code Builder}
*/
public Builder optNumD(int numD) {
this.numD = numD;
return this;
}
/**
         * Sets the number of dimensions for the attentionTransformer.
         *
         * @param numA the number of dimensions for the attentionTransformer
* @return this {@code Builder}
*/
public Builder optNumA(int numA) {
this.numA = numA;
return this;
}
/**
* Sets the number of shared fullyConnected layers.
*
* @param numShared the number of shared fullyConnected layers
* @return this {@code Builder}
*/
public Builder optNumShared(int numShared) {
this.numShared = numShared;
return this;
}
/**
* Sets the number of independent fullyConnected layers.
*
* @param numIndependent the number of independent fullyConnected layers
* @return this {@code Builder}
*/
public Builder optNumIndependent(int numIndependent) {
this.numIndependent = numIndependent;
return this;
}
/**
* Sets the number of decision steps for tabNet.
*
* @param numSteps the number of decision steps for tabNet
* @return this {@code Builder}
*/
public Builder optNumSteps(int numSteps) {
this.numSteps = numSteps;
return this;
}
/**
* Sets the virtual batch size for ghost batch norm.
*
* @param virtualBatchSize the virtual batch size
* @return this {@code Builder}
*/
public Builder optVirtualBatchSize(int virtualBatchSize) {
this.virtualBatchSize = virtualBatchSize;
return this;
}
/**
* Sets the momentum for batchNorm layer.
*
* @param batchNormMomentum the momentum for batchNormLayer
* @return this {@code Builder}
*/
public Builder optBatchNormMomentum(float batchNormMomentum) {
this.batchNormMomentum = batchNormMomentum;
return this;
}
/**
         * Builds an attentionTransformer with the given units for testing.
         *
         * @param units the number of output units for the attentionTransformer
* @return an attentionTransformer Block
*/
public Block buildAttentionTransformer(int units) {
            return new AttentionTransformer(units, virtualBatchSize, batchNormMomentum);
}
/**
* Builds a TabNet with given {@code Builder}.
*
* @return a tabNetBlock
*/
public Block build() {
return new TabNet(this);
}
}
}
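/*
 * A minimal usage sketch, added for illustration and not part of the original source. The batch
 * size and dimensions are assumptions; a forward pass returns the prediction together with the
 * sparse loss, as in forwardInternal above.
 *
 *   try (NDManager manager = NDManager.newBaseManager()) {
 *       Block tabNet = TabNet.builder().setInputDim(128).setOutDim(10).build();
 *       tabNet.initialize(manager, DataType.FLOAT32, new Shape(32, 128));
 *   }
 */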
|
0
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo
|
java-sources/ai/djl/model-zoo/0.34.0/ai/djl/basicmodelzoo/tabular/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for the tabular section ({@link ai.djl.Application.Tabular}) of the {@link
* ai.djl.basicmodelzoo.BasicModelZoo}.
*/
package ai.djl.basicmodelzoo.tabular;
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains the main DJL implementation of the API for Apache MXNet.
*
* @see ai.djl.mxnet.engine.MxEngine
*/
package ai.djl.mxnet;
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/CachedOp.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Parameter;
import ai.djl.training.ParameterStore;
import ai.djl.util.NativeResource;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import com.sun.jna.Pointer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
/**
* The {@code CachedOp} is an internal helper that provides the core functionality to execute a
* {@link MxSymbolBlock}.
*
 * <p>We don't recommend that users interact with this class directly. Users should use {@link
* ai.djl.inference.Predictor} instead. CachedOp is an operator that simplifies calling and
* analyzing the input shape. It requires minimum input to do inference because most of the
* information can be obtained from the model itself.
*/
public class CachedOp extends NativeResource<Pointer> {
private static final Logger logger = LoggerFactory.getLogger(CachedOp.class);
private List<Parameter> parameters;
private PairList<String, Integer> dataIndices;
private Map<String, Integer> dataIndicesMap;
private List<Integer> paramIndices;
private MxNDManager manager;
/**
* Creates an instance of {@link CachedOp}.
*
* <p>It can be created by using {@link JnaUtils#createCachedOp(MxSymbolBlock, MxNDManager,
* boolean)}
*
* @param handle the C handle of the CachedOp
* @param manager the manager used to create the NDArray
* @param parameters the parameter values
* @param paramIndices the parameters required by the model and their corresponding location
* @param dataIndices the input data names required by the model and their corresponding
* location
*/
@SuppressWarnings("this-escape")
public CachedOp(
Pointer handle,
MxNDManager manager,
List<Parameter> parameters,
List<Integer> paramIndices,
PairList<String, Integer> dataIndices) {
super(handle);
this.parameters = parameters;
this.dataIndices = dataIndices;
this.paramIndices = paramIndices;
this.dataIndicesMap = dataIndices.toMap();
// holds all parameter and data NDArray values, final inputs to CachedOp
this.manager = manager;
manager.attachInternal(getUid(), this);
}
/**
* Assigns inputs to the empty locations of the input NDArray.
*
* @param parameterStore the parameterStore
* @param data the input in {@link NDList} format
* @param training true for a training forward pass
* @return an {@link NDList}
*/
public NDList forward(ParameterStore parameterStore, NDList data, boolean training) {
// reset the input data index at the beginning
MxNDArray[] allInputsNDArray = new MxNDArray[parameters.size()];
// check device of input
Device device = data.head().getDevice();
// get the manager of the data
MxNDManager inputManager = (MxNDManager) data.head().getManager();
// fill allInputsNDArray with parameter values on correct device
for (int index : paramIndices) {
Parameter parameter = parameters.get(index);
MxNDArray value = (MxNDArray) parameterStore.getValue(parameter, device, training);
if (value == null) {
throw new NullPointerException("Failed to find parameter from parameterStore");
}
allInputsNDArray[index] = value;
}
// fill allInputsNDArray with data values
int index = 0;
for (NDArray array : data) {
String inputName = array.getName();
// if inputName not provided, value will follow the default order
int idx = indexOf(inputName, index++);
allInputsNDArray[idx] = (MxNDArray) array;
}
// check the input, set as Shape(batchSize) by default
for (Pair<String, Integer> pair : dataIndices) {
if (allInputsNDArray[pair.getValue()] == null) {
// TODO: Do we need to set default to the input?
long batchSize = data.head().getShape().get(0);
String key = pair.getKey();
if (!"prob_label".equals(key) && !"softmax_label".equals(key)) {
logger.warn(
"Input {} not found, set NDArray to Shape({}) by default",
key,
batchSize);
}
allInputsNDArray[pair.getValue()] =
(MxNDArray) inputManager.create(new Shape(batchSize));
}
}
MxNDArray[] result = JnaUtils.cachedOpInvoke(inputManager, getHandle(), allInputsNDArray);
return new NDList(result);
}
/** {@inheritDoc} */
@Override
public void close() {
Pointer pointer = handle.getAndSet(null);
if (pointer != null) {
manager.detachInternal(getUid());
JnaUtils.freeCachedOp(pointer);
manager = null;
}
}
private int indexOf(String inputName, int position) {
if (inputName == null) {
return dataIndices.valueAt(position);
}
Integer index = dataIndicesMap.get(inputName);
if (index == null) {
throw new IllegalArgumentException(
"Unknown input name: "
+ inputName
+ ", expected inputs: "
+ dataIndicesMap.keySet().toString());
}
return index;
}
}
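/*
 * As the class javadoc notes, CachedOp is created internally (via JnaUtils.createCachedOp) and
 * driven by MxSymbolBlock; the recommended entry point is a Predictor. A sketch, assuming a
 * loaded MXNet model and a matching translator (both hypothetical here):
 *
 *   try (Predictor<Image, DetectedObjects> predictor = model.newPredictor(translator)) {
 *       DetectedObjects result = predictor.predict(image);
 *   }
 */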
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/GradReq.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
/** An enum that indicates whether a gradient is required. */
public enum GradReq {
NULL("null", 0),
WRITE("write", 1),
ADD("add", 3);
private String type;
private int value;
GradReq(String type, int value) {
this.type = type;
this.value = value;
}
/**
* Gets the type of this {@code GradReq}.
*
* @return the type
*/
public String getType() {
return type;
}
/**
     * Gets the value of this {@code GradReq}.
*
* @return the value
*/
public int getValue() {
return value;
}
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxDataType.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.ndarray.types.DataType;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** Helper to convert between {@link DataType} and the MXNet internal data types. */
public final class MxDataType {
private static Map<DataType, String> toMx = createMapToMx();
private static Map<String, DataType> fromMx = createMapFromMx();
private MxDataType() {}
private static Map<DataType, String> createMapToMx() {
Map<DataType, String> map = new ConcurrentHashMap<>();
map.put(DataType.FLOAT32, "float32");
map.put(DataType.FLOAT64, "float64");
map.put(DataType.INT32, "int32");
map.put(DataType.INT64, "int64");
map.put(DataType.UINT8, "uint8");
map.put(DataType.BOOLEAN, "bool");
return map;
}
private static Map<String, DataType> createMapFromMx() {
Map<String, DataType> map = new ConcurrentHashMap<>();
map.put("float32", DataType.FLOAT32);
map.put("float64", DataType.FLOAT64);
map.put("int32", DataType.INT32);
map.put("int64", DataType.INT64);
map.put("uint8", DataType.UINT8);
map.put("bool", DataType.BOOLEAN);
return map;
}
/**
* Converts a MXNet type String into a {@link DataType}.
*
* @param mxType the type String to convert
* @return the {@link DataType}
*/
public static DataType fromMx(String mxType) {
return fromMx.get(mxType);
}
/**
* Converts a {@link DataType} into the corresponding MXNet type String.
*
* @param jType the java {@link DataType} to convert
* @return the converted MXNet type string
*/
public static String toMx(DataType jType) {
String dType = toMx.get(jType);
if (dType == null) {
throw new UnsupportedOperationException("Unsupported DataType: " + jType);
}
return dType;
}
}
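/*
 * A round-trip sketch, added for illustration: converting between DJL and MXNet type names.
 *
 *   String mxName = MxDataType.toMx(DataType.FLOAT32); // "float32"
 *   DataType dataType = MxDataType.fromMx(mxName);     // DataType.FLOAT32
 */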
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxDeviceType.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
/** DeviceType is the MXNet equivalent of the types in {@link Device}. */
public final class MxDeviceType {
private static final String CPU_PINNED = "cpu_pinned";
private MxDeviceType() {}
/**
* Converts a {@link Device} to the corresponding MXNet device number.
*
* @param device the java {@link Device}
* @return the MXNet device number
*/
public static int toDeviceType(Device device) {
String deviceType = device.getDeviceType();
if (Device.Type.CPU.equals(deviceType)) {
return 1;
} else if (Device.Type.GPU.equals(deviceType)) {
return 2;
} else if (CPU_PINNED.equals(deviceType)) {
return 3;
} else {
throw new IllegalArgumentException("Unsupported device: " + device);
}
}
/**
     * Converts an MXNet device number to the corresponding {@link Device} type name.
     *
     * @param deviceType the MXNet device number
     * @return the corresponding device type name
*/
public static String fromDeviceType(int deviceType) {
switch (deviceType) {
case 1:
case 3:
                // hide CPU_PINNED from the frontend user,
                // but advanced users can still create CPU_PINNED
                // and pass it through to the engine
return Device.Type.CPU;
case 2:
return Device.Type.GPU;
default:
throw new IllegalArgumentException("Unsupported deviceType: " + deviceType);
}
}
}
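/*
 * A round-trip sketch, added for illustration: CPU maps to device number 1, which maps back to
 * the CPU device type name.
 *
 *   int deviceNumber = MxDeviceType.toDeviceType(Device.cpu());    // 1
 *   String deviceType = MxDeviceType.fromDeviceType(deviceNumber); // Device.Type.CPU
 */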
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxEngine.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.mxnet.jna.LibUtils;
import ai.djl.ndarray.NDManager;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.GradientCollector;
import ai.djl.training.LocalParameterServer;
import ai.djl.training.ParameterServer;
import ai.djl.training.optimizer.Optimizer;
import ai.djl.util.Utils;
import java.io.FileNotFoundException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
* The {@code MxEngine} is an implementation of the {@link Engine} based on the <a
* href="https://mxnet.apache.org/">Apache MXNet Deep Learning Framework</a>.
*
* <p>To get an instance of the {@code MxEngine} when it is not the default Engine, call {@link
* Engine#getEngine(String)} with the Engine name "MXNet".
*/
public final class MxEngine extends Engine {
public static final String ENGINE_NAME = "MXNet";
static final int RANK = 0;
private static final String MXNET_EXTRA_LIBRARY_VERBOSE = "MXNET_EXTRA_LIBRARY_VERBOSE";
/** Constructs an MXNet Engine. */
private MxEngine() {}
static Engine newInstance() {
try {
// Workaround MXNet engine lazy initialization issue
JnaUtils.getAllOpNames();
JnaUtils.setNumpyMode(JnaUtils.NumpyMode.GLOBAL_ON);
// Workaround MXNet shutdown crash issue
Runtime.getRuntime().addShutdownHook(new Thread(JnaUtils::waitAll)); // NOPMD
// load extra MXNet library
String paths = Utils.getEnvOrSystemProperty("MXNET_EXTRA_LIBRARY_PATH");
boolean extraLibVerbose =
Boolean.parseBoolean(Utils.getEnvOrSystemProperty(MXNET_EXTRA_LIBRARY_VERBOSE));
if (paths != null) {
String[] files = paths.split(",");
for (String file : files) {
Path path = Paths.get(file);
if (Files.notExists(path)) {
throw new FileNotFoundException("Extra Library not found: " + file);
}
JnaUtils.loadLib(path.toAbsolutePath().toString(), extraLibVerbose);
}
}
return new MxEngine();
} catch (Throwable t) {
throw new EngineException("Failed to load MXNet native library", t);
}
}
/** {@inheritDoc} */
@Override
public Engine getAlternativeEngine() {
return null;
}
/** {@inheritDoc} */
@Override
public String getEngineName() {
return ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getRank() {
return RANK;
}
/** {@inheritDoc} */
@Override
public String getVersion() {
int version = JnaUtils.getVersion();
int major = version / 10000;
int minor = version / 100 - major * 100;
int patch = version % 100;
return major + "." + minor + '.' + patch;
}
/** {@inheritDoc} */
@Override
public boolean hasCapability(String capability) {
return JnaUtils.getFeatures().contains(capability);
}
/** {@inheritDoc} */
@Override
public SymbolBlock newSymbolBlock(NDManager manager) {
return new MxSymbolBlock(manager);
}
/** {@inheritDoc} */
@Override
public Model newModel(String name, Device device) {
return new MxModel(name, device);
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager() {
return MxNDManager.getSystemManager().newSubManager();
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager(Device device) {
return MxNDManager.getSystemManager().newSubManager(device);
}
/** {@inheritDoc} */
@Override
public GradientCollector newGradientCollector() {
return new MxGradientCollector();
}
/** {@inheritDoc} */
@Override
public ParameterServer newParameterServer(Optimizer optimizer) {
return Boolean.getBoolean("ai.djl.use_local_parameter_server")
? new LocalParameterServer(optimizer)
: new MxParameterServer(optimizer);
}
/** {@inheritDoc} */
@Override
public void setRandomSeed(int seed) {
super.setRandomSeed(seed);
JnaUtils.randomSeed(seed);
}
/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(200);
sb.append(getEngineName()).append(':').append(getVersion()).append(", capabilities: [\n");
for (String feature : JnaUtils.getFeatures()) {
sb.append("\t").append(feature).append(",\n"); // NOPMD
}
sb.append("]\nMXNet Library: ").append(LibUtils.getLibName());
return sb.toString();
}
}
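/*
 * A lookup sketch, added for illustration and following the class javadoc: obtain this engine
 * by name when it is not the default.
 *
 *   Engine mxnet = Engine.getEngine("MXNet");
 *   try (NDManager manager = mxnet.newBaseManager()) {
 *       // NDArrays created from this manager are backed by MXNet
 *   }
 */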
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxEngineProvider.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code MxEngineProvider} is the MXNet implementation of {@link EngineProvider}. */
public class MxEngineProvider implements EngineProvider {
/** {@inheritDoc} */
@Override
public String getEngineName() {
return MxEngine.ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getEngineRank() {
String osName = System.getProperty("os.name");
String osArch = System.getProperty("os.arch");
if (osName.startsWith("Mac") && "aarch64".equals(osArch)) {
// MXNet doesn't support macOS M1
return 99;
}
return MxEngine.RANK;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
return InstanceHolder.INSTANCE;
}
private static class InstanceHolder {
static final Engine INSTANCE = MxEngine.newInstance();
}
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxGradientCollector.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.training.GradientCollector;
/** {@code MxGradientCollector} is the MXNet implementation of {@link GradientCollector}. */
public final class MxGradientCollector implements GradientCollector {
/**
* Constructs an {@code MxGradientCollector} and enables training data collection for
     * backpropagation.
*/
MxGradientCollector() {
boolean prevRecordingState = setRecording(true);
if (prevRecordingState) {
throw new IllegalStateException(
"Autograd Recording is already set to True. "
+ "Please create autograd using try with resource ");
}
boolean prevTrainingState = setTraining(true);
if (prevTrainingState) {
throw new IllegalStateException(
"Autograd Training is already set to True. "
+ "Please create autograd using try with resource ");
}
}
/**
* Gets whether Autograd is recording computations.
*
* @return the current state of recording
*/
public static boolean isRecording() {
return JnaUtils.autogradIsRecording();
}
/**
* Gets whether Autograd is in training/predicting mode.
*
* @return the current state of training/predicting
*/
public static boolean isTraining() {
return JnaUtils.autogradIsTraining();
}
/**
     * Sets the status to recording/not recording. When recording, a computation graph is built for
* gradient computation.
*
* @param isRecording the recording state to be set
* @return the previous recording state before this set
*/
public static boolean setRecording(boolean isRecording) {
return JnaUtils.autogradSetIsRecording(isRecording);
}
/**
* Sets the status to training/predicting. This affects ctx.is_train in the device running the
* operator. For example, Dropout will drop inputs randomly when isTraining=True, while simply
* passing through if isTraining=False.
*
* @param isTraining {@code true} if for training
* @return the previous status before this set
*/
public static boolean setTraining(boolean isTraining) {
return JnaUtils.autogradSetTraining(isTraining);
}
/**
* Returns the {@link Symbol} of a network formed by the recorded operations on the given {@link
* NDArray}.
*
* @param manager the {@link NDManager} to create the {@link Symbol}
* @param array the {@link NDArray}
* @return the {@link Symbol}
*/
public static Symbol getSymbol(NDManager manager, NDArray array) {
return new Symbol((MxNDManager) manager, JnaUtils.autogradGetSymbol(array));
}
/** {@inheritDoc} */
@Override
public void close() {
setRecording(false);
setTraining(false);
}
/** {@inheritDoc} */
@Override
public void backward(NDArray array) {
backward(array, false);
}
/**
* Computes the gradients of the NDArray w.r.t variables.
*
* @param array the target/head array to run backward on
* @param retainGraph whether to retain the computation graph for another backward pass on the
* same graph. By default the computation history is cleared.
*/
private void backward(NDArray array, boolean retainGraph) {
JnaUtils.autogradBackward(new NDList(array), retainGraph ? 1 : 0);
}
/** {@inheritDoc} */
@Override
public void zeroGradients() {
NDManager systemManager = MxNDManager.getSystemManager();
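            // Subtracting a gradient from itself zeroes it in place, avoiding a fresh allocation.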
for (NDArray array : systemManager.getManagedArrays()) {
if (array.hasGradient()) {
array.getGradient().subi(array.getGradient());
}
}
}
}
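
A hedged usage sketch (assumptions: Engine.newGradientCollector() is the standard DJL entry point, and the tensors below are placeholders). The IllegalStateException checks in the constructor above exist to enforce exactly this try-with-resources pattern, since close() is what restores the recording/training flags:

import ai.djl.engine.Engine;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.training.GradientCollector;

public final class GradientCollectorDemo {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            NDArray x = manager.create(new float[] {1f, 2f, 3f});
            x.setRequiresGradient(true); // attach a gradient buffer to x
            // Entering the block turns autograd recording/training on;
            // leaving it turns both flags back off, mirroring close() above.
            try (GradientCollector collector = Engine.getInstance().newGradientCollector()) {
                NDArray loss = x.mul(x).sum(); // placeholder forward pass
                collector.backward(loss);      // populate x's gradient
            }
            System.out.println(x.getGradient()); // d(sum(x*x))/dx = 2x
        }
    }
}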
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxModel.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.BaseModel;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.nn.Parameter;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.training.initializer.Initializer;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* {@code MxModel} is the MXNet implementation of {@link Model}.
*
* <p>MxModel contains all the methods in Model to load and process a model. In addition, it
* provides MXNet Specific functionality, such as getSymbol to obtain the Symbolic graph and
* getParameters to obtain the parameter NDArrays
*/
public class MxModel extends BaseModel {
private static final Logger logger = LoggerFactory.getLogger(MxModel.class);
/**
* Constructs a new Model on a given device.
*
* @param name the model name
* @param device the device the model should be located on
*/
MxModel(String name, Device device) {
super(name);
dataType = DataType.FLOAT32;
properties = new ConcurrentHashMap<>();
manager = MxNDManager.getSystemManager().newSubManager(device);
manager.setName("mxModel");
}
/**
* Loads the MXNet model from a specified location.
*
* <p>MXNet engine looks for {MODEL_NAME}-symbol.json and {MODEL_NAME}-{EPOCH}.params files in
* the specified directory. By default, MXNet engine will pick up the latest epoch of the
* parameter file. However, users can explicitly specify an epoch to be loaded:
*
* <pre>
 * Map&lt;String, String&gt; options = new HashMap&lt;&gt;();
* <b>options.put("epoch", "3");</b>
* model.load(modelPath, "squeezenet", options);
* </pre>
*
* @param modelPath the directory of the model
* @param prefix the model file name or path prefix
* @param options load model options, see documentation for the specific engine
* @throws IOException Exception for file loading
*/
@Override
@SuppressWarnings("PMD.EmptyControlStatement")
public void load(Path modelPath, String prefix, Map<String, ?> options)
throws IOException, MalformedModelException {
setModelDir(modelPath);
wasLoaded = true;
if (prefix == null) {
prefix = modelName;
}
boolean hasParameter = true;
String optimization = null;
if (options != null) {
String paramOption = (String) options.get("hasParameter");
if (paramOption != null) {
hasParameter = Boolean.parseBoolean(paramOption);
}
optimization = (String) options.get("MxOptimizeFor");
}
Path paramFile = paramPathResolver(prefix, options);
if (hasParameter && paramFile == null) {
prefix = modelDir.toFile().getName();
paramFile = paramPathResolver(prefix, options);
if (paramFile == null && block == null) {
throw new FileNotFoundException(
"Parameter file with prefix: "
+ prefix
+ " not found in: "
+ modelDir
+ " or not readable by the engine.");
}
}
if (block == null) {
// load MxSymbolBlock
Path symbolFile = modelDir.resolve(prefix + "-symbol.json");
if (Files.notExists(symbolFile)) {
throw new FileNotFoundException(
"Symbol file not found: "
+ symbolFile
+ ", please set block manually for imperative model.");
}
Symbol symbol =
Symbol.load((MxNDManager) manager, symbolFile.toAbsolutePath().toString());
// TODO: change default name "data" to model-specific one
block = new MxSymbolBlock(manager, symbol);
}
if (hasParameter) {
loadParameters(paramFile, options);
}
// TODO: Check if Symbol has all names that params file have
if (optimization != null) {
((MxSymbolBlock) block).optimizeFor(optimization);
}
// Freeze parameters to match Block spec for preTrained data
boolean trainParam =
options != null && Boolean.parseBoolean((String) options.get("trainParam"));
if (!trainParam) {
// TODO: See https://github.com/deepjavalibrary/djl/pull/2360
// NOPMD
// block.freezeParameters(true);
}
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("PMD.EmptyControlStatement")
public Trainer newTrainer(TrainingConfig trainingConfig) {
PairList<Initializer, Predicate<Parameter>> initializer = trainingConfig.getInitializers();
if (block == null) {
throw new IllegalStateException(
"You must set a block for the model before creating a new trainer");
}
if (wasLoaded) {
// Unfreeze parameters if training directly
// TODO: See https://github.com/deepjavalibrary/djl/pull/2360
// block.freezeParameters(false);
}
for (Pair<Initializer, Predicate<Parameter>> pair : initializer) {
if (pair.getKey() != null && pair.getValue() != null) {
block.setInitializer(pair.getKey(), pair.getValue());
}
}
return new Trainer(this, trainingConfig);
}
/** {@inheritDoc} */
@Override
public String[] getArtifactNames() {
try (Stream<Path> stream = Files.walk(modelDir)) {
List<Path> files = stream.filter(Files::isRegularFile).collect(Collectors.toList());
List<String> ret = new ArrayList<>(files.size());
for (Path path : files) {
String fileName = path.toFile().getName();
if (fileName.endsWith(".params") || fileName.endsWith("-symbol.json")) {
// ignore symbol and param files.
continue;
}
Path relative = modelDir.relativize(path);
ret.add(relative.toString());
}
return ret.toArray(Utils.EMPTY_ARRAY);
} catch (IOException e) {
throw new AssertionError("Failed list files", e);
}
}
/** {@inheritDoc} */
@Override
public void close() {
// TODO workaround for MXNet Engine crash issue
JnaUtils.waitAll();
super.close();
}
@SuppressWarnings("PMD.UseConcurrentHashMap")
private void loadParameters(Path paramFile, Map<String, ?> options)
throws IOException, MalformedModelException {
if (readParameters(paramFile, options)) {
return;
}
logger.debug("DJL formatted model not found, try to find MXNet model");
NDList paramNDlist = manager.load(paramFile);
MxSymbolBlock symbolBlock = (MxSymbolBlock) block;
List<Parameter> parameters = symbolBlock.getAllParameters();
Map<String, Parameter> map = new LinkedHashMap<>();
parameters.forEach(p -> map.put(p.getName(), p));
for (NDArray nd : paramNDlist) {
String key = nd.getName();
if (key == null) {
throw new IllegalArgumentException("Array names must be present in parameter file");
}
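            // MXNet .params files prefix each array name with its type, e.g. "arg:conv0_weight"
            // or "aux:bn0_moving_mean"; split off the prefix to recover the parameter name.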
String paramName = key.split(":", 2)[1];
Parameter parameter = map.remove(paramName);
parameter.setArray(nd);
}
symbolBlock.setInputNames(new ArrayList<>(map.keySet()));
        // TODO: Find a better way to infer the model DataType from the SymbolBlock.
dataType = paramNDlist.head().getDataType();
logger.debug("MXNet Model {} ({}) loaded successfully.", paramFile, dataType);
}
}
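
A hedged usage sketch tying together the options read by load() above ("epoch" via paramPathResolver, "hasParameter", and "MxOptimizeFor"); the paths, model name, and the "mkldnn" backend value are placeholders, not confirmed API values:

import ai.djl.MalformedModelException;
import ai.djl.Model;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

public final class MxModelLoadDemo {
    public static void main(String[] args) throws IOException, MalformedModelException {
        Path modelPath = Paths.get("/path/to/model"); // placeholder directory
        Map<String, String> options = new HashMap<>();
        options.put("epoch", "3");              // load an explicit epoch instead of the latest
        options.put("hasParameter", "true");    // the default; "false" skips parameter loading
        options.put("MxOptimizeFor", "mkldnn"); // hypothetical backend name for optimizeFor()
        try (Model model = Model.newInstance("squeezenet")) {
            model.load(modelPath, "squeezenet", options);
        }
    }
}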
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxNDArray.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.LazyNDArray;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.NDScope;
import ai.djl.ndarray.internal.NDArrayEx;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.util.NativeResource;
import com.sun.jna.Native;
import com.sun.jna.Pointer;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.stream.IntStream;
/** {@code MxNDArray} is the MXNet implementation of {@link NDArray}. */
public class MxNDArray extends NativeResource<Pointer> implements LazyNDArray {
private String name;
private Device device;
private SparseFormat sparseFormat;
private DataType dataType;
private Shape shape;
    // use a Boolean object to maintain three states: false, true,
    // and null, which means the flag has not been set by the native engine yet
private Boolean hasGradient;
protected MxNDManager manager;
private MxNDArrayEx mxNDArrayEx;
/**
* Constructs an MxNDArray from a native handle and metadata (internal. Use {@link NDManager}
* instead).
*
* @param manager the manager to attach the new array to
* @param handle the pointer to the native MxNDArray memory
* @param device the device the new array will be located on
* @param shape the shape of the new array
* @param dataType the dataType of the new array
* @param hasGradient the gradient status of the new array
*/
MxNDArray(
MxNDManager manager,
Pointer handle,
Device device,
Shape shape,
DataType dataType,
boolean hasGradient) {
this(manager, handle);
this.device = device;
// shape check
if (Arrays.stream(shape.getShape()).anyMatch(s -> s < 0)) {
throw new IllegalArgumentException("The shape must be >= 0");
}
this.shape = shape;
this.dataType = dataType;
this.hasGradient = hasGradient;
}
/**
* Constructs an MxNDArray from a native handle (internal. Use {@link NDManager} instead).
*
* @param manager the manager to attach the new array to
* @param handle the pointer to the native MxNDArray memory
*/
MxNDArray(MxNDManager manager, Pointer handle) {
super(handle);
this.manager = manager;
mxNDArrayEx = new MxNDArrayEx(this);
manager.attachInternal(getUid(), this);
NDScope.register(this);
}
/**
* Constructs a sparse MxNDArray from a native handle (internal. Use {@link NDManager} instead).
*
* @param manager the manager to attach the new array to
* @param handle the pointer to the native MxNDArray memory
* @param fmt the sparse format
*/
MxNDArray(MxNDManager manager, Pointer handle, SparseFormat fmt) {
this(manager, handle);
this.sparseFormat = fmt;
}
/** {@inheritDoc} */
@Override
public MxNDManager getManager() {
return manager;
}
/** {@inheritDoc} */
@Override
public String getName() {
return name;
}
/** {@inheritDoc} */
@Override
public void setName(String name) {
this.name = name;
}
/** {@inheritDoc} */
@Override
public DataType getDataType() {
if (dataType == null) {
dataType = JnaUtils.getDataType(getHandle());
}
return dataType;
}
/** {@inheritDoc} */
@Override
public Device getDevice() {
if (device == null) {
device = JnaUtils.getDevice(getHandle());
}
return device;
}
/** {@inheritDoc} */
@Override
public Shape getShape() {
if (shape == null) {
shape = JnaUtils.getShape(getHandle());
}
return shape;
}
/** {@inheritDoc} */
@Override
public SparseFormat getSparseFormat() {
if (sparseFormat == null) {
sparseFormat = JnaUtils.getStorageType(getHandle());
}
return sparseFormat;
}
/** {@inheritDoc} */
@Override
public void attach(NDManager manager) {
detach();
this.manager = (MxNDManager) manager;
manager.attachInternal(getUid(), this);
}
/** {@inheritDoc} */
@Override
public void returnResource(NDManager manager) {
detach();
this.manager = (MxNDManager) manager;
manager.attachUncappedInternal(getUid(), this);
}
/** {@inheritDoc} */
@Override
public void tempAttach(NDManager manager) {
NDManager original = this.manager;
detach();
this.manager = (MxNDManager) manager;
manager.tempAttachInternal(original, getUid(), this);
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detachInternal(getUid());
manager = MxNDManager.getSystemManager();
}
private NDArray duplicate(
NDManager manager, Shape shape, DataType dataType, Device device, String name) {
// TODO get copy parameter
NDArray array = manager.create(shape, dataType, device);
array.setName(name);
copyTo(array);
return array;
}
/** {@inheritDoc} */
@Override
public NDArray toDevice(Device device, boolean copy) {
if (device.equals(getDevice()) && !copy) {
return this;
}
return duplicate(getManager(), getShape(), getDataType(), device, getName());
}
/** {@inheritDoc} */
@Override
public NDArray toType(DataType dataType, boolean copy) {
if (dataType.equals(getDataType()) && !copy) {
return this;
}
return duplicate(getManager(), getShape(), dataType, getDevice(), getName());
}
/**
* Computes the gradients of the NDArray w.r.t variables.
*
* @param retainGraph whether to retain the computation graph for another backward pass on the
* same graph. By default, the computation history is cleared.
*/
public void backward(boolean retainGraph) {
JnaUtils.autogradBackward(new NDList(this), retainGraph ? 1 : 0);
}
/** {@inheritDoc} */
@Override
public void setRequiresGradient(boolean requiresGrad) {
if ((requiresGrad && hasGradient()) || (!requiresGrad && !hasGradient())) {
return;
}
MxNDArray grad =
hasGradient() ? (MxNDArray) getGradient() : createGradient(getSparseFormat());
        // DJL goes with WRITE, as it is the only GradReq mode MXNet supports
int gradReqValue = requiresGrad ? GradReq.WRITE.getValue() : GradReq.NULL.getValue();
IntBuffer gradReqBuffer = IntBuffer.allocate(1);
gradReqBuffer.put(0, gradReqValue);
JnaUtils.autogradMarkVariables(1, getHandle(), gradReqBuffer, grad.getHandle());
hasGradient = requiresGrad;
grad.close();
}
private MxNDArray createGradient(SparseFormat format) {
try (MxNDArray zeros = (MxNDArray) manager.zeros(getShape(), getDataType(), getDevice())) {
return (MxNDArray) zeros.toSparse(format);
}
}
/** {@inheritDoc} */
@Override
public NDArray getGradient() {
if (!hasGradient()) {
throw new IllegalStateException(
"No gradient attached to this NDArray, please call array.setRequiresGradient()"
+ " on your NDArray or block.setInitializer() on your Block");
}
Pointer pointer = JnaUtils.getGradient(getHandle());
return manager.create(pointer);
}
/** {@inheritDoc} */
@Override
public boolean hasGradient() {
if (hasGradient == null) {
Pointer pointer = JnaUtils.getGradient(getHandle());
hasGradient = pointer != null;
}
return hasGradient;
}
/** {@inheritDoc} */
@Override
public NDArray stopGradient() {
Pointer pointer = JnaUtils.detachGradient(getHandle());
return manager.create(pointer);
}
/** {@inheritDoc} */
@Override
public String[] toStringArray(Charset charset) {
throw new UnsupportedOperationException("String NDArray is not supported!");
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer(boolean tryDirect) {
if (getSparseFormat() != SparseFormat.DENSE) {
throw new IllegalStateException("Require Dense NDArray, actual " + getSparseFormat());
}
Shape sh = getShape();
DataType dType = getDataType();
long product = sh.size();
long len = dType.getNumOfBytes() * product;
ByteBuffer bb = manager.allocateDirect(Math.toIntExact(len));
Pointer pointer = Native.getDirectBufferPointer(bb);
JnaUtils.syncCopyToCPU(getHandle(), pointer, Math.toIntExact(product));
return bb;
}
/** {@inheritDoc} */
@Override
public void set(Buffer buffer) {
int size = Math.toIntExact(size());
DataType type = getDataType();
BaseNDManager.validateBuffer(buffer, type, size);
if (buffer.isDirect()) {
JnaUtils.syncCopyFromCPU(getHandle(), buffer, size);
return;
}
ByteBuffer bb = manager.allocateDirect(size * type.getNumOfBytes());
BaseNDManager.copyBuffer(buffer, bb);
JnaUtils.syncCopyFromCPU(getHandle(), bb, size);
}
/** {@inheritDoc} */
@Override
public NDArray gather(NDArray index, int axis) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray gatherNd(NDArray index) {
MxOpParams params = new MxOpParams();
return manager.invoke("gather_nd", new NDList(this, index), params).singletonOrThrow();
}
/** {@inheritDoc} */
@Override
public NDArray take(NDManager manager, NDArray index) {
MxOpParams params = new MxOpParams();
params.add("mode", "wrap");
return manager.invoke("take", new NDList(this.flatten(), index), params).singletonOrThrow();
}
/** {@inheritDoc} */
@Override
public NDArray put(NDArray index, NDArray value) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray scatter(NDArray index, NDArray value, int axis) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public void copyTo(NDArray ndArray) {
if (!(ndArray instanceof MxNDArray)) {
ndArray.set(toByteBuffer());
return;
}
Shape inShape = getShape();
Shape destShape = ndArray.getShape();
if (!Arrays.equals(inShape.getShape(), destShape.getShape())) {
throw new IllegalArgumentException(
"shape are diff. Required: " + destShape + ", Actual " + inShape);
}
manager.invoke("_npi_copyto", new NDArray[] {this}, new NDArray[] {ndArray}, null);
}
/** {@inheritDoc} */
@Override
public NDArray booleanMask(NDArray index, int axis) {
if (isScalar() || index.isScalar()) {
throw new IllegalArgumentException("booleanMask didn't support scalar!");
}
        // TODO remove reshape when MXNet numpy supports multi-dim index
// and boolean NDArray reshape
Shape remainingDims = getShape().slice(index.getShape().dimension());
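        // e.g. this shape (2, 3, 4) with index shape (2, 3) leaves remainingDims (4),
        // so the reshape below becomes {-1, 4}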
// create a reshape array {-1, remainingDims}
long[] reshape = new long[remainingDims.dimension() + 1];
reshape[0] = -1;
System.arraycopy(remainingDims.getShape(), 0, reshape, 1, remainingDims.dimension());
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
try (NDArray reshaped = this.reshape(reshape);
NDArray reshapedIndex = index.toType(DataType.INT32, false).reshape(-1);
NDArray result =
manager.invoke(
"_npi_boolean_mask",
new NDArray[] {reshaped, reshapedIndex},
params)) {
return result.reshape(reshape);
}
}
/** {@inheritDoc} */
@Override
public NDArray sequenceMask(NDArray sequenceLength, float value) {
if (getShape().dimension() < 2 || getShape().isScalar() || getShape().hasZeroDimension()) {
throw new IllegalArgumentException(
"sequenceMask is not supported for NDArray with less than 2 dimensions");
}
Shape expectedSequenceLengthShape = new Shape(getShape().get(0));
if (!sequenceLength.getShape().equals(expectedSequenceLengthShape)) {
throw new IllegalArgumentException("SequenceLength must be of shape [batchSize]");
}
MxOpParams params = new MxOpParams();
params.add("value", value);
params.add("use_sequence_length", true);
params.add("axis", 1);
NDList mask = new NDList(this, manager.from(sequenceLength));
return manager.invoke("_npx_sequence_mask", mask, params).head();
}
/** {@inheritDoc} */
@Override
public NDArray sequenceMask(NDArray sequenceLength) {
return sequenceMask(sequenceLength, 0);
}
/** {@inheritDoc} */
@Override
public boolean contentEquals(Number number) {
if (number == null || getDataType().isBoolean()) {
return false;
}
try (NDArray result = eq(number)) {
return result.all().getBoolean();
}
}
/** {@inheritDoc} */
@Override
public boolean contentEquals(NDArray other) {
if (other == null || (!shapeEquals(other))) {
return false;
}
if (getDataType() != other.getDataType()) {
return false;
}
other = manager.from(other);
try (NDArray result = eq(other).toType(DataType.INT32, false)) {
return result.all().getBoolean();
}
}
/** {@inheritDoc} */
@Override
public NDArray eq(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_equal_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray eq(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_equal", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray neq(Number other) {
MxOpParams params = new MxOpParams();
params.add("scalar", other.toString());
return manager.invoke("_npi_not_equal_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray neq(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_not_equal", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray gt(Number other) {
MxOpParams params = new MxOpParams();
params.add("scalar", other.toString());
return manager.invoke("_npi_greater_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray gt(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_greater", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray gte(Number other) {
MxOpParams params = new MxOpParams();
params.add("scalar", other.toString());
return manager.invoke("_npi_greater_equal_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray gte(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_greater_equal", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray lt(Number other) {
MxOpParams params = new MxOpParams();
params.add("scalar", other.toString());
return manager.invoke("_npi_less_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray lt(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_less", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray lte(Number other) {
MxOpParams params = new MxOpParams();
params.add("scalar", other.toString());
return manager.invoke("_npi_less_equal_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray lte(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_less_equal", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray add(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_add_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray add(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_add", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray sub(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_subtract_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray sub(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_subtract", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray mul(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_multiply_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray mul(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_multiply", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray toSparse(SparseFormat fmt) {
if (fmt != SparseFormat.DENSE
&& fmt != SparseFormat.CSR
&& fmt != SparseFormat.ROW_SPARSE) {
throw new UnsupportedOperationException(fmt + " is not supported");
}
if (fmt == getSparseFormat()) {
return duplicate();
}
return castStorage(fmt);
}
private NDArray castStorage(SparseFormat fmt) {
MxOpParams params = new MxOpParams();
params.setParam("stype", fmt.getType());
return manager.invoke("cast_storage", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray div(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_true_divide_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray div(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_true_divide", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray mod(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_mod_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray mod(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_mod", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray pow(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_power_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray pow(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_power", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray addi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
manager.invoke("_npi_add_scalar", new NDArray[] {this}, new NDArray[] {this}, params);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray addi(NDArray other) {
other = manager.from(other);
manager.invoke("_npi_add", new NDArray[] {this, other}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray subi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
manager.invoke("_npi_subtract_scalar", new NDArray[] {this}, new NDArray[] {this}, params);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray subi(NDArray other) {
other = manager.from(other);
manager.invoke("_npi_subtract", new NDArray[] {this, other}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray muli(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
manager.invoke("_npi_multiply_scalar", new NDArray[] {this}, new NDArray[] {this}, params);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray muli(NDArray other) {
other = manager.from(other);
manager.invoke("_npi_multiply", new NDArray[] {this, other}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray divi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
manager.invoke(
"_npi_true_divide_scalar", new NDArray[] {this}, new NDArray[] {this}, params);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray divi(NDArray other) {
other = manager.from(other);
manager.invoke("_npi_true_divide", new NDArray[] {this, other}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray modi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
manager.invoke("_npi_mod_scalar", new NDArray[] {this}, new NDArray[] {this}, params);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray modi(NDArray other) {
other = manager.from(other);
manager.invoke("_npi_mod", new NDArray[] {this, other}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray powi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
manager.invoke("_npi_power_scalar", new NDArray[] {this}, new NDArray[] {this}, params);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray powi(NDArray other) {
other = manager.from(other);
manager.invoke("_npi_power", new NDArray[] {this, other}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray neg() {
return manager.invoke("_npi_negative", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray negi() {
manager.invoke("_npi_negative", new NDArray[] {this}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray sign() {
return manager.invoke("_npi_sign", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray signi() {
manager.invoke("_npi_sign", new NDArray[] {this}, new NDArray[] {this}, null);
return this;
}
/** {@inheritDoc} */
@Override
public NDArray abs() {
return manager.invoke("_npi_absolute", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray square() {
return manager.invoke("_npi_square", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray sqrt() {
return manager.invoke("_npi_sqrt", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray cbrt() {
return manager.invoke("_npi_cbrt", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray floor() {
return manager.invoke("_npi_floor", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray ceil() {
return manager.invoke("_npi_ceil", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray round() {
return manager.invoke("round", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray trunc() {
return manager.invoke("_npi_trunc", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray exp() {
return manager.invoke("_npi_exp", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray gammaln() {
return manager.invoke("gammaln", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray log() {
return manager.invoke("_npi_log", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray log10() {
return manager.invoke("_npi_log10", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray log2() {
return manager.invoke("_npi_log2", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray sin() {
return manager.invoke("_npi_sin", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray cos() {
return manager.invoke("_npi_cos", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray tan() {
return manager.invoke("_npi_tan", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray asin() {
return manager.invoke("_npi_arcsin", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray acos() {
return manager.invoke("_npi_arccos", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray atan() {
return manager.invoke("_npi_arctan", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray atan2(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_arctan2", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray sinh() {
return manager.invoke("_npi_sinh", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray cosh() {
return manager.invoke("_npi_cosh", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray tanh() {
return manager.invoke("_npi_tanh", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray asinh() {
return manager.invoke("_npi_arcsinh", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray acosh() {
return manager.invoke("_npi_arccosh", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray atanh() {
return manager.invoke("_npi_arctanh", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray toDegrees() {
return manager.invoke("_npi_degrees", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray toRadians() {
return manager.invoke("_npi_radians", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray maximum(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_maximum_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray maximum(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_maximum", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray minimum(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return manager.invoke("_npi_minimum_scalar", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray minimum(NDArray other) {
other = manager.from(other);
return manager.invoke("_npi_minimum", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray max() {
return manager.invoke("_np_max", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray max(int[] axes) {
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
return manager.invoke("_np_max", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray max(int[] axes, boolean keepDims) {
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
params.addParam("keepdims", keepDims);
return manager.invoke("_np_max", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray min() {
return manager.invoke("_np_min", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray min(int[] axes, boolean keepDims) {
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
params.addParam("keepdims", keepDims);
return manager.invoke("_np_min", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray sum() {
        // TODO: boolean NDArray is currently not supported on Windows
DataType target = getDataType();
if (System.getProperty("os.name").toLowerCase().contains("win")) {
if (!target.isFloating()) {
try (NDArray thisArr = toType(DataType.FLOAT32, false)) {
if (target == DataType.BOOLEAN) {
target = DataType.INT64;
}
try (NDArray array = manager.invoke("_np_sum", thisArr, null)) {
return array.toType(target, false);
}
}
}
}
NDArray array = manager.invoke("_np_sum", this, null);
if (target.isInteger()) {
array = array.toType(DataType.INT64, false);
}
return array;
}
/** {@inheritDoc} */
@Override
public NDArray sum(int[] axes, boolean keepDims) {
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
params.addParam("keepdims", keepDims);
return manager.invoke("_np_sum", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray cumProd(int axis) {
throw new UnsupportedOperationException("Not supported by MXNet");
}
/** {@inheritDoc} */
@Override
public NDArray cumProd(int axis, DataType dataType) {
throw new UnsupportedOperationException("Not supported by MXNet");
}
/** {@inheritDoc} */
@Override
public NDArray prod() {
return manager.invoke("_np_prod", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray prod(int[] axes, boolean keepDims) {
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
params.addParam("keepdims", keepDims);
return manager.invoke("_np_prod", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray mean() {
return manager.invoke("_npi_mean", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray mean(int[] axes, boolean keepDims) {
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
params.addParam("keepdims", keepDims);
return manager.invoke("_npi_mean", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray normalize(double p, long dim, double eps) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray rotate90(int times, int[] axes) {
if (axes.length != 2) {
throw new IllegalArgumentException("Axes must be 2");
}
MxOpParams params = new MxOpParams();
params.addTupleParam("axes", axes);
params.addParam("k", times);
return manager.invoke("_npi_rot90", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray trace(int offset, int axis1, int axis2) {
MxOpParams params = new MxOpParams();
params.addParam("offset", offset);
params.addParam("axis1", axis1);
params.addParam("axis2", axis2);
return manager.invoke("_np_trace", this, params);
}
/** {@inheritDoc} */
@Override
public NDList split(long[] indices, int axis) {
if (indices.length == 0) {
return new NDList(this);
}
MxOpParams params = new MxOpParams();
        // follow the numpy behavior: split indices must include a leading 0
if (indices[0] != 0) {
long[] tempIndices = new long[indices.length + 1];
tempIndices[0] = 0;
System.arraycopy(indices, 0, tempIndices, 1, indices.length);
indices = tempIndices;
}
params.addTupleParam("indices", indices);
params.addParam("axis", axis);
params.addParam("squeeze_axis", false);
return manager.invoke("_npi_split", new NDList(this), params);
}
/** {@inheritDoc} */
@Override
public NDArray flatten() {
return reshape(new Shape(Math.toIntExact(size())));
}
/** {@inheritDoc} */
@Override
public NDArray flatten(int startDim, int endDim) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray fft(long length, long axis) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray ifft(long length, long axis) {
        throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray irfft(long length, long axis) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray rfft(long length, long axis) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray stft(
long nFft,
long hopLength,
boolean center,
NDArray window,
boolean normalize,
boolean returnComplex) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray fft2(long[] sizes, long[] axes) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray ifft2(long[] sizes, long[] axes) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray pad(Shape padding, double value) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray reshape(Shape shape) {
MxOpParams params = new MxOpParams();
params.addParam("newshape", shape);
return manager.invoke("_np_reshape", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray expandDims(int axis) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
return manager.invoke("_npi_expand_dims", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray squeeze() {
return manager.invoke("_np_squeeze", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray squeeze(int[] axes) {
if (axes.length == 0) {
return squeeze();
}
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
return manager.invoke("_np_squeeze", this, params);
}
/** {@inheritDoc} */
@Override
public NDList unique(Integer dim, boolean sorted, boolean returnInverse, boolean returnCounts) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray logicalAnd(NDArray other) {
        // TODO switch to numpy op, although the current op supports zero-dim, scalar
NDArray thisArr =
(getDataType() == DataType.BOOLEAN) ? toType(DataType.INT32, false) : this;
other =
(other.getDataType() == DataType.BOOLEAN)
? other.toType(DataType.INT32, false)
: other;
other = manager.from(other);
return manager.invoke("broadcast_logical_and", new NDArray[] {thisArr, other}, null)
.toType(DataType.BOOLEAN, false);
}
/** {@inheritDoc} */
@Override
public NDArray logicalOr(NDArray other) {
        // TODO switch to numpy op, although the current op supports zero-dim, scalar
NDArray thisArr =
(getDataType() == DataType.BOOLEAN) ? toType(DataType.INT32, false) : this;
other =
(other.getDataType() == DataType.BOOLEAN)
? other.toType(DataType.INT32, false)
: other;
other = manager.from(other);
return manager.invoke("broadcast_logical_or", new NDArray[] {thisArr, other}, null)
.toType(DataType.BOOLEAN, false);
}
/** {@inheritDoc} */
@Override
public NDArray logicalXor(NDArray other) {
        // TODO switch to numpy op, although the current op supports zero-dim, scalar
NDArray thisArr =
(getDataType() == DataType.BOOLEAN) ? toType(DataType.INT32, false) : this;
other =
(other.getDataType() == DataType.BOOLEAN)
? other.toType(DataType.INT32, false)
: other;
other = manager.from(other);
return manager.invoke("broadcast_logical_xor", new NDArray[] {thisArr, other}, null)
.toType(DataType.BOOLEAN, false);
}
/** {@inheritDoc} */
@Override
public NDArray logicalNot() {
return manager.invoke("_npi_logical_not", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray argSort(int axis, boolean ascending) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
        // be careful: the MXNet numpy argsort op doesn't officially support this param
params.addParam("is_ascend", ascending);
params.setDataType(DataType.INT64);
return manager.invoke("_npi_argsort", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray sort(int axis) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
return manager.invoke("_npi_sort", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray sort() {
return manager.invoke("_npi_sort", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray softmax(int axis) {
// MXNet softmax op bug on GPU
if (isEmpty()) {
return manager.create(getShape());
}
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
return manager.invoke("_npx_softmax", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray logSoftmax(int axis) {
// MXNet logsoftmax op bug on GPU
if (isEmpty()) {
return manager.create(getShape());
}
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
return manager.invoke("_npx_log_softmax", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray cumSum() {
return manager.invoke("_np_cumsum", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray cumSum(int axis) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
return manager.invoke("_np_cumsum", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray diagonal() {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public NDArray diagonal(int offset) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public NDArray diagonal(int offset, int axis1, int axis2) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public void intern(NDArray replaced) {
MxNDArray arr = (MxNDArray) replaced;
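        // Atomically swap in the replacement's native handle (nulling it out on the
        // replacement), so this array takes sole ownership of the new memory.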
Pointer oldHandle = handle.getAndSet(arr.handle.getAndSet(null));
JnaUtils.waitToRead(oldHandle);
JnaUtils.freeNdArray(oldHandle);
// dereference old ndarray
arr.close();
}
/** {@inheritDoc} */
@Override
public NDArray isInfinite() {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray inverse() {
return manager.invoke("inverse", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray isNaN() {
return manager.invoke("_npi_isnan", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray toDense() {
if (!isSparse()) {
return duplicate();
}
return castStorage(SparseFormat.DENSE);
}
/** {@inheritDoc} */
@Override
public NDArray tile(long repeats) {
// zero-dim
if (isEmpty()) {
return duplicate();
}
// scalar
int dim = (isScalar()) ? 1 : getShape().dimension();
long[] repeatsArray = new long[dim];
Arrays.fill(repeatsArray, repeats);
return tile(repeatsArray);
}
/** {@inheritDoc} */
@Override
public NDArray tile(int axis, long repeats) {
// scalar
if (isScalar()) {
throw new IllegalArgumentException("scalar didn't support specifying axis");
}
long[] repeatsArray = new long[getShape().dimension()];
Arrays.fill(repeatsArray, 1);
repeatsArray[withAxis(axis)] = repeats;
return tile(repeatsArray);
}
/** {@inheritDoc} */
@Override
public NDArray tile(long[] repeats) {
MxOpParams params = new MxOpParams();
params.addTupleParam("reps", repeats);
return manager.invoke("_npi_tile", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray tile(Shape desiredShape) {
return tile(repeatsToMatchShape(desiredShape));
}
/** {@inheritDoc} */
@Override
public NDArray repeat(long repeats) {
// zero-dim
if (isEmpty()) {
return duplicate();
}
// scalar
int dim = (isScalar()) ? 1 : getShape().dimension();
long[] repeatsArray = new long[dim];
Arrays.fill(repeatsArray, repeats);
return repeat(repeatsArray);
}
/** {@inheritDoc} */
@Override
public NDArray repeat(int axis, long repeats) {
long[] repeatsArray = new long[getShape().dimension()];
Arrays.fill(repeatsArray, 1);
repeatsArray[withAxis(axis)] = repeats;
return repeat(repeatsArray);
}
/** {@inheritDoc} */
@Override
public NDArray repeat(long[] repeats) {
        // TODO get rid of the for loop once the bug in MXNet np.repeat is fixed
NDArray array = this;
int baseAxis = getShape().dimension() - repeats.length;
for (int i = 0; i < repeats.length; i++) {
if (repeats[i] > 1) {
NDArray previousArray = array;
MxOpParams params = new MxOpParams();
params.addParam("repeats", repeats[i]);
params.addParam("axis", baseAxis + i);
array = manager.invoke("_np_repeat", array, params);
if (previousArray != this) {
previousArray.close();
}
}
}
return array;
}
/** {@inheritDoc} */
@Override
public NDArray repeat(Shape desiredShape) {
return repeat(repeatsToMatchShape(desiredShape));
}
/** {@inheritDoc} */
@Override
public NDArray dot(NDArray other) {
other = manager.from(other);
return manager.invoke("_np_dot", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray matMul(NDArray other) {
if (isScalar() || other.isScalar()) {
throw new IllegalArgumentException("scalar is not allowed for matMul()");
}
other = manager.from(other);
return manager.invoke("_npi_matmul", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray batchMatMul(NDArray other) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public NDArray xlogy(NDArray other) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override
public NDArray clip(Number min, Number max) {
MxOpParams params = new MxOpParams();
params.addParam("a_min", min);
params.addParam("a_max", max);
return manager.invoke("_npi_clip", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray swapAxes(int axis1, int axis2) {
MxOpParams params = new MxOpParams();
params.addParam("dim1", axis1);
params.addParam("dim2", axis2);
return manager.invoke("_npi_swapaxes", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray flip(int... axes) {
MxOpParams params = new MxOpParams();
params.addTupleParam("axis", axes);
return manager.invoke("_npi_flip", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray transpose() {
return manager.invoke("_np_transpose", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray transpose(int... dimensions) {
if (Arrays.stream(dimensions).anyMatch(d -> d < 0)) {
throw new UnsupportedOperationException(
"Passing -1 for broadcasting the dimension is not currently supported");
}
if (!Arrays.equals(
Arrays.stream(dimensions).sorted().toArray(),
IntStream.range(0, getShape().dimension()).toArray())) {
throw new IllegalArgumentException(
"You must include each of the dimensions from 0 until "
+ getShape().dimension());
}
MxOpParams params = new MxOpParams();
params.addTupleParam("axes", dimensions);
return manager.invoke("_np_transpose", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray broadcast(Shape shape) {
MxOpParams params = new MxOpParams();
params.setShape(shape);
return manager.invoke("_npi_broadcast_to", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray argMax() {
if (isEmpty()) {
throw new IllegalArgumentException("attempt to get argMax of an empty NDArray");
}
return manager.invoke("_npi_argmax", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray argMax(int axis) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
return manager.invoke("_npi_argmax", this, params);
}
/** {@inheritDoc} */
@Override
public NDList topK(int k, int axis, boolean largest, boolean sorted) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray argMin() {
if (isEmpty()) {
throw new IllegalArgumentException("attempt to get argMin of an empty NDArray");
}
return manager.invoke("_npi_argmin", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray argMin(int axis) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
return manager.invoke("_npi_argmin", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray percentile(Number percentile) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray percentile(Number percentile, int[] dimension) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray median() {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray median(int[] axes) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray nonzero() {
NDArray thisArr =
(getDataType() == DataType.BOOLEAN) ? toType(DataType.INT32, false) : this;
return manager.invoke("_npx_nonzero", thisArr, null);
}
/** {@inheritDoc} */
@Override
public NDArray erfinv() {
return manager.invoke("erfinv", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray erf() {
return manager.invoke("erf", this, null);
}
/** {@inheritDoc} */
@Override
public NDArray norm(boolean keepDims) {
MxOpParams params = new MxOpParams();
params.add("flag", -2);
params.addParam("keepdims", keepDims);
return manager.invoke("_npi_norm", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray norm(int ord, int[] axes, boolean keepDims) {
MxOpParams params = new MxOpParams();
params.addParam("ord", (double) ord);
params.addTupleParam("axis", axes);
params.addParam("keepdims", keepDims);
return manager.invoke("_npi_norm", this, params);
}
/** {@inheritDoc} */
@Override
public NDArray oneHot(int depth, float onValue, float offValue, DataType dataType) {
MxOpParams params = new MxOpParams();
params.add("depth", depth);
params.add("on_value", onValue);
params.add("off_value", offValue);
params.add("dtype", dataType);
return manager.invoke("_npx_one_hot", this, params).toType(dataType, false);
}
/** {@inheritDoc} */
@Override
public NDArray batchDot(NDArray other) {
other = manager.from(other);
return manager.invoke("_npx_batch_dot", new NDArray[] {this, other}, null);
}
/** {@inheritDoc} */
@Override
public NDArray complex() {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray real() {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray conj() {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArray diff(int n, int dim) {
throw new UnsupportedOperationException("Not implemented yet.");
}
/** {@inheritDoc} */
@Override
public NDArrayEx getNDArrayInternal() {
return mxNDArrayEx;
}
private long[] repeatsToMatchShape(Shape desiredShape) {
Shape curShape = getShape();
int dimension = curShape.dimension();
if (desiredShape.dimension() > dimension) {
throw new IllegalArgumentException("The desired shape has too many dimensions");
}
if (desiredShape.dimension() < dimension) {
int additionalDimensions = dimension - desiredShape.dimension();
desiredShape = curShape.slice(0, additionalDimensions).addAll(desiredShape);
}
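        // e.g. curShape (2, 3) with desiredShape (4, 3) yields repeats {2, 1}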
long[] repeats = new long[dimension];
for (int i = 0; i < dimension; i++) {
if (curShape.get(i) == 0 || desiredShape.get(i) % curShape.get(i) != 0) {
throw new IllegalArgumentException(
"The desired shape is not a multiple of the original shape");
}
repeats[i] = Math.round(Math.ceil((double) desiredShape.get(i) / curShape.get(i)));
}
return repeats;
}
private int withAxis(int axis) {
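        // floorMod maps negative axes (e.g. -1 for the last axis) into [0, dimension)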
return Math.floorMod(axis, getShape().dimension());
}
/** {@inheritDoc} */
@Override
public void waitToRead() {
JnaUtils.waitToRead(getHandle());
}
/** {@inheritDoc} */
@Override
public void waitToWrite() {
JnaUtils.waitToWrite(getHandle());
}
/** {@inheritDoc} */
@Override
public void waitAll() {
        JnaUtils.waitAll();
}
/** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
if (obj instanceof NDArray) {
return contentEquals((NDArray) obj);
}
return false;
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return 0;
}
/** {@inheritDoc} */
@Override
public String toString() {
if (isReleased()) {
return "This array is already closed";
}
return toDebugString();
}
/** {@inheritDoc} */
@Override
public void close() {
onClose();
Pointer pointer = handle.getAndSet(null);
if (pointer != null) {
JnaUtils.waitToRead(pointer);
JnaUtils.freeNdArray(pointer);
}
manager.detachInternal(getUid());
}
}
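
A hedged lifecycle sketch (assuming NDManager.newBaseManager() and the factory methods shown). Every MxNDArray registers itself with its manager in the constructor above, so closing the manager releases the native memory of everything still attached:

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;

public final class NDArrayLifecycleDemo {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            NDArray a = manager.create(new float[] {1f, 2f, 3f});
            NDArray b = a.add(2f); // dispatches to "_npi_add_scalar" on the MXNet engine
            System.out.println(b); // toString() above renders a debug string
        } // closing the manager frees the native memory of every attached array
    }
}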
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxNDArrayEx.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.NDUtils;
import ai.djl.ndarray.index.NDArrayIndexer;
import ai.djl.ndarray.internal.NDArrayEx;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.nn.recurrent.RNN;
import ai.djl.util.Preconditions;
import java.util.Arrays;
import java.util.List;
/** {@code MxNDArrayEx} is the MXNet implementation of the {@link NDArrayEx}. */
@SuppressWarnings("dangling-doc-comments")
class MxNDArrayEx implements NDArrayEx {
private MxNDArray array;
/**
* Constructs an {@code MxNDArrayEx} given a {@link NDArray}.
*
* @param parent the {@link NDArray} to extend
*/
MxNDArrayEx(MxNDArray parent) {
this.array = parent;
}
// TODO only used to calculate zero-dim numpy shape
    // remove it once MXNet has all the np ops that we support
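    // e.g. lhs (3, 1, 5) and rhs (4, 5) broadcast to (3, 4, 5)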
private Shape deriveBroadcastedShape(Shape lhs, Shape rhs) {
long[] result = new long[Math.max(lhs.dimension(), rhs.dimension())];
long lDiff = result.length - lhs.dimension();
long rDiff = result.length - rhs.dimension();
for (int i = 0; i < result.length; i++) {
long l = 1;
long r = 1;
if (i >= lDiff) {
l = lhs.get(Math.toIntExact(i - lDiff));
}
if (i >= rDiff) {
r = rhs.get(Math.toIntExact(i - rDiff));
}
if (l != r) {
if (l != 1 && r != 1) {
throw new IllegalArgumentException(
"operands could not be broadcast together with shapes "
+ lhs
+ " "
+ rhs);
}
result[i] = (l == 1) ? r : l;
} else {
result[i] = l;
}
}
return new Shape(result);
}
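    // Illustrative example (added note, not in the original source): shapes are aligned from the
    // right, numpy-style, so deriveBroadcastedShape((2, 1, 3), (4, 3)) yields (2, 4, 3), while
    // deriveBroadcastedShape((2, 3), (3, 2)) throws because 2 and 3 conflict on an axis.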
////////////////////////////////////////
// NDArrays
////////////////////////////////////////
/** {@inheritDoc} */
@Override
public NDArray rdiv(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return getManager().invoke("_rdiv_scalar", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray rdivi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
getManager().invoke("_rdiv_scalar", new NDArray[] {array}, new NDArray[] {array}, params);
return array;
}
/** {@inheritDoc} */
@Override
public NDArray rdivi(NDArray b) {
b = getManager().from(b);
getManager().invoke("elemwise_div", new NDArray[] {b, array}, new NDArray[] {array}, null);
return array;
}
/** {@inheritDoc} */
@Override
public NDArray rsub(Number n) {
return array.sub(n).neg();
}
/** {@inheritDoc} */
@Override
public NDArray rsub(NDArray b) {
return array.sub(b).neg();
}
/** {@inheritDoc} */
@Override
public NDArray rsubi(Number n) {
return array.subi(n).negi();
}
/** {@inheritDoc} */
@Override
public NDArray rsubi(NDArray b) {
return array.subi(b).negi();
}
/** {@inheritDoc} */
@Override
public NDArray rmod(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return getManager().invoke("_npi_rmod_scalar", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray rmod(NDArray b) {
return b.mod(array);
}
/** {@inheritDoc} */
@Override
public NDArray rmodi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
getManager()
.invoke("_npi_rmod_scalar", new NDArray[] {array}, new NDArray[] {array}, params);
return array;
}
/** {@inheritDoc} */
@Override
public NDArray rmodi(NDArray b) {
b = getManager().from(b);
getManager().invoke("_npi_mod", new NDArray[] {b, array}, new NDArray[] {array}, null);
return array;
}
/** {@inheritDoc} */
@Override
public NDArray rpow(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
return getManager().invoke("_npi_rpower_scalar", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray rpowi(Number n) {
MxOpParams params = new MxOpParams();
params.add("scalar", n.toString());
getManager()
.invoke("_npi_rpower_scalar", new NDArray[] {array}, new NDArray[] {array}, params);
return array;
}
////////////////////////////////////////
// Activations
////////////////////////////////////////
/** {@inheritDoc} */
@Override
public NDArray relu() {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "relu");
return getManager().invoke("_npx_activation", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray sigmoid() {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "sigmoid");
return getManager().invoke("_npx_activation", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray tanh() {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "tanh");
return getManager().invoke("_npx_activation", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray softPlus() {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "softrelu");
return getManager().invoke("_npx_activation", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray softSign() {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "softsign");
return getManager().invoke("_npx_activation", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray leakyRelu(float alpha) {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "leaky");
params.addParam("slope", alpha);
return getManager().invoke("_npx_leaky_relu", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray elu(float alpha) {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "elu");
params.addParam("slope", alpha);
return getManager().invoke("_npx_leaky_relu", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray selu() {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "selu");
return getManager().invoke("_npx_leaky_relu", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray gelu() {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "gelu");
return getManager().invoke("_npx_leaky_relu", array, params);
}
////////////////////////////////////////
// Pooling Operations
////////////////////////////////////////
/** {@inheritDoc} */
@Override
public NDArray maxPool(Shape kernelShape, Shape stride, Shape padding, boolean ceilMode) {
MxOpParams params = new MxOpParams();
params.addParam("kernel", kernelShape);
params.add("pool_type", "max");
params.addParam("stride", stride);
params.addParam("pad", padding);
params.add("pooling_convention", ceilMode ? "full" : "valid");
return getManager().invoke("_npx_pooling", getArray(), params);
}
/** {@inheritDoc} */
@Override
public NDArray globalMaxPool() {
MxOpParams params = new MxOpParams();
params.add("kernel", getGlobalPoolingShapes(1));
params.add("pad", getGlobalPoolingShapes(0));
params.add("pool_type", "max");
params.addParam("global_pool", true);
try (NDArray temp = getManager().invoke("_npx_pooling", getArray(), params)) {
return temp.reshape(temp.getShape().size(0), temp.getShape().size(1));
}
}
/** {@inheritDoc} */
@Override
public NDArray avgPool(
Shape kernelShape,
Shape stride,
Shape padding,
boolean ceilMode,
boolean countIncludePad) {
MxOpParams params = new MxOpParams();
params.addParam("kernel", kernelShape);
params.add("pool_type", "avg");
params.addParam("stride", stride);
params.addParam("pad", padding);
params.add("pooling_convention", ceilMode ? "full" : "valid");
params.addParam("count_include_pad", countIncludePad);
return getManager().invoke("_npx_pooling", getArray(), params);
}
/** {@inheritDoc} */
@Override
public NDArray globalAvgPool() {
MxOpParams params = new MxOpParams();
params.add("kernel", getGlobalPoolingShapes(1));
params.add("pad", getGlobalPoolingShapes(0));
params.add("pool_type", "avg");
params.addParam("global_pool", true);
try (NDArray temp = getManager().invoke("_npx_pooling", getArray(), params)) {
return temp.reshape(temp.getShape().size(0), temp.getShape().size(1));
}
}
/** {@inheritDoc} */
@Override
public NDArray lpPool(
float normType, Shape kernelShape, Shape stride, Shape padding, boolean ceilMode) {
if (((int) normType) != normType) {
throw new IllegalArgumentException(
"float type of normType is not supported in MXNet engine, please use integer"
+ " instead");
}
MxOpParams params = new MxOpParams();
params.addParam("p_value", (int) normType);
params.addParam("kernel", kernelShape);
params.add("pool_type", "lp");
params.addParam("stride", stride);
params.addParam("pad", padding);
params.add("pooling_convention", ceilMode ? "full" : "valid");
return getManager().invoke("_npx_pooling", getArray(), params);
}
/** {@inheritDoc} */
@Override
public NDArray globalLpPool(float normType) {
if (((int) normType) != normType) {
throw new IllegalArgumentException(
"float type of normType is not supported in MXNet engine, please use integer"
+ " instead");
}
MxOpParams params = new MxOpParams();
params.add("pool_type", "lp");
params.addParam("p_value", (int) normType);
params.addParam("global_pool", true);
try (NDArray temp = getManager().invoke("_npx_pooling", getArray(), params)) {
return temp.reshape(temp.getShape().size(0), temp.getShape().size(1));
}
}
////////////////////////////////////////
// Optimizer
////////////////////////////////////////
/** {@inheritDoc} */
@Override
public void adadeltaUpdate(
NDList inputs,
NDList weights,
float weightDecay,
float rescaleGrad,
float clipGrad,
float rho,
float epsilon) {
NDArray weight = inputs.get(0);
NDArray grad = inputs.get(1);
NDArray s = inputs.get(2);
NDArray delta = inputs.get(3);
// create a baseManager to close all intermediate NDArrays
try (NDManager subManager = NDManager.newBaseManager()) {
subManager.tempAttachAll(inputs, weights);
// Preprocess Gradient
grad.muli(rescaleGrad);
if (clipGrad > 0) {
grad = grad.clip(-clipGrad, clipGrad);
}
grad.addi(weight.mul(weightDecay));
// Update s, g, and delta
s.muli(rho).addi(grad.square().mul(1 - rho));
NDArray g = delta.add(epsilon).sqrt().div(s.add(epsilon).sqrt()).mul(grad);
delta.muli(rho).addi(g.square().mul(1 - rho));
// Update weight
weight.subi(g);
}
}
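    // For reference (summary added for clarity), the code above implements the standard AdaDelta
    // recurrences:
    //   s     <- rho * s     + (1 - rho) * grad^2
    //   g     <- sqrt(delta + epsilon) / sqrt(s + epsilon) * grad
    //   delta <- rho * delta + (1 - rho) * g^2
    //   w     <- w - g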
/** {@inheritDoc} */
@Override
public void adagradUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float epsilon) {
MxOpParams params = new MxOpParams();
params.addParam("lr", learningRate);
params.addParam("wd", weightDecay);
params.addParam("rescale_grad", rescaleGrad);
params.addParam("clip_gradient", clipGrad);
params.addParam("epsilon", epsilon);
getManager().invoke("adagrad_update", inputs, weights, params);
}
/** {@inheritDoc} */
@Override
public void adamUpdate(
NDList inputs,
NDList weights,
float learningRate,
float learningRateBiasCorrection,
float weightDecay,
float rescaleGrad,
float clipGrad,
float beta1,
float beta2,
float epsilon,
boolean lazyUpdate,
boolean adamw) {
MxOpParams params = new MxOpParams();
params.addParam("lr", learningRateBiasCorrection);
params.addParam("clip_gradient", clipGrad);
params.addParam("beta1", beta1);
params.addParam("beta2", beta2);
params.addParam("epsilon", epsilon);
if (!adamw) {
params.addParam("wd", weightDecay);
params.addParam("rescale_grad", rescaleGrad);
params.addParam("lazy_update", lazyUpdate);
getManager().invoke("adam_update", inputs, weights, params);
} else {
// https://github.com/apache/mxnet/blob/7d602e3b2382eb501fdeb94c4d97e652a723af11/src/operator/contrib/adamw.cc#L80-L121
// https://github.com/apache/mxnet/blob/7d602e3b2382eb501fdeb94c4d97e652a723af11/src/operator/contrib/adamw-inl.h#L172-L207
inputs.add(inputs.getManager().create(rescaleGrad));
params.addParam("eta", 1.0f);
params.addParam("wd", weightDecay * learningRate);
getManager().invoke("_adamw_update", inputs, weights, params);
}
}
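    // AdamW note (summary added for clarity): unlike adam_update, the _adamw_update op takes the
    // rescale factor as an extra input NDArray (appended above) and expects the learning rate to
    // be folded into the effective weight decay, hence eta = 1.0 and wd = weightDecay * learningRate.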
/** {@inheritDoc} */
@Override
public void rmspropUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float gamma1,
float gamma2,
float epsilon,
boolean centered) {
MxOpParams params = new MxOpParams();
params.addParam("lr", learningRate);
params.addParam("wd", weightDecay);
params.addParam("rescale_grad", rescaleGrad);
params.addParam("clip_gradient", clipGrad);
params.addParam("gamma1", gamma1);
params.addParam("epsilon", epsilon);
if (!centered) {
getManager().invoke("rmsprop_update", inputs, weights, params);
} else {
params.addParam("gamma2", gamma2);
getManager().invoke("rmspropalex_update", inputs, weights, params);
}
}
/** {@inheritDoc} */
@Override
public void nagUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float momentum) {
MxOpParams params = new MxOpParams();
params.addParam("lr", learningRate);
params.addParam("wd", weightDecay);
params.addParam("rescale_grad", rescaleGrad);
params.addParam("clip_gradient", clipGrad);
params.addParam("momentum", momentum);
getManager().invoke("nag_mom_update", inputs, weights, params);
}
/** {@inheritDoc} */
@Override
public void sgdUpdate(
NDList inputs,
NDList weights,
float learningRate,
float weightDecay,
float rescaleGrad,
float clipGrad,
float momentum,
boolean lazyUpdate) {
MxOpParams params = new MxOpParams();
params.addParam("lr", learningRate);
params.addParam("wd", weightDecay);
params.addParam("rescale_grad", rescaleGrad);
params.addParam("clip_gradient", clipGrad);
params.addParam("lazy_update", lazyUpdate);
if (momentum != 0) {
params.addParam("momentum", momentum);
getManager().invoke("sgd_mom_update", inputs, weights, params);
} else {
getManager().invoke("sgd_update", inputs, weights, params);
}
}
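    // For reference (hedged summary): with momentum != 0 the native sgd_mom_update maintains a
    // velocity state, roughly v <- momentum * v - lr * grad and w <- w + v (after weight decay,
    // rescaling and clipping are applied to the gradient); with momentum == 0 it reduces to the
    // plain update w <- w - lr * grad.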
////////////////////////////////////////
// Neural network
////////////////////////////////////////
/** {@inheritDoc} */
@Override
public NDList convolution(
NDArray input,
NDArray weight,
NDArray bias,
Shape stride,
Shape padding,
Shape dilation,
int groups) {
MxOpParams params = new MxOpParams();
params.addParam("kernel", weight.getShape().slice(2));
params.addParam("stride", stride);
params.addParam("pad", padding);
params.addParam("dilate", dilation);
params.addParam("num_group", groups);
params.addParam("num_filter", weight.getShape().get(0));
NDList inputs = new NDList(input, weight);
if (bias != null) {
params.add("no_bias", false);
inputs.add(bias);
} else {
params.add("no_bias", true);
}
return getManager().invoke("_npx_convolution", inputs, params);
}
/** {@inheritDoc} */
@Override
public NDList deconvolution(
NDArray input,
NDArray weight,
NDArray bias,
Shape stride,
Shape padding,
Shape outPadding,
Shape dilation,
int groups) {
MxOpParams params = new MxOpParams();
params.addParam("kernel", weight.getShape().slice(2));
params.addParam("stride", stride);
params.addParam("pad", padding);
params.addParam("adj", outPadding);
params.addParam("dilate", dilation);
params.addParam("num_group", groups);
params.addParam("num_filter", weight.getShape().get(0));
NDList inputs = new NDList(input, weight);
if (bias != null) {
params.add("no_bias", false);
inputs.add(bias);
} else {
params.add("no_bias", true);
}
return getManager().invoke("_npx_deconvolution", inputs, params);
}
/** {@inheritDoc} */
@Override
public NDList linear(NDArray input, NDArray weight, NDArray bias) {
MxOpParams params = new MxOpParams();
params.addParam("num_hidden", weight.size(0));
params.addParam("flatten", false);
params.addParam("no_bias", bias == null);
NDList inputs = new NDList(input, weight);
if (bias != null) {
inputs.add(bias);
}
return getManager().invoke("_npx_fully_connected", inputs, params);
}
/** {@inheritDoc} */
@Override
public NDList embedding(NDArray input, NDArray weight, SparseFormat sparse) {
if (!sparse.equals(SparseFormat.DENSE) && !sparse.equals(SparseFormat.ROW_SPARSE)) {
throw new IllegalArgumentException("MXNet only supports row sparse");
}
MxOpParams params = new MxOpParams();
long inputDim = weight.getShape().get(0);
long outputDim = weight.getShape().get(1);
params.addParam("input_dim", inputDim);
params.addParam("output_dim", outputDim);
params.addParam("sparse_grad", sparse.getValue());
return getManager().invoke("_npx_embedding", new NDList(input, weight), params);
}
/** {@inheritDoc} */
@Override
public NDList prelu(NDArray input, NDArray alpha) {
MxOpParams params = new MxOpParams();
params.addParam("act_type", "prelu");
return getManager().invoke("_npx_leaky_relu", new NDList(input, alpha), params);
}
/** {@inheritDoc} */
@Override
public NDList dropout(NDArray input, float rate, boolean training) {
if (training != JnaUtils.autogradIsTraining()) {
throw new IllegalArgumentException(
"the mode of dropout in MXNet should align with the mode of GradientCollector");
}
MxOpParams params = new MxOpParams();
params.addParam("p", rate);
return getManager().invoke("_npx_dropout", new NDList(input), params);
}
/** {@inheritDoc} */
@Override
public NDList layerNorm(
NDArray input, Shape normalizedShape, NDArray gamma, NDArray beta, float eps) {
MxOpParams params = new MxOpParams();
params.addParam("axis", -1);
params.addParam("eps", eps);
NDArray reshapedInput =
input.reshape(
input.getShape()
.slice(
0,
Math.toIntExact(
input.getShape().dimension()
- normalizedShape.dimension()))
.add(normalizedShape.size()));
        // Because gamma and beta are attached to the model's manager, we must attach the reshaped
        // copies to the input's NDManager to avoid a memory leak.
final NDArray reshapedGamma = gamma.reshape(normalizedShape.size());
final NDArray reshapedBeta = beta.reshape(normalizedShape.size());
final NDManager inputManager = input.getManager();
reshapedBeta.attach(inputManager);
reshapedGamma.attach(inputManager);
return new NDList(
getManager()
.invoke(
"_npx_layer_norm",
new NDList(reshapedInput, reshapedGamma, reshapedBeta),
params)
.get(0)
.reshape(input.getShape()));
}
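    // Shape walk-through (illustrative, not in the original source): for input (2, 4, 3, 5) and
    // normalizedShape (3, 5), the reshapes above flatten the normalized axes so the native op sees
    // input (2, 4, 15) and gamma/beta of shape (15); the output is then reshaped back to the
    // original (2, 4, 3, 5).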
/** {@inheritDoc} */
@Override
public NDList batchNorm(
NDArray input,
NDArray runningMean,
NDArray runningVar,
NDArray gamma,
NDArray beta,
int axis,
float momentum,
float eps,
boolean training) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
params.addParam("fix_gamma", gamma == null);
params.addParam("eps", eps);
params.addParam("momentum", momentum);
if (training != JnaUtils.autogradIsTraining()) {
throw new IllegalArgumentException(
"the mode of batchNorm in MXNet should align with the mode of"
+ " GradientCollector");
}
return getManager()
.invoke(
"_npx_batch_norm",
new NDList(input, gamma, beta, runningMean, runningVar),
params);
}
/** {@inheritDoc} */
@Override
public NDList rnn(
NDArray input,
NDArray state,
NDList params,
boolean hasBiases,
int numLayers,
RNN.Activation activation,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
int numParams = numLayers * ((hasBiases) ? 4 : 2) * ((bidirectional) ? 2 : 1);
Preconditions.checkArgument(
params.size() == numParams,
"The size of Params is incorrect expect "
+ numParams
+ " parameters but got "
+ params.size());
if (training != JnaUtils.autogradIsTraining()) {
throw new IllegalArgumentException(
"the mode of rnn in MXNet should align with the mode of GradientCollector");
}
if (batchFirst) {
input = input.swapAxes(0, 1);
}
MxOpParams opParams = new MxOpParams();
opParams.addParam("p", dropRate);
opParams.addParam("state_size", state.getShape().tail());
opParams.addParam("num_layers", numLayers);
opParams.addParam("bidirectional", bidirectional);
opParams.addParam("state_outputs", true);
opParams.addParam("mode", activation == RNN.Activation.TANH ? "rnn_tanh" : "rnn_relu");
NDList inputs = new NDList();
inputs.add(input);
try (NDList temp = new NDList()) {
for (NDArray param : params) {
temp.add(param.flatten());
}
NDArray tempParam = NDArrays.concat(temp);
tempParam.attach(input.getManager());
inputs.add(tempParam);
}
inputs.add(state);
if (!batchFirst) {
return getManager().invoke("_npx_rnn", inputs, opParams);
}
NDList result = getManager().invoke("_npx_rnn", inputs, opParams);
try (NDArray temp = result.head()) {
return new NDList(temp.swapAxes(0, 1), result.get(1));
}
}
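    // Parameter packing note (added for clarity): the loop above flattens every weight and bias
    // array and concatenates them into the single packed parameter vector that _npx_rnn expects.
    // For example, numLayers = 2 with biases and bidirectional = true requires
    // 2 * 4 * 2 = 16 parameter arrays.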
/** {@inheritDoc} */
@Override
public NDList gru(
NDArray input,
NDArray state,
NDList params,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
int numParams = numLayers * ((hasBiases) ? 4 : 2) * ((bidirectional) ? 2 : 1);
Preconditions.checkArgument(
params.size() == numParams,
"The size of Params is incorrect expect "
+ numParams
+ " parameters but got "
+ params.size());
if (training != JnaUtils.autogradIsTraining()) {
throw new IllegalArgumentException(
"the mode of gru in MXNet should align with the mode of GradientCollector");
}
if (batchFirst) {
input = input.swapAxes(0, 1);
}
MxOpParams opParams = new MxOpParams();
opParams.addParam("p", dropRate);
opParams.addParam("state_size", state.getShape().tail());
opParams.addParam("num_layers", numLayers);
opParams.addParam("bidirectional", bidirectional);
opParams.addParam("state_outputs", true);
opParams.addParam("mode", "gru");
NDList inputs = new NDList();
inputs.add(input);
try (NDList temp = new NDList()) {
for (NDArray param : params) {
temp.add(param.flatten());
}
NDArray tempParam = NDArrays.concat(temp);
tempParam.attach(input.getManager());
inputs.add(tempParam);
}
inputs.add(state);
if (!batchFirst) {
return getManager().invoke("_npx_rnn", inputs, opParams);
}
NDList result = getManager().invoke("_npx_rnn", inputs, opParams);
try (NDArray temp = result.head()) {
return new NDList(temp.swapAxes(0, 1), result.get(1));
}
}
/** {@inheritDoc} */
@Override
public NDList lstm(
NDArray input,
NDList states,
NDList params,
boolean hasBiases,
int numLayers,
double dropRate,
boolean training,
boolean bidirectional,
boolean batchFirst) {
if (!hasBiases) {
throw new UnsupportedOperationException(
"Setting hasBias to be false is not supported on MXNet engine.");
}
int numParams = numLayers * 4 * (bidirectional ? 2 : 1);
Preconditions.checkArgument(
params.size() == numParams,
"The size of Params is incorrect expect "
+ numParams
+ " parameters but got "
+ params.size());
if (training != JnaUtils.autogradIsTraining()) {
throw new IllegalArgumentException(
"the mode of lstm in MXNet should align with the mode of GradientCollector");
}
if (batchFirst) {
input = input.swapAxes(0, 1);
}
MxOpParams opParams = new MxOpParams();
opParams.addParam("mode", "lstm");
opParams.addParam("p", dropRate);
opParams.addParam("state_size", states.head().getShape().tail());
opParams.addParam("state_outputs", true);
opParams.addParam("num_layers", numLayers);
opParams.addParam("bidirectional", bidirectional);
opParams.addParam("lstm_state_clip_nan", true);
NDList inputs = new NDList();
inputs.add(input);
try (NDList temp = new NDList()) {
for (NDArray param : params) {
temp.add(param.flatten());
}
NDArray tempParam = NDArrays.concat(temp);
tempParam.attach(input.getManager());
inputs.add(tempParam);
}
inputs.addAll(states);
if (!batchFirst) {
return getManager().invoke("_npx_rnn", inputs, opParams);
}
NDList result = getManager().invoke("_npx_rnn", inputs, opParams);
try (NDArray temp = result.head()) {
return new NDList(temp.swapAxes(0, 1), result.get(1), result.get(2));
}
}
////////////////////////////////////////
// Image and CV
////////////////////////////////////////
/** {@inheritDoc} */
@Override
public NDArray normalize(float[] mean, float[] std) {
MxOpParams params = new MxOpParams();
params.addTupleParam("mean", mean);
params.addTupleParam("std", std);
return getManager().invoke("_npx__image_normalize", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray toTensor() {
return getManager().invoke("_npx__image_to_tensor", array, null);
}
/** {@inheritDoc} */
@Override
public NDArray interpolation(long[] size, int mode, boolean alignCorners) {
throw new UnsupportedOperationException("Not implemented");
}
/** {@inheritDoc} */
@Override
public NDArray resize(int width, int height, int interpolation) {
if (array.isEmpty()) {
throw new IllegalArgumentException("attempt to resize of an empty NDArray");
}
MxOpParams params = new MxOpParams();
params.addTupleParam("size", width, height);
params.addParam("interp", interpolation);
return getManager().invoke("_npx__image_resize", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray crop(int x, int y, int width, int height) {
MxOpParams params = new MxOpParams();
params.add("x", x);
params.add("y", y);
params.add("width", width);
params.add("height", height);
return getManager().invoke("_npx__image_crop", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray randomFlipLeftRight() {
if (array.getDevice().getDeviceType().equals(Device.Type.GPU)) {
throw new UnsupportedOperationException("randomFlipLeftRight is not supported on GPU");
}
return getManager().invoke("_npx__image_random_flip_left_right", array, null);
}
/** {@inheritDoc} */
@Override
public NDArray randomFlipTopBottom() {
if (array.getDevice().getDeviceType().equals(Device.Type.GPU)) {
throw new UnsupportedOperationException("randomFlipTopBottom is not supported on GPU");
}
return getManager().invoke("_npx__image_random_flip_top_bottom", array, null);
}
/** {@inheritDoc} */
@Override
public NDArray randomBrightness(float brightness) {
if (array.getDevice().getDeviceType().equals(Device.Type.GPU)) {
throw new UnsupportedOperationException("randomBrightness is not supported on GPU");
}
MxOpParams params = new MxOpParams();
float min = Math.max(0, 1 - brightness);
float max = 1 + brightness;
params.addParam("min_factor", min);
params.addParam("max_factor", max);
return getManager().invoke("_npx__image_random_brightness", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray randomHue(float hue) {
if (array.getDevice().getDeviceType().equals(Device.Type.GPU)) {
throw new UnsupportedOperationException("randomHue is not supported on GPU");
}
MxOpParams params = new MxOpParams();
float min = Math.max(0, 1 - hue);
float max = 1 + hue;
params.addParam("min_factor", min);
params.addParam("max_factor", max);
return getManager().invoke("_npx__image_random_hue", array, params);
}
/** {@inheritDoc} */
@Override
public NDArray randomColorJitter(
float brightness, float contrast, float saturation, float hue) {
if (array.getDevice().getDeviceType().equals(Device.Type.GPU)) {
throw new UnsupportedOperationException("randomColorJitter is not supported on GPU");
}
MxOpParams params = new MxOpParams();
params.addParam("brightness", brightness);
params.addParam("contrast", contrast);
params.addParam("saturation", saturation);
params.addParam("hue", hue);
return getManager().invoke("_npx__image_random_color_jitter", array, params);
}
/** {@inheritDoc} */
@Override
public NDArrayIndexer getIndexer(NDManager manager) {
return new MxNDArrayIndexer((MxNDManager) manager);
}
////////////////////////////////////////
// Miscellaneous
////////////////////////////////////////
/** {@inheritDoc} */
@Override
@SuppressWarnings("PMD.UseTryWithResources")
public NDArray where(NDArray condition, NDArray other) {
NDArray array1;
NDArray array2;
condition =
(condition.getDataType() == DataType.BOOLEAN)
? condition.toType(DataType.INT32, false)
: condition;
if (array.getDataType() != other.getDataType()) {
throw new IllegalArgumentException(
"DataType mismatch, required "
+ array.getDataType()
+ " actual "
+ other.getDataType());
}
if (!array.shapeEquals(other)) {
Shape res = deriveBroadcastedShape(array.getShape(), other.getShape());
array1 = (!res.equals(array.getShape())) ? array.broadcast(res) : array;
array2 = (!res.equals(other.getShape())) ? other.broadcast(res) : other;
} else {
array1 = array;
array2 = other;
}
try {
MxNDManager manager = getManager();
return manager.invoke(
"where",
new NDArray[] {manager.from(condition), array1, manager.from(array2)},
null);
} finally {
if (array1 != array) {
array1.close();
}
if (array2 != other) {
array2.close();
}
}
}
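    // Illustrative example (not in the original source): for a BOOLEAN condition of shape (3),
    // this array of shape (2, 3) and other of shape (3), the condition is cast to INT32 and other
    // is broadcast to (2, 3) before "where" is invoked; the temporary broadcast copy is closed in
    // the finally block.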
/** {@inheritDoc} */
@Override
public NDArray stack(NDList arrays, int axis) {
MxOpParams params = new MxOpParams();
params.addParam("axis", axis);
NDArray[] srcArray = new NDArray[arrays.size() + 1];
srcArray[0] = array;
NDManager manager = array.getManager();
int i = 1;
for (NDArray arr : arrays) {
srcArray[i++] = manager.from(arr);
}
return getManager().invoke("_npi_stack", srcArray, params);
}
/** {@inheritDoc} */
@Override
public NDArray concat(NDList list, int axis) {
NDUtils.checkConcatInput(list);
MxOpParams params = new MxOpParams();
        // the legacy (non-numpy) MXNet Concat op uses "dim" as its axis argument; _npi_concatenate uses "axis"
params.addParam("axis", axis);
NDArray[] srcArray = new NDArray[list.size() + 1];
srcArray[0] = array;
NDManager manager = array.getManager();
int i = 1;
for (NDArray arr : list) {
srcArray[i++] = manager.from(arr);
}
return getManager().invoke("_npi_concatenate", srcArray, params);
}
/** {@inheritDoc} */
@Override
public NDList multiBoxTarget(
NDList inputs,
float iouThreshold,
float ignoreLabel,
float negativeMiningRatio,
float negativeMiningThreshold,
int minNegativeSamples) {
MxOpParams parameters = new MxOpParams();
parameters.add("minimum_negative_samples", minNegativeSamples);
parameters.add("overlap_threshold", iouThreshold);
parameters.add("ignore_label", ignoreLabel);
parameters.add("negative_mining_ratio", negativeMiningRatio);
parameters.add("negative_mining_thresh", negativeMiningThreshold);
return getManager().invoke("MultiBoxTarget", inputs, parameters);
}
/** {@inheritDoc} */
@Override
public NDList multiBoxPrior(
List<Float> sizes,
List<Float> ratios,
List<Float> steps,
List<Float> offsets,
boolean clip) {
MxOpParams parameters = new MxOpParams();
parameters.add("sizes", sizes);
parameters.add("ratios", ratios);
parameters.add("steps", steps);
parameters.add("offsets", offsets);
parameters.add("clip", clip);
return getManager().invoke("MultiBoxPrior", new NDList(array), parameters);
}
/** {@inheritDoc} */
@Override
public NDList multiBoxDetection(
NDList inputs,
boolean clip,
float threshold,
int backgroundId,
            float nmsThreshold,
boolean forceSuppress,
int nmsTopK) {
MxOpParams parameters = new MxOpParams();
parameters.add("clip", clip);
parameters.add("threshold", threshold);
parameters.add("background_id", backgroundId);
parameters.add("nms_threshold", nmsThreashold);
parameters.add("force_suppress", forceSuppress);
parameters.add("nms_topk", nmsTopK);
return getManager().invoke("MultiBoxDetection", inputs, parameters);
}
/** {@inheritDoc} */
@Override
public NDArray getArray() {
return array;
}
private MxNDManager getManager() {
return array.getManager();
}
private int getGlobalPoolingDim() {
int poolDim = getArray().getShape().dimension() - 2;
if (poolDim < 1 || poolDim > 3) {
            throw new IllegalStateException(
                    "GlobalPooling only supports 1 to 3 dimensions, but "
                            + poolDim
                            + "D is not supported.");
}
return poolDim;
}
private Shape getGlobalPoolingShapes(long fillValue) {
// determine pooling dimension according to input
// input dimension minus 2 (batch and channel dim)
int poolDim = getGlobalPoolingDim();
long[] shape = new long[poolDim];
Arrays.fill(shape, fillValue);
return new Shape(shape);
}
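    // Illustrative example (not in the original source): an NCHW input (N, C, H, W) has
    // dimension 4, so poolDim = 2 and getGlobalPoolingShapes(1) / getGlobalPoolingShapes(0)
    // yield the (1, 1) kernel and (0, 0) padding used by the global pooling methods above.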
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxNDArrayIndexer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDArrayIndexer;
import ai.djl.ndarray.index.full.NDIndexFullPick;
import ai.djl.ndarray.index.full.NDIndexFullSlice;
import ai.djl.ndarray.index.full.NDIndexFullTake;
import ai.djl.ndarray.types.Shape;
import java.util.Stack;
/** The {@link NDArrayIndexer} used by the {@link MxNDArray}. */
public class MxNDArrayIndexer extends NDArrayIndexer {
private MxNDManager manager;
MxNDArrayIndexer(MxNDManager manager) {
this.manager = manager;
}
/** {@inheritDoc} */
@Override
public NDArray get(NDArray array, NDIndexFullPick fullPick) {
array = manager.from(array);
MxOpParams params = new MxOpParams();
params.addParam("axis", fullPick.getAxis());
params.addParam("keepdims", true);
params.add("mode", "wrap");
NDList pick = new NDList(array, manager.from(fullPick.getIndices()));
return manager.invoke("pick", pick, params).singletonOrThrow();
}
/** {@inheritDoc} */
@Override
public NDArray get(NDArray array, NDIndexFullTake fullTake) {
array = manager.from(array);
MxOpParams params = new MxOpParams();
params.addParam("axis", fullTake.getAxis());
params.add("mode", "wrap");
NDList pick = new NDList(array, manager.from(fullTake.getIndices()));
return manager.invoke("take", pick, params).singletonOrThrow();
}
/** {@inheritDoc} */
@Override
public NDArray get(NDArray array, NDIndexFullSlice fullSlice) {
array = manager.from(array);
long[] min = fullSlice.getMin();
long[] max = fullSlice.getMax();
long[] s = array.getShape().getShape();
for (int i = 0; i < min.length; i++) {
if (min[i] >= max[i] || min[i] >= s[i]) {
return manager.create(new Shape(0));
}
}
MxOpParams params = new MxOpParams();
params.addTupleParam("begin", fullSlice.getMin());
params.addTupleParam("end", fullSlice.getMax());
params.addTupleParam("step", fullSlice.getStep());
NDArray result = manager.invoke("_npi_slice", array, params);
int[] toSqueeze = fullSlice.getToSqueeze();
if (toSqueeze.length > 0) {
NDArray oldResult = result;
result = result.squeeze(toSqueeze);
oldResult.close();
}
return result;
}
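    // Illustrative example (not in the original source): slicing a (4, 5) array with
    // min = {1, 0}, max = {3, 5}, step = {1, 2} invokes _npi_slice with begin (1, 0), end (3, 5)
    // and step (1, 2), producing a (2, 3) result before any squeeze is applied.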
/** {@inheritDoc} */
@Override
public void set(NDArray array, NDIndexFullSlice fullSlice, NDArray value) {
array = manager.from(array);
MxOpParams params = new MxOpParams();
params.addTupleParam("begin", fullSlice.getMin());
params.addTupleParam("end", fullSlice.getMax());
params.addTupleParam("step", fullSlice.getStep());
Stack<NDArray> prepareValue = new Stack<>();
prepareValue.add(value);
prepareValue.add(prepareValue.peek().toDevice(array.getDevice(), false));
// prepareValue.add(prepareValue.peek().asType(getDataType(), false));
// Deal with the case target: (1, 10, 1), original (10)
// try to find (10, 1) and reshape (10) to that
Shape targetShape = fullSlice.getShape();
while (targetShape.size() > value.size()) {
targetShape = targetShape.slice(1);
}
prepareValue.add(prepareValue.peek().reshape(targetShape));
prepareValue.add(prepareValue.peek().broadcast(fullSlice.getShape()));
manager.invoke(
"_npi_slice_assign",
new NDArray[] {array, prepareValue.peek()},
new NDArray[] {array},
params);
for (NDArray toClean : prepareValue) {
if (toClean != value) {
toClean.close();
}
}
}
/** {@inheritDoc} */
@Override
public void set(NDArray array, NDIndexFullSlice fullSlice, Number value) {
array = manager.from(array);
MxOpParams params = new MxOpParams();
params.addTupleParam("begin", fullSlice.getMin());
params.addTupleParam("end", fullSlice.getMax());
params.addTupleParam("step", fullSlice.getStep());
params.addParam("scalar", value);
manager.invoke(
"_npi_slice_assign_scalar", new NDArray[] {array}, new NDArray[] {array}, params);
}
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxNDManager.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.util.PairList;
import com.sun.jna.Pointer;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Path;
/** {@code MxNDManager} is the MXNet implementation of {@link NDManager}. */
public class MxNDManager extends BaseNDManager {
/**
* A global {@link NDManager} singleton instance.
*
     * <p>This NDManager is the root of all the other {@code NDManager}s. NDArrays created by this
     * manager are unmanaged, so the user has to close them manually. Otherwise, those NDArrays
     * will only be released on GC, which may lead to out-of-native-memory issues.
*/
private static final MxNDManager SYSTEM_MANAGER = new SystemManager();
private static final NDArray[] EMPTY = new NDArray[0];
private int version;
private MxNDManager(NDManager parent, Device device, int version) {
super(parent, device);
this.version = version;
}
static MxNDManager getSystemManager() {
return SYSTEM_MANAGER;
}
/** {@inheritDoc} */
@Override
public ByteBuffer allocateDirect(int capacity) {
return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
}
/** {@inheritDoc} */
@Override
public MxNDArray from(NDArray array) {
if (array == null || array instanceof MxNDArray) {
return (MxNDArray) array;
}
MxNDArray ret = create(array.getShape(), array.getDataType());
ret.set(array.toByteBuffer());
ret.setName(array.getName());
return ret;
}
/**
     * Creates an MxNDArray with the given native memory pointer and attaches it to this manager.
*
* @param handle the array's native memory pointer
* @return the created array
*/
public MxNDArray create(Pointer handle) {
return new MxNDArray(this, handle);
}
/**
     * Creates a sparse MxNDArray with the given native memory pointer and attaches it to this manager.
*
* @param handle the array's native memory pointer
* @param fmt the sparse format to use
* @return the created array
*/
public MxNDArray create(Pointer handle, SparseFormat fmt) {
return new MxNDArray(this, handle, fmt);
}
/** {@inheritDoc} */
@Override
public MxNDArray create(Shape shape, DataType dataType) {
Pointer handle = JnaUtils.createNdArray(device, shape, dataType, shape.dimension(), false);
return new MxNDArray(this, handle, device, shape, dataType, false);
}
/** {@inheritDoc} */
@Override
public MxNDArray createCSR(Buffer data, long[] indptr, long[] indices, Shape shape) {
SparseFormat fmt = SparseFormat.CSR;
DataType dataType = DataType.fromBuffer(data);
MxNDArray indptrNd = create(new Shape(indptr.length), DataType.INT64);
indptrNd.set(indptr);
MxNDArray indicesNd = create(new Shape(indices.length), DataType.INT64);
indicesNd.set(indices);
Pointer handle =
JnaUtils.createSparseNdArray(
fmt,
device,
shape,
dataType,
new DataType[] {indptrNd.getDataType(), indicesNd.getDataType()},
new Shape[] {indptrNd.getShape(), indicesNd.getShape()},
false);
MxNDArray sparse = create(handle, fmt);
MxNDArray dataNd = create(new Shape(data.remaining()), dataType);
dataNd.set(data);
JnaUtils.ndArraySyncCopyFromNdArray(sparse, dataNd, -1);
JnaUtils.ndArraySyncCopyFromNdArray(sparse, indptrNd, 0);
JnaUtils.ndArraySyncCopyFromNdArray(sparse, indicesNd, 1);
return sparse;
}
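    // Illustrative CSR example (not in the original source): the 2x3 matrix
    //   [[0, 7, 0],
    //    [8, 0, 9]]
    // is encoded as data = {7, 8, 9}, indptr = {0, 1, 3}, indices = {1, 0, 2} with shape (2, 3);
    // the three sync copies above install data, indptr and indices into the sparse handle.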
/** {@inheritDoc} */
@Override
public MxNDArray createRowSparse(Buffer data, Shape dataShape, long[] indices, Shape shape) {
SparseFormat fmt = SparseFormat.ROW_SPARSE;
DataType dataType = DataType.fromBuffer(data);
MxNDArray indicesNd = create(new Shape(indices.length), DataType.INT64);
indicesNd.set(indices);
Pointer handle =
JnaUtils.createSparseNdArray(
fmt,
device,
shape,
dataType,
new DataType[] {indicesNd.getDataType()},
new Shape[] {indicesNd.getShape()},
false);
MxNDArray sparse = create(handle, fmt);
MxNDArray dataNd = create(dataShape, dataType);
dataNd.set(data);
JnaUtils.ndArraySyncCopyFromNdArray(sparse, dataNd, -1);
JnaUtils.ndArraySyncCopyFromNdArray(sparse, indicesNd, 0);
return sparse;
}
/** {@inheritDoc} */
@Override
public NDList load(Path path) {
return JnaUtils.loadNdArray(this, path, device);
}
/** {@inheritDoc} */
@Override
public NDArray zeros(Shape shape, DataType dataType) {
return fill("_npi_zeros", shape, dataType);
}
/** {@inheritDoc} */
@Override
public NDArray ones(Shape shape, DataType dataType) {
return fill("_npi_ones", shape, dataType);
}
/** {@inheritDoc} */
@Override
public NDArray full(Shape shape, float value, DataType dataType) {
MxOpParams params = new MxOpParams();
params.addParam("shape", shape);
params.addParam("value", value);
params.setDataType(dataType);
params.setDevice(device);
return invoke("_npi_full", params);
}
/** {@inheritDoc} */
@Override
public NDArray arange(float start, float stop, float step, DataType dataType) {
MxOpParams params = new MxOpParams();
params.addParam("start", start);
params.addParam("stop", stop);
params.addParam("step", step);
params.setDataType(dataType);
params.setDevice(device);
return invoke("_npi_arange", params);
}
/** {@inheritDoc} */
@Override
public NDArray eye(int rows, int cols, int k, DataType dataType) {
MxOpParams params = new MxOpParams();
params.addParam("N", rows);
params.addParam("M", cols);
params.addParam("k", k);
params.setDataType(dataType);
params.setDevice(device);
return invoke("_npi_eye", params);
}
/** {@inheritDoc} */
@Override
public NDArray linspace(float start, float stop, int num, boolean endpoint) {
if (num < 0) {
throw new IllegalArgumentException("Num argument must be non-negative");
}
MxOpParams params = new MxOpParams();
params.addParam("start", start);
params.addParam("stop", stop);
params.addParam("num", num);
params.addParam("endpoint", endpoint);
params.setDevice(device);
return invoke("_npi_linspace", params);
}
/** {@inheritDoc} */
@Override
public NDArray randomInteger(long low, long high, Shape shape, DataType dataType) {
MxOpParams params = new MxOpParams();
params.addParam("low", low);
params.addParam("high", high);
params.addParam("shape", shape);
params.setDevice(device);
params.setDataType(dataType);
return invoke("_npi_random_randint", params);
}
/** {@inheritDoc} */
@Override
public NDArray randomPermutation(long n) {
NDArray array = arange(0, n, 1, DataType.INT64);
MxOpParams params = new MxOpParams();
return invoke("_npi_shuffle", new NDList(array), params).singletonOrThrow();
}
/** {@inheritDoc} */
@Override
public NDArray randomUniform(float low, float high, Shape shape, DataType dataType) {
MxOpParams params = new MxOpParams();
params.addParam("low", low);
params.addParam("high", high);
params.addParam("size", shape);
params.setDevice(device);
params.setDataType(dataType);
return invoke("_npi_uniform", params);
}
/** {@inheritDoc} */
@Override
public NDArray randomNormal(float loc, float scale, Shape shape, DataType dataType) {
MxOpParams params = new MxOpParams();
params.addParam("loc", loc);
params.addParam("scale", scale);
params.addParam("size", shape);
params.setDevice(device);
params.setDataType(dataType);
return invoke("_npi_normal", params);
}
/** {@inheritDoc} */
@Override
public NDArray randomMultinomial(int n, NDArray pValues, Shape shape) {
MxOpParams params = new MxOpParams();
params.addParam("n", n);
params.addParam("size", shape);
return invoke("_npi_multinomial", pValues, params);
}
/** {@inheritDoc} */
@Override
public NDArray randomMultinomial(int n, NDArray pValues) {
MxOpParams params = new MxOpParams();
params.addParam("n", n);
return invoke("_npi_multinomial", pValues, params);
}
/** {@inheritDoc} */
@Override
public NDArray sampleNormal(NDArray mu, NDArray sigma) {
return invoke("sample_normal", new NDArray[] {mu, sigma}, null);
}
/** {@inheritDoc} */
@Override
public NDArray sampleNormal(NDArray mu, NDArray sigma, Shape shape) {
MxOpParams params = new MxOpParams();
params.addParam("shape", shape);
return invoke("sample_normal", new NDArray[] {mu, sigma}, params);
}
/** {@inheritDoc} */
@Override
public NDArray samplePoisson(NDArray lam) {
return invoke("sample_poisson", lam, null);
}
/** {@inheritDoc} */
@Override
public NDArray samplePoisson(NDArray lam, Shape shape) {
MxOpParams params = new MxOpParams();
params.addParam("shape", shape);
return invoke("sample_poisson", lam, params);
}
/** {@inheritDoc} */
@Override
public NDArray sampleGamma(NDArray alpha, NDArray beta) {
return invoke("sample_gamma", new NDArray[] {alpha, beta}, null);
}
/** {@inheritDoc} */
@Override
public NDArray sampleGamma(NDArray alpha, NDArray beta, Shape shape) {
MxOpParams params = new MxOpParams();
params.addParam("shape", shape);
return invoke("sample_gamma", new NDArray[] {alpha, beta}, params);
}
/** {@inheritDoc} */
@Override
public MxNDManager newSubManager(Device dev) {
MxNDManager manager = new MxNDManager(this, dev, version);
attachUncappedInternal(manager.uid, manager);
return manager;
}
/** {@inheritDoc} */
@Override
public void invoke(
String operation, NDArray[] src, NDArray[] dest, PairList<String, ?> params) {
JnaUtils.op(operation).invoke(this, src, dest, params);
}
/** {@inheritDoc} */
@Override
public NDList invoke(String operation, NDList src, PairList<String, ?> params) {
return new NDList(JnaUtils.op(operation).invoke(this, src.toArray(EMPTY), params));
}
/**
* An engine specific generic invocation to native operator.
*
* <p>You should avoid using this function if possible. Since this function is engine specific,
     * using this API may cause portability issues. A native operation may not be compatible across
     * versions.
*
* @param operation the native operation to perform
* @param src the {@link NDList} of source {@link NDArray}
* @param dest the {@link NDList} to save output to
* @param params the parameters to be passed to the native operator
* @throws IllegalArgumentException if operation is not supported by Engine
* @throws EngineException if operation failed in native engine
*/
public void invoke(String operation, NDList src, NDList dest, PairList<String, ?> params) {
invoke(operation, src.toArray(EMPTY), dest.toArray(EMPTY), params);
}
/**
* An engine specific generic invocation to native operator.
*
* <p>You should avoid using this function if possible. Since this function is engine specific,
     * using this API may cause portability issues. A native operation may not be compatible across
     * versions.
*
* @param operation the native operation to perform
* @param src the array of source {@link NDArray}
* @param params the parameters to be passed to the native operator
* @return the output array of {@link NDArray}
* @throws IllegalArgumentException if operation is not supported by Engine
* @throws EngineException if operation failed in native engine
*/
public NDArray invoke(String operation, NDArray[] src, PairList<String, ?> params) {
return JnaUtils.op(operation).invoke(this, src, params)[0];
}
/**
* An engine specific generic invocation to native operator.
*
* <p>You should avoid using this function if possible. Since this function is engine specific,
     * using this API may cause portability issues. A native operation may not be compatible across
     * versions.
*
* @param operation the native operation to perform
* @param src the source {@link NDArray}
* @param params the parameters to be passed to the native operator
* @return the output array of {@link NDArray}
* @throws IllegalArgumentException if operation is not supported by Engine
* @throws EngineException if operation failed in native engine
*/
public NDArray invoke(String operation, NDArray src, PairList<String, ?> params) {
return invoke(operation, new NDArray[] {src}, params);
}
/**
* An engine specific generic invocation to native operator.
*
* <p>You should avoid using this function if possible. Since this function is engine specific,
     * using this API may cause portability issues. A native operation may not be compatible across
     * versions.
*
* @param operation the native operation to perform
* @param params the parameters to be passed to the native operator
* @return the output array of {@link NDArray}
* @throws IllegalArgumentException if operation is not supported by Engine
* @throws EngineException if operation failed in native engine
*/
public NDArray invoke(String operation, PairList<String, ?> params) {
return invoke(operation, EMPTY, params);
}
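    /*
     * Minimal usage sketch (added for illustration; op names are MXNet-internal and may change
     * across versions):
     *
     *   MxNDManager manager = MxNDManager.getSystemManager().newSubManager(Device.cpu());
     *   MxOpParams params = new MxOpParams();
     *   params.addParam("shape", new Shape(2, 2));
     *   params.setDevice(Device.cpu());
     *   params.setDataType(DataType.FLOAT32);
     *   NDArray ones = manager.invoke("_npi_ones", params); // the same call ones() makes via fill()
     */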
/** {@inheritDoc} */
@Override
public final Engine getEngine() {
return Engine.getEngine(MxEngine.ENGINE_NAME);
}
private NDArray fill(String opName, Shape shape, DataType dataType) {
MxOpParams params = new MxOpParams();
if (shape == null) {
throw new IllegalArgumentException("Shape is required for " + opName.substring(1));
}
params.addParam("shape", shape);
params.setDevice(device);
params.setDataType(dataType);
return invoke(opName, params);
}
/** The SystemManager is the root {@link MxNDManager} of which all others are children. */
private static final class SystemManager extends MxNDManager implements SystemNDManager {
SystemManager() {
super(null, null, JnaUtils.getVersion());
}
}
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxOpParams.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.ndarray.types.SparseFormat;
import ai.djl.util.PairList;
/** An internal helper for creating the MXNet operator parameters. */
public class MxOpParams extends PairList<String, Object> {
    // the MXNet CPU context takes an index, e.g. "cpu(0)"
private static final String MXNET_CPU = "cpu(0)";
/**
* Sets the Shape parameter.
*
* @param shape the shape to set
*/
public void setShape(Shape shape) {
addParam("shape", shape);
}
/**
* Sets the device to use for the operation.
*
* @param device the device to use for the operation
*/
public void setDevice(Device device) {
setParam("ctx", ("cpu".equals(device.getDeviceType()) ? MXNET_CPU : device.toString()));
}
/**
* Sets the dataType to use for the operation.
*
* @param dataType the dataType to use for the operation
*/
public void setDataType(DataType dataType) {
if (dataType != null) {
setParam("dtype", MxDataType.toMx(dataType));
}
}
/**
* Sets the sparseFormat to use for the operation.
*
* @param sparseFormat the sparseFormat to use for the operation
*/
public void setSparseFormat(SparseFormat sparseFormat) {
if (sparseFormat != null) {
setParam("stype", String.valueOf(sparseFormat.getValue()));
}
}
/**
* Sets a (potentially existing) parameter to a new value.
*
* @param paramName the parameter name to update
* @param value the value to set the parameter to
*/
public void setParam(String paramName, String value) {
remove(paramName);
add(paramName, value);
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param shape the value of the new parameter
*/
public void addParam(String paramName, Shape shape) {
if (shape != null) {
add(paramName, shape.toString());
}
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param value the value of the new parameter
*/
public void addParam(String paramName, String value) {
add(paramName, value);
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param value the value of the new parameter
*/
public void addParam(String paramName, int value) {
add(paramName, String.valueOf(value));
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param value the value of the new parameter
*/
public void addParam(String paramName, long value) {
add(paramName, String.valueOf(value));
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param value the value of the new parameter
*/
public void addParam(String paramName, double value) {
add(paramName, String.valueOf(value));
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param value the value of the new parameter
*/
public void addParam(String paramName, float value) {
add(paramName, String.valueOf(value));
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param value the value of the new parameter
*/
public void addParam(String paramName, boolean value) {
add(paramName, value ? "True" : "False");
}
/**
* Adds a parameter.
*
* @param paramName the name of the new parameter
* @param value the value of the new parameter
*/
public void addParam(String paramName, Number value) {
add(paramName, String.valueOf(value));
}
/**
* Adds a parameter with tuple value.
*
* @param paramName the name of the new parameter
* @param tuple the values of the new parameter
*/
public void addTupleParam(String paramName, int... tuple) {
StringBuilder sb = new StringBuilder();
sb.append('(');
for (int i = 0; i < tuple.length; ++i) {
if (i > 0) {
sb.append(", ");
}
sb.append(tuple[i]);
}
sb.append(')');
add(paramName, sb.toString());
}
/**
* Adds a parameter with tuple value.
*
* @param paramName the name of the new parameter
* @param tuple the values of the new parameter
*/
public void addTupleParam(String paramName, long... tuple) {
StringBuilder sb = new StringBuilder();
sb.append('(');
for (int i = 0; i < tuple.length; ++i) {
if (i > 0) {
sb.append(", ");
}
sb.append(tuple[i]);
}
sb.append(')');
add(paramName, sb.toString());
}
/**
* Adds a parameter with tuple value.
*
* @param paramName the name of the new parameter
* @param tuple the values of the new parameter
*/
public void addTupleParam(String paramName, float... tuple) {
StringBuilder sb = new StringBuilder();
sb.append('(');
for (int i = 0; i < tuple.length; ++i) {
if (i > 0) {
sb.append(", ");
}
sb.append(tuple[i]);
}
sb.append(')');
add(paramName, sb.toString());
}
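    /*
     * Serialization sketch (added for illustration): every parameter is stored as the string form
     * MXNet's C API expects, e.g.
     *
     *   MxOpParams params = new MxOpParams();
     *   params.addParam("kernel", new Shape(3, 3)); // stored as "(3, 3)"
     *   params.addParam("no_bias", true);           // stored as "True"
     *   params.addTupleParam("pad", 1, 1);          // stored as "(1, 1)"
     */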
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxParameterServer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.mxnet.jna.MxnetLibrary;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.training.ParameterServer;
import ai.djl.training.optimizer.Optimizer;
import ai.djl.util.NativeResource;
import com.sun.jna.Pointer;
import java.util.Arrays;
/** {@code MxParameterServer} is the MXNet implementation of {@link ParameterServer}. */
public class MxParameterServer extends NativeResource<Pointer> implements ParameterServer {
@SuppressWarnings("PMD.SingularField")
    // use a class field to hold the OptimizerCallback, which prevents it from being garbage-collected.
private OptimizerCallback callback;
private int priority;
/**
* Constructs a new {@code MxParameterServer}.
*
* @param optimizer the optimizer to use for the parameter server updates
*/
@SuppressWarnings("this-escape")
public MxParameterServer(Optimizer optimizer) {
super(createdKVStore());
callback = new OptimizerCallback(optimizer);
JnaUtils.parameterStoreSetUpdater(getHandle(), null, callback, null);
priority = 0;
}
/** {@inheritDoc} */
@Override
public void init(String parameterId, NDArray[] values) {
String[] keys = new String[values.length];
Arrays.fill(keys, parameterId);
NDList vals = new NDList(values);
JnaUtils.parameterStoreInit(getHandle(), values.length, keys, vals);
}
/** {@inheritDoc} */
@Override
public void update(String parameterId, NDArray[] grads, NDArray[] params) {
String[] gradKeys = new String[grads.length];
String[] paramKeys = new String[params.length];
Arrays.fill(gradKeys, parameterId);
Arrays.fill(paramKeys, parameterId);
JnaUtils.parameterStorePushPull(
getHandle(),
grads.length,
gradKeys,
params.length,
paramKeys,
new NDList(grads),
new NDList(params),
-priority);
priority++;
}
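    // Ordering note (hedged): priority grows by one per update and is negated before being passed
    // to the KVStore, so each successive push/pull carries a strictly decreasing priority value,
    // which is intended to preserve the order in which parameter updates were issued.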
private static Pointer createdKVStore() {
return JnaUtils.parameterStoreCreate("device");
}
/** {@inheritDoc} */
@Override
public void close() {
Pointer pointer = handle.getAndSet(null);
if (pointer != null) {
JnaUtils.parameterStoreClose(pointer);
}
}
/** A helper to wrap the optimizer so it can be called by the MXNet KVStore. */
private static final class OptimizerCallback implements MxnetLibrary.MXKVStoreStrUpdater {
private Optimizer optimizer;
OptimizerCallback(Optimizer optimizer) {
this.optimizer = optimizer;
}
/** {@inheritDoc} */
@Override
public void apply(String parameterId, Pointer recv, Pointer local, Pointer handle) {
            // the updater callback argument order is: index, gradient, weight.
try (NDManager manager = MxNDManager.getSystemManager().newSubManager()) {
MxNDManager m = (MxNDManager) manager;
MxNDArray grad = m.create(recv);
MxNDArray weight = m.create(local);
optimizer.update(parameterId, weight, grad);
}
}
}
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/MxSymbolBlock.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.MalformedModelException;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.AbstractSymbolBlock;
import ai.djl.nn.Parameter;
import ai.djl.nn.ParameterList;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* {@code MxSymbolBlock} is the MXNet implementation of {@link SymbolBlock}.
*
 * <p>You can create an {@code MxSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*/
public class MxSymbolBlock extends AbstractSymbolBlock {
private static final Logger logger = LoggerFactory.getLogger(MxSymbolBlock.class);
private static final byte VERSION = 3;
private NDManager manager;
private CachedOp op;
private Symbol symbol;
private List<Parameter> mxNetParams; // includes input data
private Map<String, Parameter> parameters;
private Map<String, Shape> paramShapes;
private Shape[] outputShapes;
private PairList<String, Shape> inputDescriptions;
private PairList<String, Shape> outputDescriptions;
    private volatile boolean first; // volatile: checked outside the synchronized block in forwardInternal
/**
     * Constructs an {@code MxSymbolBlock} for a {@link Symbol}.
     *
     * <p>You can create an {@code MxSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*
* @param manager the manager to use for the block
* @param symbol the symbol containing the block's symbolic graph
*/
public MxSymbolBlock(NDManager manager, Symbol symbol) {
this(manager);
this.symbol = symbol;
initBlock();
}
/**
* Constructs an empty {@code MxSymbolBlock}.
*
* @param manager the manager to use for the block
*/
public MxSymbolBlock(NDManager manager) {
super(VERSION);
this.manager = manager;
}
/**
* Sets the names of the input data.
*
* @param inputNames the names of the input data
*/
public void setInputNames(List<String> inputNames) {
this.inputNames = inputNames;
// now that we know which of the parameters are just input placeholders and which
// are trainable, add them properly so they are correctly handled
Set<String> nameLookup = new HashSet<>(inputNames);
parameters = new LinkedHashMap<>(mxNetParams.size());
for (Parameter mxNetParameter : mxNetParams) {
if (!nameLookup.contains(mxNetParameter.getName())) {
parameters.put(mxNetParameter.getName(), mxNetParameter);
}
}
}
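    // Illustration (assumed names): if mxNetParams holds ["data",
    // "conv0_weight", "conv0_bias"] and setInputNames(["data"]) is called,
    // "data" stays an input placeholder while "conv0_weight" and "conv0_bias"
    // land in the trainable parameters map.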
/**
* Returns the list of inputs and parameter NDArrays.
*
* @return the list of inputs and parameter NDArrays
*/
public List<Parameter> getAllParameters() {
return mxNetParams;
}
    /**
     * Returns the layer names.
     *
     * @return a list of strings containing the layer names
     */
public List<String> getLayerNames() {
return symbol.getLayerNames();
}
/**
     * Returns the symbolic graph of the model.
*
* @return a {@link Symbol} object
*/
public Symbol getSymbol() {
return symbol;
}
/**
     * Applies the given optimization algorithm to the model.
*
* @param optimization the name of the optimization
*/
public void optimizeFor(String optimization) {
Symbol newSymbol = symbol.optimizeFor(optimization, manager.getDevice());
symbol.close();
symbol = newSymbol;
}
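    // Hedged usage sketch: "TensorRT" is one example backend name; which
    // backends are available depends on how the underlying MXNet was built.
    //
    //   MxSymbolBlock block = (MxSymbolBlock) model.getBlock();
    //   block.optimizeFor("TensorRT");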
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeInput() {
if (inputDescriptions == null) {
inputDescriptions = new PairList<>();
for (String name : inputNames) {
// Add empty shapes as input shapes are not saved
// in MXNet models
logger.warn(
"Input shapes are unknown, please run predict or forward once"
+ " and call describeInput again.");
inputDescriptions.add(name, new Shape());
}
}
return inputDescriptions;
}
/** {@inheritDoc} */
@Override
public ParameterList getDirectParameters() {
return new ParameterList(parameters);
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeOutput() {
if (outputDescriptions == null) {
logger.warn(
"Output shapes are unknown, please run predict or forward once"
+ " and call describeOutput again.");
}
return outputDescriptions;
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
if (first) {
synchronized (this) {
if (first) {
                    // creating a CachedOp is not thread-safe; the double-checked
                    // lock on the volatile "first" flag avoids creating multiple CachedOps
op = JnaUtils.createCachedOp(this, (MxNDManager) manager, training);
inputDescriptions = new PairList<>();
outputDescriptions = new PairList<>();
for (NDArray array : inputs) {
inputDescriptions.add(array.getName(), array.getShape());
}
NDList outputs = op.forward(parameterStore, inputs, training);
for (NDArray array : outputs) {
outputDescriptions.add(array.getName(), array.getShape());
}
first = false;
return outputs;
}
}
}
return op.forward(parameterStore, inputs, training);
}
/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(Shape[] inputShapes) {
if (outputShapes == null) {
String[] outputNames = symbol.getOutputNames();
outputShapes = new Shape[outputNames.length];
for (int i = 0; i < outputShapes.length; ++i) {
outputShapes[i] = getParameterShape(outputNames[i], inputShapes);
}
}
return outputShapes;
}
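    // Worked example (assumed shapes): with inputNames = ["data"] and
    // inputShapes = [(1, 3, 224, 224)], the output shapes are resolved through
    // Symbol.inferShape; a classification head might report e.g. (1, 1000),
    // but the concrete values depend entirely on the loaded graph.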
/** {@inheritDoc} */
@Override
public void removeLastBlock() {
List<String> layerNames = getLayerNames();
String layerName = layerNames.get(layerNames.size() - 2);
Symbol sliced = symbol.get(layerName);
symbol.close();
symbol = sliced;
HashSet<String> set = new HashSet<>(Arrays.asList(symbol.getAllNames()));
for (int i = mxNetParams.size() - 1; i >= 0; --i) {
Parameter parameter = mxNetParams.get(i);
if (!set.contains(parameter.getName())) {
mxNetParams.remove(i).close();
parameters.remove(parameter.getName(), parameter);
}
}
}
private Shape getParameterShape(String name, Shape[] inputShapes) {
if (paramShapes == null) {
PairList<String, Shape> pairs = new PairList<>();
for (int i = 0; i < inputNames.size(); i++) {
pairs.add(inputNames.get(i), inputShapes[i]);
}
paramShapes = symbol.inferShape(pairs);
}
if (paramShapes.containsKey(name)) {
return paramShapes.get(name);
} else {
throw new IllegalArgumentException("Name " + name + " not found");
}
}
/** {@inheritDoc} */
@Override
public void saveParameters(DataOutputStream os) throws IOException {
os.writeByte(VERSION);
String json = symbol.toJsonString();
// symbol size may go beyond os.writeUTF() size (65535)
byte[] bytes = json.getBytes(StandardCharsets.UTF_8);
os.writeInt(bytes.length);
os.write(bytes);
int size = inputNames.size();
os.writeInt(size);
for (String name : inputNames) {
os.writeUTF(name);
}
for (Parameter parameter : mxNetParams) {
parameter.save(os);
}
}
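    // Stream layout produced above (VERSION = 3):
    //   [byte version]
    //   [int symbolJsonLength][symbolJson bytes, UTF-8]
    //   [int inputNameCount][writeUTF(inputName) ...]
    //   [Parameter.save(os) payload for each entry of mxNetParams, in order]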
/** {@inheritDoc} */
@Override
public void loadParameters(NDManager manager, DataInputStream is)
throws IOException, MalformedModelException {
byte version = is.readByte();
if (version > VERSION) {
throw new MalformedModelException("Unsupported encoding version: " + version);
}
if (version < VERSION && symbol == null) {
            throw new IllegalStateException(
                    "Symbol is required for versions older than 3, please use Model to load");
}
if (version == VERSION) {
int len = is.readInt();
byte[] bytes = new byte[len];
            // is.read(bytes) may return before filling the buffer; readFully
            // guarantees the complete symbol JSON is read or fails cleanly
            try {
                is.readFully(bytes);
            } catch (EOFException e) {
                throw new MalformedModelException("InputStream ends at symbol loading!", e);
            }
            // version 3 streams embed the symbol JSON; load it and initialize the block
symbol =
Symbol.loadJson(
(MxNDManager) manager, new String(bytes, StandardCharsets.UTF_8));
initBlock();
}
int size = is.readInt();
for (int i = 0; i < size; ++i) {
inputNames.add(is.readUTF());
}
for (Parameter parameter : mxNetParams) {
parameter.load(this.manager, is);
}
setInputNames(inputNames);
}
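    // Round-trip note: this mirrors saveParameters above: version byte,
    // embedded symbol JSON (version 3 streams only), input names, then each
    // Parameter payload in mxNetParams order.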
private void initBlock() {
inputNames = new ArrayList<>();
String[] allNames = symbol.getAllNames();
mxNetParams = new ArrayList<>(allNames.length);
Set<String> auxNameSet = new HashSet<>(Arrays.asList(symbol.getAuxNames()));
for (String name : allNames) {
Parameter.Type type = inferType(name);
boolean requireGrad = !auxNameSet.contains(name);
mxNetParams.add(
Parameter.builder()
.setName(name)
.setType(type)
.optRequiresGrad(requireGrad)
.build());
}
first = true;
}
private static Parameter.Type inferType(String name) {
if (name.endsWith("bias")) {
return Parameter.Type.BIAS;
} else if (name.endsWith("gamma")) {
return Parameter.Type.GAMMA;
} else if (name.endsWith("beta")) {
return Parameter.Type.BETA;
} else if (name.endsWith("moving_mean") || name.endsWith("running_mean")) {
return Parameter.Type.RUNNING_MEAN;
} else if (name.endsWith("moving_var") || name.endsWith("running_var")) {
return Parameter.Type.RUNNING_VAR;
} else if (name.endsWith("weight")) {
return Parameter.Type.WEIGHT;
}
return Parameter.Type.OTHER;
}
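    // Examples of the suffix-based mapping above (illustrative names):
    //   "conv0_weight"    -> WEIGHT
    //   "fc1_bias"        -> BIAS
    //   "bn0_gamma"       -> GAMMA
    //   "bn0_moving_mean" -> RUNNING_MEAN
    //   "data"            -> OTHER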
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/Symbol.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.mxnet.engine;
import ai.djl.Device;
import ai.djl.mxnet.jna.JnaUtils;
import ai.djl.ndarray.types.Shape;
import ai.djl.util.NativeResource;
import ai.djl.util.PairList;
import ai.djl.util.Utils;
import com.sun.jna.Pointer;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* {@code Symbol} is an internal helper for symbolic model graphs used by the {@link
* ai.djl.nn.SymbolBlock}.
*
* @see ai.djl.nn.SymbolBlock
* @see <a href="https://mxnet.incubator.apache.org/api/python/docs/api/symbol/index.html">MXNet
* Symbol</a>
*/
public class Symbol extends NativeResource<Pointer> {
// private String[] argParams;
// private String[] auxParams;
private String[] outputs;
// private List<Integer> outputLayouts;
private MxNDManager manager;
/**
* Constructs a {@code Symbol}.
*
* @param manager the manager to attach the symbol to
* @param pointer the symbol's native data location
*/
Symbol(MxNDManager manager, Pointer pointer) {
super(pointer);
this.manager = manager;
manager.attachInternal(getUid(), this);
// argParams = JnaUtils.listSymbolArguments(getHandle());
// auxParams = JnaUtils.listSymbolAuxiliaryStates(getHandle());
}
/**
* Loads a symbol from a path.
*
* @param manager the manager to load the symbol to
* @param path the path to the symbol file
* @return the new symbol
*/
public static Symbol load(MxNDManager manager, String path) {
Pointer pointer = JnaUtils.createSymbolFromFile(path);
return new Symbol(manager, pointer);
}
/**
* Loads a symbol from a json string.
*
* @param manager the manager to load the symbol to
* @param json the json string of the symbol.
* @return the new symbol
*/
public static Symbol loadJson(MxNDManager manager, String json) {
Pointer pointer = JnaUtils.createSymbolFromString(json);
return new Symbol(manager, pointer);
}
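    // Hedged usage sketch: "model-symbol.json" is a placeholder path; MXNet
    // models conventionally ship a <prefix>-symbol.json graph file.
    //
    //   try (NDManager manager = MxNDManager.getSystemManager().newSubManager()) {
    //       Symbol symbol = Symbol.load((MxNDManager) manager, "model-symbol.json");
    //       String[] args = symbol.getArgNames();
    //   } // closing the manager also frees the attached symbol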
/**
* Returns the symbol argument names.
*
* @return the symbol argument names
*/
public String[] getArgNames() {
return JnaUtils.listSymbolArguments(getHandle());
}
/**
* Returns the MXNet auxiliary states for the symbol.
*
* @return the MXNet auxiliary states for the symbol
*/
public String[] getAuxNames() {
return JnaUtils.listSymbolAuxiliaryStates(getHandle());
}
/**
* Returns the symbol names.
*
* @return the symbol names
*/
public String[] getAllNames() {
return JnaUtils.listSymbolNames(getHandle());
}
/**
* Returns the symbol outputs.
*
* @return the symbol outputs
*/
public String[] getOutputNames() {
if (outputs == null) {
outputs = JnaUtils.listSymbolOutputs(getHandle());
}
return outputs;
}
private String[] getInternalOutputNames() {
return JnaUtils.listSymbolOutputs(getInternals().getHandle());
}
/*
public List<Integer> getOutputLayouts() {
if (outputLayouts == null) {
outputLayouts = new ArrayList<>();
for (String argName : getArgParams()) {
try (Symbol symbol = get(argName)) {
Layout layout = Layout.fromValue(symbol.getAttribute("__layout__"));
outputLayouts.add(DataDesc.getBatchAxis(layout));
}
}
}
return outputLayouts;
}
public String getAttribute(String key) {
return JnaUtils.getSymbolAttr(getHandle(), key);
}
public PairList<String, String> getAttributes() {
return JnaUtils.listSymbolAttr(getHandle());
}
*/
/**
* Copies the symbol.
*
* @return a new copy of the symbol
*/
public Symbol copy() {
throw new UnsupportedOperationException("Not implemented yet");
}
/**
* Returns the output symbol by index.
*
* @param index the index of the output
* @return the symbol output as a new symbol
*/
public Symbol get(int index) {
Pointer pointer = JnaUtils.getSymbolOutput(getInternals().getHandle(), index);
return new Symbol(manager, pointer);
}
/**
* Returns the output symbol with the given name.
*
* @param name the name of the symbol to return
* @return the output symbol
* @throws IllegalArgumentException Thrown if no output matches the name
*/
public Symbol get(String name) {
String[] out = getInternalOutputNames();
int index = Utils.indexOf(out, name);
if (index < 0) {
throw new IllegalArgumentException("Cannot find output that matches name: " + name);
}
return get(index);
}
/**
* Returns the symbol internals.
*
* @return the symbol internals symbol
*/
public Symbol getInternals() {
Pointer pointer = JnaUtils.getSymbolInternals(getHandle());
return new Symbol(manager, pointer);
}
/**
* Returns the list of names for all internal outputs.
*
* @return a list of names
*/
public List<String> getLayerNames() {
String[] outputNames = getInternalOutputNames();
String[] allNames = getAllNames();
Set<String> allNamesSet = new LinkedHashSet<>(Arrays.asList(allNames));
        // filter out all parameter names, keeping only the output layers
return Arrays.stream(outputNames)
.filter(n -> !allNamesSet.contains(n))
.collect(Collectors.toList());
}
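    // Illustration (assumed names): if the internal outputs are
    // ["conv0_weight", "conv0_output", "fc1_output"] and getAllNames() lists
    // the parameter and data names, only "conv0_output" and "fc1_output"
    // survive the filter above.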
/**
* Infers the shapes for all parameters inside a symbol from the given input shapes.
*
* @param pairs the given input name and shape
* @return a map of arguments with names and shapes
*/
public Map<String, Shape> inferShape(PairList<String, Shape> pairs) {
List<List<Shape>> shapes = JnaUtils.inferShape(this, pairs);
List<Shape> argShapes = shapes.get(0);
List<Shape> outputShapes = shapes.get(1);
List<Shape> auxShapes = shapes.get(2);
// TODO: add output to the map
String[] argNames = getArgNames();
String[] auxNames = getAuxNames();
String[] outputNames = getOutputNames();
Map<String, Shape> shapesMap = new ConcurrentHashMap<>();
for (int i = 0; i < argNames.length; i++) {
shapesMap.put(argNames[i], argShapes.get(i));
}
for (int i = 0; i < auxNames.length; i++) {
shapesMap.put(auxNames[i], auxShapes.get(i));
}
for (int i = 0; i < outputNames.length; i++) {
shapesMap.put(outputNames[i], outputShapes.get(i));
}
return shapesMap;
}
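    // Worked example (assumed names/shapes): inferShape(pairs) with pairs =
    // [("data", (1, 3, 224, 224))] returns one entry per argument, auxiliary
    // state, and output, e.g. "conv0_weight" -> (64, 3, 7, 7), keyed by the
    // names from getArgNames(), getAuxNames(), and getOutputNames().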
    /**
     * [Experimental] Applies a customized optimization to the symbol.
     *
     * <p>This method can be used with EIA or TensorRT for model acceleration.
     *
     * @param backend the backend name
     * @param device the device assigned
     * @return the optimized symbol
     */
public Symbol optimizeFor(String backend, Device device) {
return new Symbol(manager, JnaUtils.optimizeFor(this, backend, device));
}
/*
public String debugStr() {
return JnaUtils.getSymbolDebugString(getHandle());
}
public void setAttr(Map<String, String> attrs) {
for (Map.Entry<String, String> entry : attrs.entrySet()) {
JnaUtils.setSymbolAttr(getHandle(), entry.getKey(), entry.getValue());
}
}
public PairList<String, String> listAttr() {
return JnaUtils.listSymbolAttr(getHandle());
}
public PairList<String, String> attrMap() {
return JnaUtils.listSymbolAttr(getHandle());
}
public void save(String path) {
JnaUtils.saveSymbol(getHandle(), path);
}
public Symbol compose(String name, String[] keys) {
return new Symbol(manager, JnaUtils.compose(getHandle(), name, keys));
}
public void compose(String name, Map<String, String> symbols) {
JnaUtils.compose(getHandle(), name, symbols.values().toArray(JnaUtils.EMPTY_ARRAY));
}
public String toJson() {
return JnaUtils.symbolToJson(getHandle());
}
*/
    /**
     * Converts the symbol to a JSON string for saving purposes.
     *
     * @return the JSON string
     */
public String toJsonString() {
return JnaUtils.getSymbolString(getHandle());
}
/** {@inheritDoc} */
@Override
public String toString() {
return Arrays.toString(getOutputNames());
}
/** {@inheritDoc} */
@Override
public void close() {
Pointer pointer = handle.getAndSet(null);
if (pointer != null) {
manager.detachInternal(getUid());
JnaUtils.freeSymbol(pointer);
manager = null;
}
}
}
|
0
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet
|
java-sources/ai/djl/mxnet/mxnet-engine/0.34.0/ai/djl/mxnet/engine/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains implementations of interfaces within the DJL API for the Apache MXNet Engine.
*
* @see ai.djl.mxnet.engine.MxEngine
*/
package ai.djl.mxnet.engine;
|