| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains a library of built-in datasets.
*
* <p>The basic datasets obtain their default repository settings from {@link
* ai.djl.basicdataset.BasicDatasets}.
*/
package ai.djl.basicdataset;
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/BananaDetection.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv;
import ai.djl.Application;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.Point;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.translate.Pipeline;
import ai.djl.translate.TranslateException;
import ai.djl.util.JsonUtils;
import ai.djl.util.PairList;
import ai.djl.util.Progress;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Type;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Banana image detection dataset consisting of 3 x 256 x 256 images, each containing a banana of
* varying size. There are 1000 training images and 100 testing images.
*/
public class BananaDetection extends ObjectDetectionDataset {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "banana";
private final Usage usage;
private final List<Path> imagePaths;
private final PairList<Long, Rectangle> labels;
private final MRL mrl;
private boolean prepared;
/**
* Creates a new instance of {@link RandomAccessDataset} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
public BananaDetection(Builder builder) {
super(builder);
usage = builder.usage;
mrl = builder.getMrl();
imagePaths = new ArrayList<>();
labels = new PairList<>();
}
/**
* Creates a new builder to build a {@link BananaDetection}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public PairList<Long, Rectangle> getObjects(long index) {
return new PairList<>(Collections.singletonList(labels.get((int) index)));
}
/** {@inheritDoc} */
@Override
public List<String> getClasses() {
return Collections.singletonList("banana");
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return imagePaths.size();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException, TranslateException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path usagePath;
switch (usage) {
case TRAIN:
usagePath = Paths.get("train");
break;
case TEST:
usagePath = Paths.get("test");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
usagePath = root.resolve(usagePath);
Path indexFile = usagePath.resolve("index.file");
try (Reader reader = Files.newBufferedReader(indexFile)) {
Type mapType = new TypeToken<Map<String, List<Float>>>() {}.getType();
Map<String, List<Float>> metadata = JsonUtils.GSON.fromJson(reader, mapType);
for (Map.Entry<String, List<Float>> entry : metadata.entrySet()) {
String imgName = entry.getKey();
imagePaths.add(usagePath.resolve(imgName));
List<Float> label = entry.getValue();
long objectClass = label.get(0).longValue();
Rectangle objectLocation =
new Rectangle(
new Point(label.get(1), label.get(2)), label.get(3), label.get(4));
labels.add(objectClass, objectLocation);
}
}
prepared = true;
}
/** {@inheritDoc} */
@Override
protected Image getImage(long index) throws IOException {
int idx = Math.toIntExact(index);
return ImageFactory.getInstance().fromFile(imagePaths.get(idx));
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageWidth() {
return Optional.of(256);
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageHeight() {
return Optional.of(256);
}
/** A builder for a {@link BananaDetection}. */
public static final class Builder extends ImageDataset.BaseBuilder<BananaDetection.Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
}
/** {@inheritDoc} */
@Override
public BananaDetection.Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public BananaDetection.Builder optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public BananaDetection.Builder optRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
* Sets optional groupId.
*
* @param groupId the groupId
* @return this builder
*/
public BananaDetection.Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public BananaDetection.Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Builds the {@link BananaDetection}.
*
* @return the {@link BananaDetection}
*/
public BananaDetection build() {
if (pipeline == null) {
pipeline = new Pipeline(new ToTensor());
}
return new BananaDetection(this);
}
MRL getMrl() {
return repository.dataset(Application.CV.ANY, groupId, artifactId, VERSION);
}
}
}
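A minimal usage sketch (not part of the source file): it assumes a DJL engine on the classpath, network access to the default repository, and imports from ai.djl.training.dataset, ai.djl.training.util, and ai.djl.ndarray.

// Hypothetical usage sketch for BananaDetection, not from the DJL sources.
BananaDetection banana =
        BananaDetection.builder()
                .optUsage(Dataset.Usage.TRAIN)
                .setSampling(32, true) // batch size 32, shuffled
                .build();
banana.prepare(new ProgressBar()); // downloads and indexes the artifact on first use
try (NDManager manager = NDManager.newBaseManager()) {
    for (Batch batch : banana.getData(manager)) {
        NDArray images = batch.getData().head(); // (32, 3, 256, 256) after ToTensor
        NDArray boxes = batch.getLabels().head(); // (32, 1, 5): class, x, y, width, height
        batch.close();
    }
}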
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/CocoDetection.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv;
import ai.djl.Application;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.Point;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.translate.Pipeline;
import ai.djl.util.PairList;
import ai.djl.util.Progress;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* Coco image detection dataset from http://cocodataset.org/#home.
*
* <p>Coco is a large-scale object detection, segmentation, and captioning dataset, although only
* object detection is implemented at this time. It contains 1.5 million object instances and is
* one of the standard benchmark object detection datasets.
*
* <p>To use this dataset, you have to manually add {@code
* com.twelvemonkeys.imageio:imageio-jpeg:3.11.0} as a dependency in your project.
*
* <p>Each image might have different {@link ai.djl.ndarray.types.Shape}s.
*/
public class CocoDetection extends ObjectDetectionDataset {
// TODO: Add synset logic for coco dataset
private static final String ARTIFACT_ID = "coco";
private static final String VERSION = "1.0";
private Usage usage;
private List<Path> imagePaths;
private List<PairList<Long, Rectangle>> labels;
private MRL mrl;
private boolean prepared;
CocoDetection(Builder builder) {
super(builder);
usage = builder.usage;
mrl = builder.getMrl();
imagePaths = new ArrayList<>();
labels = new ArrayList<>();
}
/**
* Creates a builder to build a {@link CocoDetection}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public PairList<Long, Rectangle> getObjects(long index) {
return labels.get(Math.toIntExact(index));
}
/** {@inheritDoc} */
@Override
public List<String> getClasses() {
throw new UnsupportedOperationException(
"getClasses() for CocoDetection has not been implemented yet.");
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path jsonFile;
switch (usage) {
case TRAIN:
jsonFile = root.resolve("annotations").resolve("instances_train2017.json");
break;
case TEST:
jsonFile = root.resolve("annotations").resolve("instances_val2017.json");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
CocoUtils coco = new CocoUtils(jsonFile);
coco.prepare();
List<Long> imageIds = coco.getImageIds();
for (long id : imageIds) {
Path imagePath = root.resolve(coco.getRelativeImagePath(id));
PairList<Long, Rectangle> labelOfImageId = getLabels(coco, id);
if (!labelOfImageId.isEmpty()) {
imagePaths.add(imagePath);
labels.add(labelOfImageId);
}
}
prepared = true;
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return imagePaths.size();
}
private PairList<Long, Rectangle> getLabels(CocoUtils coco, long imageId) {
List<Long> annotationIds = coco.getAnnotationIdByImageId(imageId);
if (annotationIds == null) {
return new PairList<>();
}
PairList<Long, Rectangle> label = new PairList<>(annotationIds.size());
for (long annotationId : annotationIds) {
CocoMetadata.Annotation annotation = coco.getAnnotationById(annotationId);
if (annotation.getArea() > 0) {
double[] box = annotation.getBoundingBox();
long labelClass = coco.mapCategoryId(annotation.getCategoryId());
Rectangle objectLocation = new Rectangle(new Point(box[0], box[1]), box[2], box[3]);
label.add(labelClass, objectLocation);
}
}
return label;
}
@Override
protected Image getImage(long index) throws IOException {
int idx = Math.toIntExact(index);
return ImageFactory.getInstance().fromFile(imagePaths.get(idx));
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageWidth() {
return Optional.empty();
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageHeight() {
return Optional.empty();
}
/** A builder to construct a {@link CocoDetection}. */
public static final class Builder extends ImageDataset.BaseBuilder<Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
flag = Image.Flag.COLOR;
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the new usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
* Sets optional groupId.
*
* @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Builds the new {@link CocoDetection}.
*
* @return the new {@link CocoDetection}
*/
public CocoDetection build() {
if (pipeline == null) {
pipeline = new Pipeline(new ToTensor());
}
return new CocoDetection(this);
}
MRL getMrl() {
return repository.dataset(Application.CV.ANY, groupId, artifactId, VERSION);
}
}
}
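A hedged usage sketch (not part of the source file); as the class Javadoc notes, com.twelvemonkeys.imageio:imageio-jpeg must be added manually to decode some COCO JPEGs.

// Hypothetical sketch, not from the DJL sources.
CocoDetection coco =
        CocoDetection.builder()
                .optUsage(Dataset.Usage.TRAIN)
                .optFlag(Image.Flag.COLOR) // already the builder default
                .setSampling(16, true)
                .build();
coco.prepare(new ProgressBar()); // parses instances_train2017.json and indexes the images
PairList<Long, Rectangle> firstImageObjects = coco.getObjects(0);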
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/CocoMetadata.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/** A metadata class to represent the structure of annotations in Coco. */
public class CocoMetadata {
private List<Image> images;
private List<Annotation> annotations;
private List<Category> categories;
/**
* Returns a list of all annotations.
*
* @return a list of all annotations
*/
public List<Annotation> getAnnotations() {
return annotations;
}
/**
* Returns a list of all categories.
*
* @return a list of all categories
*/
public List<Category> getCategories() {
return categories;
}
/**
* Returns a list of all images.
*
* @return a list of all images
*/
public List<Image> getImages() {
return images;
}
/** An annotation applied to an image in the coco dataset. */
public static final class Annotation {
@SerializedName("image_id")
private long imageId;
private long id;
@SerializedName("bbox")
private double[] bBox;
private double area;
@SerializedName("category_id")
private long categoryId;
/**
* Returns the id of the image this annotation applies to.
*
* @return the id of the image this annotation applies to
*/
public long getImageId() {
return imageId;
}
/**
* Returns the id of this annotation.
*
* @return the id of this annotation
*/
public long getId() {
return id;
}
/**
* Returns the bounding box of this annotation.
*
* @return the bounding box of this annotation
*/
public double[] getBoundingBox() {
return bBox;
}
/**
* Returns the category id of this annotation.
*
* @return the category id of this annotation
*/
public long getCategoryId() {
return categoryId;
}
/**
* Returns the area of this annotation.
*
* @return the area of this annotation
*/
public double getArea() {
return area;
}
}
/** An image in the coco dataset. */
public static final class Image {
private int id;
@SerializedName("coco_url")
private String cocoUrl;
private int height;
private int width;
/**
* Returns the id of this image.
*
* @return the id of this image
*/
public long getId() {
return id;
}
/**
* Returns the url of this image.
*
* @return the url of this image
*/
public String getCocoUrl() {
return cocoUrl;
}
/**
* Returns the height of this image.
*
* @return the height of this image
*/
public int getHeight() {
return height;
}
/**
* Returns the width of this image.
*
* @return the width of this image
*/
public int getWidth() {
return width;
}
}
/** An annotation category in the coco dataset. */
public static final class Category {
private long id;
/**
* Returns the id of this category.
*
* @return the id of this category
*/
public long getId() {
return id;
}
}
}
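An illustrative sketch (not part of the source file) of how this class maps onto a COCO annotation file via Gson; the local file path is hypothetical.

// Deserializing a COCO annotation file into CocoMetadata, mirroring what
// CocoUtils.prepare() does internally.
Path annotationFile = Paths.get("annotations/instances_val2017.json");
try (Reader reader = Files.newBufferedReader(annotationFile)) {
    CocoMetadata metadata = JsonUtils.GSON.fromJson(reader, CocoMetadata.class);
    System.out.println(metadata.getImages().size() + " images, "
            + metadata.getAnnotations().size() + " annotations");
}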
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/CocoUtils.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv;
import ai.djl.util.JsonUtils;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** A utility class that assists in loading and parsing the annotations in Coco. */
public class CocoUtils {
private Path annotationPath;
private boolean prepared;
private List<Long> imageIds;
private Map<Long, CocoMetadata.Image> imageMap;
private Map<Long, CocoMetadata.Annotation> annotationMap;
private Map<Long, List<Long>> imageToAnn;
private Map<Long, Integer> categoryIdMap;
CocoUtils(Path annotationPath) {
this.annotationPath = annotationPath;
imageIds = new ArrayList<>();
imageMap = new HashMap<>();
annotationMap = new HashMap<>();
imageToAnn = new HashMap<>();
categoryIdMap = new HashMap<>();
}
/**
* Prepares and indexes the annotation file in memory.
*
* @throws IOException if reading the annotation file fails
*/
public void prepare() throws IOException {
if (!prepared) {
CocoMetadata metadata;
try (Reader reader = Files.newBufferedReader(annotationPath)) {
metadata = JsonUtils.GSON.fromJson(reader, CocoMetadata.class);
}
createIndex(metadata);
prepared = true;
}
}
private void createIndex(CocoMetadata metadata) {
for (CocoMetadata.Annotation annotation : metadata.getAnnotations()) {
long imageId = annotation.getImageId();
long id = annotation.getId();
if (!imageToAnn.containsKey(imageId)) {
imageToAnn.put(annotation.getImageId(), new ArrayList<>());
}
imageToAnn.get(imageId).add(id);
annotationMap.put(id, annotation);
}
for (CocoMetadata.Image image : metadata.getImages()) {
imageIds.add(image.getId());
imageMap.put(image.getId(), image);
}
// create categoryIndex
List<Long> categoryIds = new ArrayList<>();
for (CocoMetadata.Category category : metadata.getCategories()) {
categoryIds.add(category.getId());
}
for (int i = 0; i < categoryIds.size(); i++) {
categoryIdMap.put(categoryIds.get(i), i);
}
// sort to keep the dataset ordered
Collections.sort(imageIds);
}
/**
* Returns all image ids in the annotation file.
*
* @return all image ids in the annotation file
*/
public List<Long> getImageIds() {
return imageIds;
}
/**
* Returns the relative path of an image given an image id.
*
* @param imageId the image id to retrieve the path for
* @return the relative path of an image
*/
public Path getRelativeImagePath(long imageId) {
CocoMetadata.Image image = imageMap.get(imageId);
String[] cocoUrl = image.getCocoUrl().split("/");
return Paths.get(cocoUrl[cocoUrl.length - 2])
.resolve(Paths.get(cocoUrl[cocoUrl.length - 1]));
}
/**
* Returns all ids of the annotation that correspond to a given image id.
*
* @param imageId the image id to retrieve annotations for
* @return all ids of the annotation
*/
public List<Long> getAnnotationIdByImageId(long imageId) {
return imageToAnn.get(imageId);
}
/**
* Returns an {@link CocoMetadata.Annotation} that corresponds to a given annotation id.
*
* @param annotationId the annotation id to retrieve an annotation for
* @return an {@link CocoMetadata.Annotation}
*/
public CocoMetadata.Annotation getAnnotationById(long annotationId) {
return annotationMap.get(annotationId);
}
/**
* Returns the continuous category id given an original category id.
*
* @param originalCategoryId the original category id to retrieve the continuous category id for
* @return the continuous category id
*/
public int mapCategoryId(long originalCategoryId) {
return categoryIdMap.get(originalCategoryId);
}
}
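A usage sketch (not part of the source file); since the constructor is package-private, such code would live in ai.djl.basicdataset.cv, and the annotation path is hypothetical.

// Indexing an annotation file and walking its images.
CocoUtils coco = new CocoUtils(Paths.get("annotations/instances_val2017.json"));
coco.prepare(); // parses the JSON and builds the in-memory indices
for (long imageId : coco.getImageIds()) {
    Path relativePath = coco.getRelativeImagePath(imageId); // e.g. "val2017/<file>.jpg"
    List<Long> annotationIds = coco.getAnnotationIdByImageId(imageId); // may be null
}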
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/ImageDataset.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.training.dataset.RandomAccessDataset;
import java.io.IOException;
import java.util.Optional;
/**
* A helper to create a {@link ai.djl.training.dataset.Dataset} where the data contains a single
* image.
*/
public abstract class ImageDataset extends RandomAccessDataset {
protected Image.Flag flag;
/**
* Creates a new instance of {@link RandomAccessDataset} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
public ImageDataset(BaseBuilder<?> builder) {
super(builder);
this.flag = builder.flag;
}
protected NDArray getRecordImage(NDManager manager, long index) throws IOException {
NDArray image = getImage(index).toNDArray(manager, flag);
// Resize the image if the image size is fixed
Optional<Integer> width = getImageWidth();
Optional<Integer> height = getImageHeight();
if (width.isPresent() && height.isPresent()) {
image = NDImageUtils.resize(image, width.get(), height.get());
}
return image;
}
/**
* Returns the image at the given index in the dataset.
*
* @param index the index (if the dataset is a list of data items)
* @return the image
* @throws IOException if the image could not be loaded
*/
protected abstract Image getImage(long index) throws IOException;
/**
* Returns the number of channels in the images in the dataset.
*
* <p>For example, RGB would be 3 channels while grayscale only uses 1 channel.
*
* @return the number of channels in the images in the dataset
*/
public int getImageChannels() {
return flag.numChannels();
}
/**
* Returns the width of the images in the dataset.
*
* @return the width of the images in the dataset
*/
public abstract Optional<Integer> getImageWidth();
/**
* Returns the height of the images in the dataset.
*
* @return the height of the images in the dataset
*/
public abstract Optional<Integer> getImageHeight();
/**
* Used to build an {@link ImageDataset}.
*
* @param <T> the builder type
*/
@SuppressWarnings("rawtypes")
public abstract static class BaseBuilder<T extends BaseBuilder<T>>
extends RandomAccessDataset.BaseBuilder<T> {
Image.Flag flag;
protected BaseBuilder() {
flag = Image.Flag.COLOR;
}
/**
* Sets the optional color mode flag.
*
* @param flag the color mode flag
* @return this builder
*/
public T optFlag(Image.Flag flag) {
this.flag = flag;
return self();
}
}
}
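A minimal, hypothetical subclass sketch (not part of the source file) showing the contract: implement get(...), getImage(...), availableSize(), and the two size accessors; because both dimensions are fixed here, getRecordImage(...) resizes every image.

// Hypothetical ImageDataset over an in-memory image list, with one scalar label per image.
public class InMemoryImageDataset extends ImageDataset {
    private final List<Image> images;
    private final float[] targets; // illustrative labels, one per image

    protected InMemoryImageDataset(BaseBuilder<?> builder, List<Image> images, float[] targets) {
        super(builder);
        this.images = images;
        this.targets = targets;
    }

    @Override
    public Record get(NDManager manager, long index) throws IOException {
        NDList data = new NDList(getRecordImage(manager, index)); // resized to 224 x 224
        NDList labels = new NDList(manager.create(targets[Math.toIntExact(index)]));
        return new Record(data, labels);
    }

    @Override
    protected Image getImage(long index) {
        return images.get(Math.toIntExact(index));
    }

    @Override
    protected long availableSize() {
        return images.size();
    }

    @Override
    public void prepare(Progress progress) {
        // nothing to download for an in-memory dataset
    }

    @Override
    public Optional<Integer> getImageWidth() {
        return Optional.of(224);
    }

    @Override
    public Optional<Integer> getImageHeight() {
        return Optional.of(224);
    }
}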
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/ObjectDetectionDataset.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.training.dataset.Record;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import java.io.IOException;
import java.util.List;
/**
* A helper to create {@link ai.djl.training.dataset.Dataset}s for {@link
* ai.djl.Application.CV#OBJECT_DETECTION}.
*/
public abstract class ObjectDetectionDataset extends ImageDataset {
/**
* Creates a new instance of {@link ObjectDetectionDataset} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
public ObjectDetectionDataset(ImageDataset.BaseBuilder<?> builder) {
super(builder);
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) throws IOException {
NDList data = new NDList(getRecordImage(manager, index));
PairList<Long, Rectangle> objects = getObjects(index);
float[][] labelsSplit = new float[objects.size()][5];
for (int i = 0; i < objects.size(); i++) {
Pair<Long, Rectangle> obj = objects.get(i);
labelsSplit[i][0] = obj.getKey();
Rectangle location = obj.getValue();
labelsSplit[i][1] = (float) location.getX();
labelsSplit[i][2] = (float) location.getY();
labelsSplit[i][3] = (float) location.getWidth();
labelsSplit[i][4] = (float) location.getHeight();
}
NDList labels = new NDList(manager.create(labelsSplit));
return new Record(data, labels);
}
/**
* Returns the list of objects in the image at the given index.
*
* @param index the index (if the dataset is a list of data items)
* @return the list of objects in the image. The long is the class number of the index into the
* list of classes of the desired class name. The rectangle is the location of the object
* inside the image.
* @throws IOException if the data could not be loaded
*/
public abstract PairList<Long, Rectangle> getObjects(long index) throws IOException;
/**
* Returns the classes that detected objects in the dataset can be classified into.
*
* @return the classes that detected objects in the dataset can be classified into.
*/
public abstract List<String> getClasses();
}
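A short sketch (not part of the source file) decoding the label layout produced by get(...) above; 'dataset' and 'manager' are assumed to already exist.

// Each label row is [classIndex, x, y, width, height] for one object.
Record record = dataset.get(manager, 0);
NDArray label = record.getLabels().head();
long classIndex = (long) label.getFloat(0, 0);
Rectangle box =
        new Rectangle(
                label.getFloat(0, 1), label.getFloat(0, 2), // x, y
                label.getFloat(0, 3), label.getFloat(0, 4)); // width, height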
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/PikachuDetection.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv;
import ai.djl.Application.CV;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.Point;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.translate.Pipeline;
import ai.djl.util.JsonUtils;
import ai.djl.util.PairList;
import ai.djl.util.Progress;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Type;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Pikachu image detection dataset that contains a single Pikachu in each image.
*
* <p>It is based on a section of the <a
* href="http://d2l.ai/chapter_computer-vision/object-detection-dataset.html">Dive into Deep
* Learning book</a>. It contains 1000 Pikachu images of different angles and sizes created using
* an open source 3D Pikachu model.
*/
public class PikachuDetection extends ObjectDetectionDataset {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "pikachu";
private Usage usage;
private List<Path> imagePaths;
private PairList<Long, Rectangle> labels;
private MRL mrl;
private boolean prepared;
protected PikachuDetection(Builder builder) {
super(builder);
usage = builder.usage;
mrl = builder.getMrl();
imagePaths = new ArrayList<>();
labels = new PairList<>();
}
/**
* Creates a new builder to build a {@link PikachuDetection}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path usagePath;
switch (usage) {
case TRAIN:
usagePath = Paths.get("train");
break;
case TEST:
usagePath = Paths.get("test");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
usagePath = root.resolve(usagePath);
Path indexFile = usagePath.resolve("index.file");
try (Reader reader = Files.newBufferedReader(indexFile)) {
Type mapType = new TypeToken<Map<String, List<Float>>>() {}.getType();
Map<String, List<Float>> metadata = JsonUtils.GSON.fromJson(reader, mapType);
for (Map.Entry<String, List<Float>> entry : metadata.entrySet()) {
String imgName = entry.getKey();
imagePaths.add(usagePath.resolve(imgName));
List<Float> label = entry.getValue();
long objectClass = label.get(4).longValue();
Rectangle objectLocation =
new Rectangle(
new Point(label.get(5), label.get(6)), label.get(7), label.get(8));
labels.add(objectClass, objectLocation);
}
}
prepared = true;
}
/** {@inheritDoc} */
@Override
public PairList<Long, Rectangle> getObjects(long index) {
return new PairList<>(Collections.singletonList(labels.get((int) index)));
}
/** {@inheritDoc} */
@Override
public List<String> getClasses() {
return Collections.singletonList("pikachu");
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return imagePaths.size();
}
@Override
protected Image getImage(long index) throws IOException {
int idx = Math.toIntExact(index);
return ImageFactory.getInstance().fromFile(imagePaths.get(idx));
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageWidth() {
return Optional.empty();
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageHeight() {
return Optional.empty();
}
/** A builder for a {@link PikachuDetection}. */
public static final class Builder extends ImageDataset.BaseBuilder<Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
* Sets optional groupId.
*
* @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Builds the {@link PikachuDetection}.
*
* @return the {@link PikachuDetection}
*/
public PikachuDetection build() {
if (pipeline == null) {
pipeline = new Pipeline(new ToTensor());
}
return new PikachuDetection(this);
}
MRL getMrl() {
return repository.dataset(CV.ANY, groupId, artifactId, VERSION);
}
}
}
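A minimal usage sketch (not part of the source file), assuming a DJL engine and repository access.

// Hypothetical sketch, not from the DJL sources.
PikachuDetection pikachu =
        PikachuDetection.builder()
                .optUsage(Dataset.Usage.TEST)
                .setSampling(1, false)
                .build();
pikachu.prepare(new ProgressBar());
long classIndex = pikachu.getObjects(0).get(0).getKey(); // always 0: "pikachu"
Rectangle location = pikachu.getObjects(0).get(0).getValue();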
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains a library of built-in datasets for {@link ai.djl.Application.CV}. */
package ai.djl.basicdataset.cv;
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/AbstractImageFolder.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.repository.zoo.DefaultModelZoo;
import ai.djl.translate.TranslateException;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
/**
* A dataset for loading image files stored in a folder structure.
*
* <p>Usually, you want to use {@link ImageFolder} instead.
*/
public abstract class AbstractImageFolder extends ImageClassificationDataset {
private static final Logger logger = LoggerFactory.getLogger(AbstractImageFolder.class);
private static final Set<String> EXT =
new HashSet<>(Arrays.asList(".jpg", ".jpeg", ".png", ".bmp", ".wbmp", ".gif"));
protected List<String> synset;
protected PairList<String, Integer> items;
protected MRL mrl;
protected boolean prepared;
private int maxDepth;
private Integer imageWidth;
private Integer imageHeight;
protected AbstractImageFolder(ImageFolderBuilder<?> builder) {
super(builder);
this.maxDepth = builder.maxDepth;
this.imageWidth = builder.imageWidth;
this.imageHeight = builder.imageHeight;
this.synset = new ArrayList<>();
this.items = new PairList<>();
String path = builder.repository.getBaseUri().toString();
mrl = MRL.undefined(builder.repository, DefaultModelZoo.GROUP_ID, path);
}
/** {@inheritDoc} */
@Override
protected Image getImage(long index) throws IOException {
ImageFactory imageFactory = ImageFactory.getInstance();
Pair<String, Integer> item = items.get(Math.toIntExact(index));
Path imagePath = getImagePath(item.getKey());
return imageFactory.fromFile(imagePath);
}
/** {@inheritDoc} */
@Override
protected long getClassNumber(long index) {
Pair<String, Integer> item = items.get(Math.toIntExact(index));
return item.getValue();
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return items.size();
}
/**
* Returns the synsets of the ImageFolder dataset.
*
* @return a list that contains synsets
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public List<String> getSynset() throws IOException, TranslateException {
prepare();
return synset;
}
protected void listImages(Path root, List<String> classes) {
int label = 0;
for (String className : classes) {
Path classFolder = root.resolve(className);
if (!Files.isDirectory(classFolder)) {
continue;
}
try (Stream<Path> stream = Files.walk(classFolder, maxDepth)) {
final int classLabel = label;
stream.forEach(
p -> {
if (isImage(p.toFile())) {
String path = p.toAbsolutePath().toString();
items.add(new Pair<>(path, classLabel));
}
});
} catch (IOException e) {
logger.warn("Failed to list images", e);
}
logger.debug("Loaded {} images in {}, class: {}", items.size(), classFolder, label);
++label;
}
}
protected abstract Path getImagePath(String key);
protected boolean isImage(File file) {
String path = file.getName();
if (!file.isFile() || file.isHidden() || path.startsWith(".")) {
return false;
}
int extensionIndex = path.lastIndexOf('.');
if (extensionIndex < 0) {
return false;
}
return EXT.contains(path.substring(extensionIndex).toLowerCase());
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageWidth() {
return Optional.ofNullable(imageWidth);
}
/** {@inheritDoc} */
@Override
public Optional<Integer> getImageHeight() {
return Optional.ofNullable(imageHeight);
}
/** {@inheritDoc} */
@Override
public List<String> getClasses() {
return synset;
}
/**
* Used to build an {@link AbstractImageFolder}.
*
* @param <T> the builder type
*/
public abstract static class ImageFolderBuilder<T extends ImageFolderBuilder<T>>
extends BaseBuilder<T> {
Repository repository;
int maxDepth;
Integer imageWidth;
Integer imageHeight;
protected ImageFolderBuilder() {
maxDepth = 1;
}
/**
* Sets the repository containing the image folder.
*
* @param repository the repository containing the image folder
* @return this builder
*/
public T setRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
* Sets the repository file path containing the image folder.
*
* @param path the repository file path containing the image folder
* @return this builder
*/
public T setRepositoryPath(String path) {
this.repository = Repository.newInstance("images", path);
return self();
}
/**
* Sets the repository file path containing the image folder.
*
* @param path the repository file path containing the image folder
* @return this builder
*/
public T setRepositoryPath(Path path) {
this.repository = Repository.newInstance("images", path);
return self();
}
/**
* Sets the depth of the image folder.
*
* @param maxDepth the maximum number of directory levels to visit
* @return this builder
*/
public T optMaxDepth(int maxDepth) {
this.maxDepth = maxDepth;
return self();
}
/**
* Sets the size of the images.
*
* @param size the size (both width and height)
* @return this builder
*/
public T optImageSize(int size) {
this.imageWidth = size;
this.imageHeight = size;
return self();
}
/**
* Sets the width of the images.
*
* @param width the width of the images
* @return this builder
*/
public T optImageWidth(int width) {
this.imageWidth = width;
return self();
}
/**
* Sets the height of the images.
*
* @param height the height of the images
* @return this builder
*/
public T optImageHeight(int height) {
this.imageHeight = height;
return self();
}
}
}
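A usage sketch (not part of the source file) through the concrete ImageFolder subclass mentioned in the Javadoc above; the directory path is made up and must contain one subfolder per class.

// Hypothetical sketch, not from the DJL sources.
ImageFolder folder =
        ImageFolder.builder()
                .setRepositoryPath(Paths.get("/path/to/images"))
                .optMaxDepth(2)
                .optImageSize(224) // fixed size, so every image is resized
                .setSampling(32, true)
                .build();
folder.prepare(new ProgressBar());
List<String> synset = folder.getSynset(); // class names derived from the folder names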
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/CaptchaDataset.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.Application.CV;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.dataset.Record;
import ai.djl.translate.Pipeline;
import ai.djl.util.Progress;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/**
* A {@link ai.djl.training.dataset.Dataset} featuring captcha images.
*
* <p>Each image is a 160x60 grayscale image featuring 5 or 6 digits where each digit ranges from
* 0-10. The dataset therefore features 6 labels. Each label ranges from 0-11 where 0-10 represent a
* recognized digit and 11 indicates that the value is not a digit (size 5 and not 6).
*/
public class CaptchaDataset extends RandomAccessDataset {
private static final String ARTIFACT_ID = "captcha";
private static final String VERSION = "1.1";
public static final int IMAGE_WIDTH = 160;
public static final int IMAGE_HEIGHT = 60;
public static final int CAPTCHA_LENGTH = 6;
public static final int CAPTCHA_OPTIONS = 12;
private Usage usage;
private List<String> items;
private Artifact.Item dataItem;
private String pathPrefix;
private MRL mrl;
private boolean prepared;
/**
* Creates a new instance of {@link CaptchaDataset}.
*
* @param builder a builder with the necessary configurations
*/
public CaptchaDataset(Builder builder) {
super(builder);
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a builder to build a {@link CaptchaDataset}.
*
* @return a new builder
*/
public static CaptchaDataset.Builder builder() {
return new CaptchaDataset.Builder();
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) throws IOException {
String item = items.get(Math.toIntExact(index));
Path imagePath = mrl.getRepository().getFile(dataItem, pathPrefix + '/' + item + ".jpeg");
NDArray imageArray =
ImageFactory.getInstance()
.fromFile(imagePath)
.toNDArray(manager, Image.Flag.GRAYSCALE);
NDList data = new NDList(imageArray);
NDList labels = new NDList(CAPTCHA_LENGTH);
char[] labelChars = item.toCharArray();
for (int i = 0; i < CAPTCHA_LENGTH; i++) {
if (i < item.length()) {
int labelDigit = Integer.parseInt(Character.toString(labelChars[i]));
labels.add(manager.create(labelDigit));
} else {
labels.add(manager.create(11));
}
}
return new Record(data, labels);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return items.size();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
dataItem = artifact.getFiles().get("data");
pathPrefix = getUsagePath();
items = new ArrayList<>();
for (String filenameWithExtension :
mrl.getRepository().listDirectory(dataItem, pathPrefix)) {
String captchaFilename =
filenameWithExtension.substring(0, filenameWithExtension.lastIndexOf('.'));
items.add(captchaFilename);
}
prepared = true;
}
private String getUsagePath() {
switch (usage) {
case TRAIN:
return "train";
case TEST:
return "test";
case VALIDATION:
return "validate";
default:
throw new IllegalArgumentException("Invalid usage");
}
}
/** A builder for a {@link CaptchaDataset}. */
public static final class Builder extends BaseBuilder<Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Dataset.Usage.TRAIN;
pipeline = new Pipeline(new ToTensor());
}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
* @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return this;
}
/**
* Builds the {@link CaptchaDataset}.
*
* @return the {@link CaptchaDataset}
*/
public CaptchaDataset build() {
return new CaptchaDataset(this);
}
MRL getMrl() {
return repository.dataset(CV.ANY, groupId, artifactId, VERSION);
}
}
}
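A usage sketch (not part of the source file) showing the label padding described in the class Javadoc.

// Hypothetical sketch: each record carries CAPTCHA_LENGTH scalar labels; the
// value 11 pads captchas that have only five digits.
CaptchaDataset captcha =
        CaptchaDataset.builder()
                .optUsage(Dataset.Usage.TRAIN)
                .setSampling(32, true)
                .build();
captcha.prepare(new ProgressBar());
try (NDManager manager = NDManager.newBaseManager()) {
    Record record = captcha.get(manager, 0);
    NDList labels = record.getLabels(); // six scalar NDArrays, one per digit position
}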
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/Cifar10.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.Application.CV;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.engine.Engine;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.ArrayDataset;
import ai.djl.translate.Pipeline;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
/**
* CIFAR10 image classification dataset from https://www.cs.toronto.edu/~kriz/cifar.html.
*
* <p>It consists of 60,000 32x32 color images with 10 classes. It can train in a few hours with a
* GPU.
*
* <p>Each sample is an image (in 3-D {@link NDArray}) with shape (32, 32, 3).
*/
public final class Cifar10 extends ArrayDataset {
private static final String ARTIFACT_ID = "cifar10";
private static final String VERSION = "1.0";
public static final int IMAGE_WIDTH = 32;
public static final int IMAGE_HEIGHT = 32;
public static final float[] NORMALIZE_MEAN = {0.4914f, 0.4822f, 0.4465f};
public static final float[] NORMALIZE_STD = {0.2023f, 0.1994f, 0.2010f};
// 3072 = 32 * 32 * 3, i.e. one image size, +1 here is label
private static final int DATA_AND_LABEL_SIZE = IMAGE_HEIGHT * IMAGE_WIDTH * 3 + 1;
private NDManager manager;
private Usage usage;
private MRL mrl;
private boolean prepared;
Cifar10(Builder builder) {
super(builder);
this.manager = builder.manager;
this.manager.setName("cifar10");
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a builder to build a {@link Cifar10}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Map<String, Artifact.Item> map = artifact.getFiles();
Artifact.Item item;
switch (usage) {
case TRAIN:
item = map.get("data_batch.bin");
break;
case TEST:
item = map.get("test_batch.bin");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
NDArray dataAndLabels = readData(item);
data =
new NDArray[] {
dataAndLabels
.get(":, 1:")
.reshape(-1, 3, IMAGE_HEIGHT, IMAGE_WIDTH)
.transpose(0, 2, 3, 1)
};
labels = new NDArray[] {dataAndLabels.get(":,0")};
// check if data and labels have the same size
if (data[0].size(0) != labels[0].size(0)) {
throw new IOException(
"the size of data "
+ data[0].size(0)
+ " didn't match with the size of labels "
+ labels[0].size(0));
}
prepared = true;
}
private NDArray readData(Artifact.Item item) throws IOException {
try (InputStream is = mrl.getRepository().openStream(item, null)) {
byte[] buf = Utils.toByteArray(is);
int length = buf.length / DATA_AND_LABEL_SIZE;
try (NDArray array =
manager.create(new Shape(length, DATA_AND_LABEL_SIZE), DataType.UINT8)) {
array.set(buf);
return array.toType(DataType.FLOAT32, false);
}
}
}
/** A builder to construct a {@link Cifar10}. */
public static final class Builder extends BaseBuilder<Builder> {
NDManager manager;
Repository repository;
String groupId;
String artifactId;
Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
pipeline = new Pipeline(new ToTensor());
manager = Engine.getInstance().newBaseManager();
}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/**
* Sets the optional manager for the dataset (default follows engine default).
*
* @param manager the new manager
* @return this builder
*/
public Builder optManager(NDManager manager) {
this.manager.close();
this.manager = manager.newSubManager();
return this;
}
/**
* Sets the optional repository for the dataset.
*
* @param repository the new repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
* @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the optional usage for the dataset.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return this;
}
/**
* Builds a new {@link Cifar10}.
*
* @return the new {@link Cifar10}
*/
public Cifar10 build() {
return new Cifar10(this);
}
MRL getMrl() {
return repository.dataset(CV.ANY, groupId, artifactId, VERSION);
}
}
}
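A usage sketch (not part of the source file); pairing the public NORMALIZE_MEAN and NORMALIZE_STD constants with a Normalize transform is a common pattern rather than something this file mandates.

// Hypothetical sketch, not from the DJL sources.
Cifar10 cifar =
        Cifar10.builder()
                .optUsage(Dataset.Usage.TRAIN)
                .optPipeline(
                        new Pipeline(
                                new ToTensor(),
                                new Normalize(Cifar10.NORMALIZE_MEAN, Cifar10.NORMALIZE_STD)))
                .setSampling(64, true)
                .build();
cifar.prepare(new ProgressBar());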
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/FashionMnist.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.Application.CV;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.engine.Engine;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.ArrayDataset;
import ai.djl.translate.Pipeline;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Map;
/**
* FashionMnist is a dataset of Zalando article images
* (https://github.com/zalandoresearch/fashion-mnist).
*
* <p>Each sample is a grayscale image (in 3-D NDArray) with shape (28, 28, 1).
*
* <p>It was created as a drop-in replacement for {@link Mnist}, but with a less simplistic task.
*/
public final class FashionMnist extends ArrayDataset {
private static final String ARTIFACT_ID = "fashmnist";
private static final String VERSION = "1.0";
public static final int IMAGE_WIDTH = 28;
public static final int IMAGE_HEIGHT = 28;
public static final int NUM_CLASSES = 10;
private final NDManager manager;
private final Usage usage;
private MRL mrl;
private boolean prepared;
/**
* Creates a new instance of {@code ArrayDataset} with the arguments in {@link Builder}.
*
* @param builder a builder with the required arguments
*/
private FashionMnist(FashionMnist.Builder builder) {
super(builder);
this.manager = builder.manager;
this.manager.setName("fashionmnist");
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a builder to build a {@link FashionMnist}.
*
* @return a new builder
*/
public static FashionMnist.Builder builder() {
return new FashionMnist.Builder();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Map<String, Artifact.Item> map = artifact.getFiles();
Artifact.Item imageItem;
Artifact.Item labelItem;
switch (usage) {
case TRAIN:
imageItem = map.get("train_data");
labelItem = map.get("train_labels");
break;
case TEST:
imageItem = map.get("test_data");
labelItem = map.get("test_labels");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
labels = new NDArray[] {readLabel(labelItem)};
data = new NDArray[] {readData(imageItem, labels[0].size())};
prepared = true;
}
private NDArray readData(Artifact.Item item, long length) throws IOException {
try (InputStream is = mrl.getRepository().openStream(item, null)) {
if (is.skip(16) != 16) {
throw new AssertionError("Failed skip data.");
}
byte[] buf = Utils.toByteArray(is);
try (NDArray array =
manager.create(
ByteBuffer.wrap(buf),
new Shape(length, IMAGE_WIDTH, IMAGE_HEIGHT, 1),
DataType.UINT8)) {
return array.toType(DataType.FLOAT32, false);
}
}
}
private NDArray readLabel(Artifact.Item item) throws IOException {
try (InputStream is = mrl.getRepository().openStream(item, null)) {
if (is.skip(8) != 8) {
throw new AssertionError("Failed skip data.");
}
byte[] buf = Utils.toByteArray(is);
try (NDArray array =
manager.create(ByteBuffer.wrap(buf), new Shape(buf.length), DataType.UINT8)) {
return array.toType(DataType.FLOAT32, false);
}
}
}
/** A builder for a {@link FashionMnist}. */
public static final class Builder extends BaseBuilder<Builder> {
NDManager manager;
Repository repository;
String groupId;
String artifactId;
Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
manager = Engine.getInstance().newBaseManager();
}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/**
         * Sets the optional manager for the dataset (defaults to the engine's default manager).
*
* @param manager the manager
* @return this builder
*/
public Builder optManager(NDManager manager) {
this.manager.close();
this.manager = manager.newSubManager();
return this;
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return this;
}
/**
         * Builds the {@link FashionMnist}.
         *
         * @return the {@link FashionMnist}
*/
public FashionMnist build() {
if (pipeline == null) {
pipeline = new Pipeline(new ToTensor());
}
return new FashionMnist(this);
}
MRL getMrl() {
return repository.dataset(CV.ANY, groupId, artifactId, VERSION);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/FruitsFreshAndRotten.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.Application;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.translate.Pipeline;
import ai.djl.util.Progress;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
/**
 * FruitsFreshAndRotten is a classification dataset of fruits where the rotten and fresh classes
 * are stored in different subfolders.
*
* <pre>
 * It is structured similarly to ImageFolder as follows:
* root/freshapples/1.png
* root/freshapples/2.png
* ...
* root/rottenapples/1.png
* root/rottenapples/2.png
* ...
* root/freshbanana/1.png
* root/freshbanana/2.png
* ...
* root/rottenbanana/1.png
* root/rottenbanana/2.png
* ...
* </pre>
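 *
 * <p>A minimal usage sketch (the batch size and image size are hypothetical; the artifact is
 * downloaded when {@code build()} is called):
 *
 * <pre>
 * FruitsFreshAndRotten dataset = FruitsFreshAndRotten.builder()
 *         .optUsage(Dataset.Usage.TRAIN)
 *         .addTransform(new Resize(100, 100))
 *         .addTransform(new ToTensor())
 *         .setSampling(32, true)
 *         .build();
 * dataset.prepare();
 * </pre>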
*/
public final class FruitsFreshAndRotten extends AbstractImageFolder {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "fruit";
private MRL mrl;
private boolean prepared;
private FruitsFreshAndRotten(Builder builder) {
super(builder);
mrl = builder.getMrl();
}
/**
* Creates a new builder to build a {@link FruitsFreshAndRotten}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
protected Path getImagePath(String key) {
return Paths.get(key);
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
// Use the code in ImageFolder
if (!prepared) {
mrl.prepare(null, progress);
loadSynset();
Path root = Paths.get(mrl.getRepository().getBaseUri());
if (progress != null) {
progress.reset("Preparing", 2);
progress.start(0);
listImages(root, synset);
progress.end();
} else {
listImages(root, synset);
}
prepared = true;
}
}
private void loadSynset() {
File root = new File(mrl.getRepository().getBaseUri());
File[] dir = root.listFiles(f -> f.isDirectory() && !f.getName().startsWith("."));
if (dir == null || dir.length == 0) {
            throw new IllegalArgumentException(root + " not found or does not contain any files");
}
Arrays.sort(dir);
for (File file : dir) {
synset.add(file.getName());
}
}
/** A builder for the {@link FruitsFreshAndRotten}. */
public static final class Builder extends ImageFolderBuilder<Builder> {
String groupId;
String artifactId;
Usage usage;
private Repository optRepository;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.optRepository = repository;
return self();
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Builds the {@link FruitsFreshAndRotten}.
*
* @return the {@link FruitsFreshAndRotten}
* @throws IOException if there is an issue
*/
public FruitsFreshAndRotten build() throws IOException {
if (pipeline == null) {
pipeline = new Pipeline(new ToTensor());
}
if (optRepository != null) {
repository = optRepository;
} else {
MRL mrl = getMrl();
Artifact artifact = mrl.getDefaultArtifact();
// Downloading the cache happens here
mrl.prepare(artifact, null);
Artifact.Item item;
switch (usage) {
case TRAIN:
item = artifact.getFiles().get("train");
break;
case TEST:
item = artifact.getFiles().get("test");
break;
case VALIDATION:
default:
throw new IOException("Only training and testing dataset supported.");
}
Path root = mrl.getRepository().getFile(item, "").toAbsolutePath();
                // point the repository at the extracted data folder (the instance name is only a label)
repository = Repository.newInstance("banana", root);
}
return new FruitsFreshAndRotten(this);
}
MRL getMrl() {
return repository.dataset(Application.CV.ANY, groupId, artifactId, VERSION);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/ImageClassificationDataset.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.basicdataset.cv.ImageDataset;
import ai.djl.modality.cv.transform.Resize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.ImageClassificationTranslator;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.dataset.Record;
import ai.djl.translate.Pipeline;
import ai.djl.translate.TranslatorOptions;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
/**
* A helper to create {@link ai.djl.training.dataset.Dataset}s for {@link
* ai.djl.Application.CV#IMAGE_CLASSIFICATION}.
*/
public abstract class ImageClassificationDataset extends ImageDataset {
/**
* Creates a new instance of {@link RandomAccessDataset} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
public ImageClassificationDataset(ImageDataset.BaseBuilder<?> builder) {
super(builder);
}
/**
* Returns the class of the data item at the given index.
*
* @param index the index (if the dataset is a list of data items)
* @return the class number or the index into the list of classes of the desired class name
* @throws IOException if the data could not be loaded
*/
protected abstract long getClassNumber(long index) throws IOException;
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) throws IOException {
NDList data = new NDList(getRecordImage(manager, index));
NDList label = new NDList(manager.create(getClassNumber(index)));
return new Record(data, label);
}
/** {@inheritDoc} */
@Override
public TranslatorOptions matchingTranslatorOptions() {
Pipeline pipeline = new Pipeline();
// Resize the image if the image size is fixed
Optional<Integer> width = getImageWidth();
Optional<Integer> height = getImageHeight();
if (width.isPresent() && height.isPresent()) {
pipeline.add(new Resize(width.get(), height.get()));
}
pipeline.add(new ToTensor());
return ImageClassificationTranslator.builder()
.optSynset(getClasses())
.setPipeline(pipeline)
.build()
.getExpansions();
}
/**
* Returns the classes that the images in the dataset are classified into.
*
* @return the classes that the images in the dataset are classified into
*/
public abstract List<String> getClasses();
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/ImageFolder.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.translate.Pipeline;
import ai.djl.util.Progress;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
/**
* A dataset for loading image files stored in a folder structure.
*
* <p>Below is an example directory layout for the image folder:
*
* <pre>
* The image folder should be structured as follows:
* root/shoes/Aerobic Shoes1.png
 * root/shoes/Aerobic Shoes2.png
* ...
* root/boots/Black Boots.png
* root/boots/White Boots.png
* ...
* root/pumps/Red Pumps.png
* root/pumps/Pink Pumps.png
* ...
*
* here shoes, boots, pumps are your labels
* </pre>
*
 * <p>Here, the dataset will take the folder names (shoes, boots, pumps) in sorted order as your
* labels. Nested folder structures are not currently supported.
*
* <p>Then, you can create your instance of the dataset as follows:
*
* <pre>
* // set the image folder path
 * Repository repository = Repository.newInstance("folder", Paths.get("/path/to/imagefolder/root"));
* ImageFolder dataset =
* ImageFolder.builder()
* .setRepository(repository)
* .addTransform(new Resize(100, 100)) // Use image transforms as necessary for your data
* .addTransform(new ToTensor()) // Usually required as the last transform to convert images to tensors
* .setSampling(batchSize, true)
* .build();
*
* // call prepare before using
* dataset.prepare();
*
* // to get the synset or label names
 * List&lt;String&gt; synset = dataset.getSynset();
* </pre>
*/
public final class ImageFolder extends AbstractImageFolder {
private ImageFolder(ImageFolderBuilder<?> builder) {
super(builder);
}
/**
* Creates a new builder to build a {@link ImageFolder}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
protected Path getImagePath(String key) {
return Paths.get(key);
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (!prepared) {
mrl.prepare(null, progress);
loadSynset();
Path root = Paths.get(mrl.getRepository().getBaseUri());
if (progress != null) {
progress.reset("Preparing", 2);
progress.start(0);
listImages(root, synset);
progress.end();
} else {
listImages(root, synset);
}
prepared = true;
}
}
private void loadSynset() {
File root = new File(mrl.getRepository().getBaseUri());
File[] dir = root.listFiles(f -> f.isDirectory() && !f.getName().startsWith("."));
if (dir == null || dir.length == 0) {
            throw new IllegalArgumentException(root + " not found or does not contain any files");
}
Arrays.sort(dir);
for (File file : dir) {
synset.add(file.getName());
}
}
/** A builder for the {@link ImageFolder}. */
public static final class Builder extends ImageFolderBuilder<Builder> {
Builder() {}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/**
* Builds the {@link ImageFolder}.
*
* @return the {@link ImageFolder}
*/
public ImageFolder build() {
if (pipeline == null) {
pipeline = new Pipeline(new ToTensor());
}
return new ImageFolder(this);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/ImageNet.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.training.dataset.Dataset;
import ai.djl.translate.Pipeline;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.JsonUtils;
import ai.djl.util.Progress;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
/**
 * ImageNet is an image classification dataset taken from the http://image-net.org 2012
 * classification challenge.
*
* <p>Each image might have different {@link ai.djl.ndarray.types.Shape}s.
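 *
 * <p>ImageNet must be obtained manually, so a local repository pointing at the extracted images
 * is required. A minimal usage sketch (the path and batch size are hypothetical):
 *
 * <pre>
 * Repository repository = Repository.newInstance("imagenet", Paths.get("/path/to/imagenet"));
 * ImageNet imageNet = ImageNet.builder()
 *         .setRepository(repository)
 *         .optUsage(Dataset.Usage.TRAIN)
 *         .setSampling(32, true)
 *         .build();
 * imageNet.prepare();
 * </pre>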
*/
public class ImageNet extends AbstractImageFolder {
private String[] wordNetIds;
private String[] classNames;
private String[] classFull;
private Path root;
ImageNet(Builder builder) {
super(builder);
String usagePath = getUsagePath(builder.usage);
root = Paths.get(mrl.getRepository().getBaseUri()).resolve(usagePath);
}
/**
* Creates a new builder to build a {@link ImageNet}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/**
* Returns all WordNet ids of this ImageNet dataset.
*
* @return all WordNet ids of this ImageNet dataset
*/
public String[] getWordNetIds() {
return wordNetIds;
}
/**
* Returns all class names of this ImageNet dataset.
*
* @return all class names of this ImageNet dataset
*/
public String[] getClassNames() {
return classNames;
}
/**
* Returns all full class names of this ImageNet dataset.
*
* @return all full class names of this ImageNet dataset
*/
public String[] getClassFull() {
return classFull;
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (!prepared) {
mrl.prepare(null, progress);
if (progress != null) {
progress.reset("Preparing", 2);
progress.start(0);
listImages(root, Arrays.asList(wordNetIds));
progress.end();
} else {
listImages(root, Arrays.asList(wordNetIds));
}
loadSynset();
prepared = true;
}
}
private void loadSynset() {
ClassLoader cl = ClassLoaderUtils.getContextClassLoader();
try (InputStream classStream = cl.getResourceAsStream("imagenet/classes.json")) {
if (classStream == null) {
throw new AssertionError("Missing imagenet/classes.json in jar resource");
}
Reader reader = new InputStreamReader(classStream, StandardCharsets.UTF_8);
String[][] classes = JsonUtils.GSON.fromJson(reader, String[][].class);
wordNetIds = new String[classes.length];
classNames = new String[classes.length];
classFull = new String[classes.length];
for (int i = 0; i < classes.length; i++) {
wordNetIds[i] = classes[i][0];
classNames[i] = classes[i][1];
classFull[i] = classes[i][2];
synset.add(wordNetIds[i] + ", " + classNames[i] + ", " + classFull[i]);
}
} catch (IOException e) {
throw new AssertionError("Failed to read imagenet/classes.json file.", e);
}
}
private String getUsagePath(Dataset.Usage usage) {
String usagePath;
switch (usage) {
case TRAIN:
usagePath = "train";
return usagePath;
case VALIDATION:
usagePath = "val";
return usagePath;
case TEST:
throw new UnsupportedOperationException("Test data not available.");
default:
throw new UnsupportedOperationException("Data not available.");
}
}
/** {@inheritDoc} */
@Override
protected Path getImagePath(String key) {
return root.resolve(key);
}
/** A builder for a {@link ImageNet}. */
public static class Builder extends ImageFolderBuilder<Builder> {
private Usage usage = Usage.TRAIN;
Builder() {}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return this;
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Builds the {@link ImageNet}.
*
* @return the {@link ImageNet}
*/
public ImageNet build() {
if (pipeline == null) {
pipeline = new Pipeline(new ToTensor());
}
return new ImageNet(this);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/Mnist.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.cv.classification;
import ai.djl.Application.CV;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.engine.Engine;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.ArrayDataset;
import ai.djl.translate.Pipeline;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Map;
/**
* MNIST handwritten digits dataset from http://yann.lecun.com/exdb/mnist.
*
* <p>Each sample is a grayscale image (in 3-D NDArray) with shape (28, 28, 1).
*
 * <p>It is a common starting dataset because it is small and can be trained on within minutes.
 * However, it is an overly easy task on which even poor models still perform very well. Instead,
 * consider {@link FashionMnist}, which offers comparable speed but a more reasonably difficult
 * task.
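 *
 * <p>A minimal usage sketch (the batch size is hypothetical; the artifact is downloaded on first
 * use):
 *
 * <pre>
 * Mnist mnist = Mnist.builder()
 *         .optUsage(Dataset.Usage.TRAIN)
 *         .setSampling(32, true)
 *         .build();
 * mnist.prepare();
 * </pre>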
*/
public final class Mnist extends ArrayDataset {
private static final String ARTIFACT_ID = "mnist";
private static final String VERSION = "1.0";
public static final int IMAGE_WIDTH = 28;
public static final int IMAGE_HEIGHT = 28;
public static final int NUM_CLASSES = 10;
private NDManager manager;
private Usage usage;
private MRL mrl;
private boolean prepared;
private Mnist(Builder builder) {
super(builder);
this.manager = builder.manager;
this.manager.setName("mnist");
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a builder to build a {@link Mnist}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Map<String, Artifact.Item> map = artifact.getFiles();
Artifact.Item imageItem;
Artifact.Item labelItem;
switch (usage) {
case TRAIN:
imageItem = map.get("train_data");
labelItem = map.get("train_labels");
break;
case TEST:
imageItem = map.get("test_data");
labelItem = map.get("test_labels");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
labels = new NDArray[] {readLabel(labelItem)};
data = new NDArray[] {readData(imageItem, labels[0].size())};
prepared = true;
}
private NDArray readData(Artifact.Item item, long length) throws IOException {
try (InputStream is = mrl.getRepository().openStream(item, null)) {
if (is.skip(16) != 16) {
throw new AssertionError("Failed skip data.");
}
byte[] buf = Utils.toByteArray(is);
try (NDArray array =
manager.create(
ByteBuffer.wrap(buf), new Shape(length, 28, 28, 1), DataType.UINT8)) {
return array.toType(DataType.FLOAT32, false);
}
}
}
private NDArray readLabel(Artifact.Item item) throws IOException {
try (InputStream is = mrl.getRepository().openStream(item, null)) {
if (is.skip(8) != 8) {
throw new AssertionError("Failed skip data.");
}
byte[] buf = Utils.toByteArray(is);
try (NDArray array =
manager.create(ByteBuffer.wrap(buf), new Shape(buf.length), DataType.UINT8)) {
return array.toType(DataType.FLOAT32, false);
}
}
}
/** A builder for a {@link Mnist}. */
public static final class Builder extends BaseBuilder<Builder> {
private NDManager manager;
private Repository repository;
private String groupId;
private String artifactId;
private Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
pipeline = new Pipeline(new ToTensor());
manager = Engine.getInstance().newBaseManager();
}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/**
         * Sets the optional manager for the dataset (defaults to the engine's default manager).
*
* @param manager the manager
* @return this builder
*/
public Builder optManager(NDManager manager) {
this.manager.close();
this.manager = manager.newSubManager();
return this;
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return this;
}
/**
* Builds the {@link Mnist}.
*
* @return the {@link Mnist}
*/
public Mnist build() {
return new Mnist(this);
}
MRL getMrl() {
return repository.dataset(CV.ANY, groupId, artifactId, VERSION);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/cv/classification/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains a library of built-in datasets for {@link ai.djl.Application.CV#IMAGE_CLASSIFICATION}.
*/
package ai.djl.basicdataset.cv.classification;
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/AmazonReview.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application.NLP;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.basicdataset.tabular.CsvDataset;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.util.Progress;
import org.apache.commons.csv.CSVFormat;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
/**
* The {@link AmazonReview} dataset contains a {@link ai.djl.Application.NLP#SENTIMENT_ANALYSIS} set
* of reviews and their sentiment ratings.
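 *
 * <p>At least one feature must be configured before {@code build()} is called. A minimal usage
 * sketch (the feature column and batch size are illustrative assumptions; the label defaults to
 * {@code star_rating}):
 *
 * <pre>
 * AmazonReview dataset = AmazonReview.builder()
 *         .optDatasetName("us_Digital_Software")
 *         .addNumericFeature("helpful_votes")
 *         .setSampling(32, true)
 *         .build();
 * dataset.prepare();
 * </pre>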
*/
public class AmazonReview extends CsvDataset {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "amazon_reviews";
private MRL mrl;
private String datasetName;
private boolean prepared;
/**
* Creates a new instance of {@link AmazonReview} with the given necessary configurations.
*
* @param builder a builder with the necessary configurations
*/
protected AmazonReview(Builder builder) {
super(builder);
mrl = builder.getMrl();
datasetName = builder.datasetName;
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Map<String, String> filter = new ConcurrentHashMap<>();
filter.put("dataset", datasetName);
Artifact artifact = Objects.requireNonNull(mrl.match(filter));
mrl.prepare(artifact, progress);
Path dir = mrl.getRepository().getResourceDirectory(artifact);
Path csvFile = dir.resolve(artifact.getFiles().values().iterator().next().getName());
csvUrl = csvFile.toUri().toURL();
super.prepare(progress);
prepared = true;
}
/**
     * Creates a new builder to build an {@code AmazonReview}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
    /** A builder to construct an {@code AmazonReview}. */
public static final class Builder extends CsvBuilder<AmazonReview.Builder> {
Repository repository;
String groupId;
String artifactId;
String datasetName;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
csvFormat = CSVFormat.TDF.builder().setQuote(null).setHeader().get();
datasetName = "us_Digital_Software";
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the name of the subset of Amazon Reviews.
*
* @param datasetName the name of the dataset
* @return this builder
*/
public Builder optDatasetName(String datasetName) {
this.datasetName = datasetName;
return this;
}
/** {@inheritDoc} */
@Override
public AmazonReview build() {
if (features.isEmpty()) {
throw new IllegalStateException("Missing features.");
}
if (labels.isEmpty()) {
addNumericLabel("star_rating");
}
return new AmazonReview(this);
}
MRL getMrl() {
return repository.dataset(NLP.ANY, groupId, artifactId, VERSION);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/CookingStackExchange.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application.NLP;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.Batch;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RawDataset;
import ai.djl.util.Progress;
import java.io.IOException;
import java.nio.file.Path;
/**
 * A text classification dataset containing questions from cooking.stackexchange.com and their
 * associated tags on the site.
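 *
 * <p>This dataset implements {@link ai.djl.training.dataset.RawDataset} and exposes the raw
 * training file rather than batches. A minimal usage sketch:
 *
 * <pre>
 * CookingStackExchange dataset = CookingStackExchange.builder().build();
 * Path data = dataset.getData(); // downloads the artifact on first call
 * </pre>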
*/
public class CookingStackExchange implements RawDataset<Path> {
private static final String ARTIFACT_ID = "cooking_stackexchange";
private static final String VERSION = "1.0";
private Dataset.Usage usage;
private Path root;
private MRL mrl;
private boolean prepared;
CookingStackExchange(Builder builder) {
this.usage = builder.usage;
mrl = builder.getMrl();
}
/** {@inheritDoc} */
@Override
public Path getData() throws IOException {
prepare(null);
return root;
}
/** {@inheritDoc} */
@Override
public Iterable<Batch> getData(NDManager manager) {
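        // batch iteration is not supported for this raw dataset; use getData() for the raw file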
return null;
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Artifact.Item item;
switch (usage) {
case TRAIN:
item = artifact.getFiles().get("train");
break;
case TEST:
item = artifact.getFiles().get("test");
break;
case VALIDATION:
default:
throw new IOException("Only training and testing dataset supported.");
}
root = mrl.getRepository().getFile(item, "").toAbsolutePath();
prepared = true;
}
/**
* Creates a builder to build a {@code CookingStackExchange}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** A builder to construct a {@link CookingStackExchange}. */
public static final class Builder {
Repository repository;
String groupId;
String artifactId;
Dataset.Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Dataset.Usage.TRAIN;
}
/**
* Sets the optional repository for the dataset.
*
* @param repository the new repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the optional usage for the dataset.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Dataset.Usage usage) {
this.usage = usage;
return this;
}
/**
* Builds a new {@code CookingStackExchange}.
*
* @return the new {@code CookingStackExchange}
*/
public CookingStackExchange build() {
return new CookingStackExchange(this);
}
MRL getMrl() {
return repository.dataset(NLP.ANY, groupId, artifactId, VERSION);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/GoEmotions.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.training.dataset.Record;
import ai.djl.util.Progress;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/**
* GoEmotions is a corpus of 58k carefully curated comments extracted from Reddit, with human
* annotations to 27 emotion categories or Neutral. This version of data is filtered based on
* rater-agreement on top of the raw data, and contains a train/test/validation split. The emotion
* categories are: admiration, amusement, anger, annoyance, approval, caring, confusion, curiosity,
* desire, disappointment, disapproval, disgust, embarrassment, excitement, fear, gratitude, grief,
* joy, love, nervousness, optimism, pride, realization, relief, remorse, sadness, surprise.
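 *
 * <p>A minimal usage sketch (the batch size is hypothetical; the TSV artifacts are downloaded on
 * first use):
 *
 * <pre>
 * GoEmotions dataset = GoEmotions.builder()
 *         .setSampling(32, true)
 *         .build();
 * dataset.prepare();
 * </pre>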
*/
public class GoEmotions extends TextDataset {
private static final String ARTIFACT_ID = "goemotions";
private static final String VERSION = "1.0";
List<int[]> targetData = new ArrayList<>();
enum HeaderEnum {
text,
emotion_id,
comment_id
}
/**
* Creates a new instance of {@link GoEmotions}.
*
* @param builder the builder object to build from
*/
GoEmotions(Builder builder) {
super(builder);
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
     * Prepares the dataset for use with tracked progress. In this method, the TSV file is parsed
     * and the source text is preprocessed.
     *
     * @param progress the progress tracker
     * @throws IOException for various exceptions depending on the dataset
     * @throws EmbeddingException if there is an error while embedding input
*/
@Override
public void prepare(Progress progress) throws IOException, EmbeddingException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path csvFile;
switch (usage) {
case TRAIN:
csvFile = root.resolve("train.tsv");
break;
case TEST:
csvFile = root.resolve("test.tsv");
break;
case VALIDATION:
csvFile = root.resolve("dev.tsv");
break;
default:
throw new UnsupportedOperationException("Data not available.");
}
CSVFormat csvFormat =
CSVFormat.TDF.builder().setQuote(null).setHeader(HeaderEnum.class).get();
URL csvUrl = csvFile.toUri().toURL();
List<CSVRecord> csvRecords;
List<String> sourceTextData = new ArrayList<>();
try (Reader reader =
new InputStreamReader(
new BufferedInputStream(csvUrl.openStream()), StandardCharsets.UTF_8)) {
CSVParser csvParser = CSVParser.parse(reader, csvFormat);
csvRecords = csvParser.getRecords();
}
for (CSVRecord csvRecord : csvRecords) {
sourceTextData.add(csvRecord.get(0));
String[] labels = csvRecord.get(1).split(",");
int[] labelInt = new int[labels.length];
for (int i = 0; i < labels.length; i++) {
labelInt[i] = Integer.parseInt(labels[i]);
}
targetData.add(labelInt);
}
preprocess(sourceTextData, true);
prepared = true;
}
/**
* Gets the {@link Record} for the given index from the dataset.
*
* @param manager the manager used to create the arrays
* @param index the index of the requested data item
     * @return a {@link Record} that contains the data and label of the requested data item. The
     *     data {@link NDList} contains one {@link NDArray} representing the embedded comment
     *     text. The label {@link NDList} contains one {@link NDArray} holding the ids of the
     *     annotated emotion categories.
*/
@Override
public Record get(NDManager manager, long index) throws IOException {
NDList data = new NDList();
NDList labels = new NDList();
data.add(sourceTextData.getEmbedding(manager, index));
labels.add(manager.create(targetData.get((int) index)));
return new Record(data, labels);
}
/**
     * Returns the number of records available to be read in this {@code Dataset}. In this
     * implementation, the actual size of available records is the size of the embedded source
     * text data.
*
* @return the number of records available to be read in this {@code Dataset}
*/
@Override
protected long availableSize() {
return sourceTextData.getSize();
}
/**
* Creates a builder to build a {@link GoEmotions}.
*
* @return a new builder
*/
public static GoEmotions.Builder builder() {
return new GoEmotions.Builder();
}
/** A builder to construct a {@link GoEmotions}. */
public static final class Builder extends TextDataset.Builder<GoEmotions.Builder> {
/** Constructs a new builder. */
public Builder() {
artifactId = ARTIFACT_ID;
}
/** {@inheritDoc} */
@Override
public GoEmotions.Builder self() {
return this;
}
/**
         * Builds the {@link GoEmotions}.
         *
         * @return the {@link GoEmotions}
*/
public GoEmotions build() {
return new GoEmotions(this);
}
MRL getMrl() {
return repository.dataset(Application.NLP.ANY, groupId, artifactId, VERSION);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/PennTreebankText.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.Record;
import ai.djl.util.Progress;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/**
* The Penn Treebank (PTB) project selected 2,499 stories from a three year Wall Street Journal
* (WSJ) collection of 98,732 stories for syntactic annotation (see <a
* href="https://catalog.ldc.upenn.edu/docs/LDC95T7/cl93.html">here</a> for details).
*/
public class PennTreebankText extends TextDataset {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "penntreebank-unlabeled-processed";
/**
* Creates a new instance of {@link PennTreebankText} with the given necessary configurations.
*
* @param builder a builder with the necessary configurations
*/
PennTreebankText(Builder builder) {
super(builder);
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a builder to build a {@link PennTreebankText}.
*
* @return a new {@link PennTreebankText.Builder} object
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) throws IOException {
NDList data = new NDList();
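        // this corpus is unlabeled, so the record carries no labels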
NDList labels = null;
data.add(sourceTextData.getEmbedding(manager, index));
return new Record(data, labels);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return sourceTextData.getSize();
}
/**
* Prepares the dataset for use with tracked progress.
*
* @param progress the progress tracker
     * @throws IOException for various exceptions depending on the dataset
     * @throws EmbeddingException if there is an error while embedding input
*/
@Override
public void prepare(Progress progress) throws IOException, EmbeddingException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Artifact.Item item;
switch (usage) {
case TRAIN:
item = artifact.getFiles().get("train");
break;
case TEST:
item = artifact.getFiles().get("test");
break;
case VALIDATION:
item = artifact.getFiles().get("valid");
break;
default:
throw new UnsupportedOperationException("Unsupported usage type.");
}
Path path = mrl.getRepository().getFile(item, "").toAbsolutePath();
List<String> lineArray = new ArrayList<>();
try (BufferedReader reader = Files.newBufferedReader(path)) {
String row;
while ((row = reader.readLine()) != null) {
lineArray.add(row);
}
}
preprocess(lineArray, true);
prepared = true;
}
/** A builder to construct a {@link PennTreebankText} . */
public static class Builder extends TextDataset.Builder<Builder> {
/** Constructs a new builder. */
public Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Dataset.Usage.TRAIN;
}
/**
* Builds a new {@link PennTreebankText} object.
*
* @return the new {@link PennTreebankText} object
*/
public PennTreebankText build() {
return new PennTreebankText(this);
}
MRL getMrl() {
return repository.dataset(Application.NLP.ANY, groupId, artifactId, VERSION);
}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/StanfordMovieReview.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application.NLP;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.training.dataset.Record;
import ai.djl.util.Progress;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
/**
* The {@link StanfordMovieReview} dataset contains a {@link
* ai.djl.Application.NLP#SENTIMENT_ANALYSIS} set of movie reviews and their sentiment ratings.
*
* <p>The data is sourced from reviews located on IMDB (see <a
* href="https://ai.stanford.edu/~amaas/data/sentiment/">here</a> for details).
*/
public class StanfordMovieReview extends TextDataset {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "stanford-movie-review";
private List<Boolean> reviewSentiments;
private List<Integer> reviewImdbScore;
/**
* Creates a new instance of {@link StanfordMovieReview} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
protected StanfordMovieReview(Builder builder) {
super(builder);
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a new builder to build a {@link StanfordMovieReview}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException, EmbeddingException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path cacheDir = mrl.getRepository().getCacheDirectory();
URI resourceUri = artifact.getResourceUri();
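        // the extracted archive yields a nested aclImdb/aclImdb directory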
Path root = cacheDir.resolve(resourceUri.getPath()).resolve("aclImdb").resolve("aclImdb");
Path usagePath;
switch (usage) {
case TRAIN:
usagePath = Paths.get("train");
break;
case TEST:
usagePath = Paths.get("test");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
usagePath = root.resolve(usagePath);
List<String> reviewTexts = new ArrayList<>();
reviewSentiments = new ArrayList<>();
reviewImdbScore = new ArrayList<>();
prepareDataSentiment(usagePath.resolve("pos"), true, reviewTexts);
prepareDataSentiment(usagePath.resolve("neg"), false, reviewTexts);
preprocess(reviewTexts, true);
prepared = true;
}
private void prepareDataSentiment(Path path, boolean sentiment, List<String> reviewTexts)
throws IOException {
File dir = path.toFile();
if (!dir.exists()) {
throw new IllegalArgumentException("Could not find Stanford Movie Review dataset");
}
File[] files = dir.listFiles(File::isFile);
if (files == null) {
throw new IllegalArgumentException(
"Could not find files in Stanford Movie Review dataset");
}
for (File reviewFile : files) {
Path reviewPath = reviewFile.toPath();
String reviewText = new String(Files.readAllBytes(reviewPath), StandardCharsets.UTF_8);
String[] splitName = reviewFile.getName().split("\\.")[0].split("_");
reviewTexts.add(reviewText);
reviewSentiments.add(sentiment);
reviewImdbScore.add(Integer.parseInt(splitName[1]));
}
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) {
NDList data = new NDList();
data.add(sourceTextData.getEmbedding(manager, index));
NDList label =
new NDList(
manager.create(reviewSentiments.get(Math.toIntExact(index)))
.toType(DataType.INT32, false));
return new Record(data, label);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return sourceTextData.getSize();
}
/** A builder for a {@link StanfordMovieReview}. */
public static class Builder extends TextDataset.Builder<Builder> {
/** Constructs a new builder. */
public Builder() {
artifactId = ARTIFACT_ID;
}
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/**
* Builds the {@link StanfordMovieReview}.
*
* @return the {@link StanfordMovieReview}
*/
public StanfordMovieReview build() {
return new StanfordMovieReview(this);
}
MRL getMrl() {
return repository.dataset(NLP.ANY, groupId, artifactId, VERSION);
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/StanfordQuestionAnsweringDataset.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application.NLP;
import ai.djl.basicdataset.utils.TextData;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.training.dataset.RawDataset;
import ai.djl.training.dataset.Record;
import ai.djl.util.JsonUtils;
import ai.djl.util.Progress;
import com.google.gson.reflect.TypeToken;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Stanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of
* questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every
* question is a segment of text, or span, from the corresponding reading passage, or the question
* might be unanswerable.
*
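 * <p>A minimal usage sketch (the batch size is hypothetical; the JSON artifact is downloaded on
 * first use):
 *
 * <pre>
 * StanfordQuestionAnsweringDataset dataset = StanfordQuestionAnsweringDataset.builder()
 *         .optUsage(Dataset.Usage.TRAIN)
 *         .setSampling(32, true)
 *         .build();
 * dataset.prepare();
 * </pre>
 *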
* @see <a href="https://rajpurkar.github.io/SQuAD-explorer/">Dataset website</a>
*/
@SuppressWarnings("unchecked")
public class StanfordQuestionAnsweringDataset extends TextDataset implements RawDataset<Object> {
private static final String VERSION = "2.0";
private static final String ARTIFACT_ID = "stanford-question-answer";
/**
     * Stores the information of each question so that when {@code get()} is called, we can find
     * the question corresponding to the index.
*/
private List<QuestionInfo> questionInfoList;
/**
* Creates a new instance of {@link StanfordQuestionAnsweringDataset}.
*
* @param builder the builder object to build from
*/
protected StanfordQuestionAnsweringDataset(Builder builder) {
super(builder);
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a new builder to build a {@link StanfordQuestionAnsweringDataset}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
private Path prepareUsagePath(Progress progress) throws IOException {
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path usagePath;
switch (usage) {
case TRAIN:
usagePath = Paths.get("train-v2.0.json");
break;
case TEST:
usagePath = Paths.get("dev-v2.0.json");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
return root.resolve(usagePath);
}
/**
* Prepares the dataset for use with tracked progress. In this method the JSON file will be
* parsed. The question, context, title will be added to {@code sourceTextData} and the answers
* will be added to {@code targetTextData}. Both of them will then be preprocessed.
*
* @param progress the progress tracker
* @throws IOException for various exceptions depending on the dataset
* @throws EmbeddingException if there are exceptions during the embedding process
*/
@Override
public void prepare(Progress progress) throws IOException, EmbeddingException {
if (prepared) {
return;
}
Path usagePath = prepareUsagePath(progress);
Map<String, Object> data;
try (BufferedReader reader = Files.newBufferedReader(usagePath)) {
data =
JsonUtils.GSON.fromJson(
reader, new TypeToken<Map<String, Object>>() {}.getType());
}
List<Map<String, Object>> articles = (List<Map<String, Object>>) data.get("data");
questionInfoList = new ArrayList<>();
List<String> sourceTextData = new ArrayList<>();
List<String> targetTextData = new ArrayList<>();
// a nested loop to handle the nested json object
List<Map<String, Object>> paragraphs;
List<Map<String, Object>> questions;
List<Map<String, Object>> answers;
int titleIndex;
int contextIndex;
int questionIndex;
int answerIndex;
QuestionInfo questionInfo;
for (Map<String, Object> article : articles) {
titleIndex = sourceTextData.size();
sourceTextData.add(article.get("title").toString());
// iterate through the paragraphs
paragraphs = (List<Map<String, Object>>) article.get("paragraphs");
for (Map<String, Object> paragraph : paragraphs) {
contextIndex = sourceTextData.size();
sourceTextData.add(paragraph.get("context").toString());
// iterate through the questions
questions = (List<Map<String, Object>>) paragraph.get("qas");
for (Map<String, Object> question : questions) {
questionIndex = sourceTextData.size();
sourceTextData.add(question.get("question").toString());
questionInfo = new QuestionInfo(questionIndex, titleIndex, contextIndex);
questionInfoList.add(questionInfo);
// iterate through the answers
answers = (List<Map<String, Object>>) question.get("answers");
for (Map<String, Object> answer : answers) {
answerIndex = targetTextData.size();
targetTextData.add(answer.get("text").toString());
questionInfo.addAnswer(answerIndex);
}
}
}
}
preprocess(sourceTextData, true);
preprocess(targetTextData, false);
prepared = true;
}
/**
* Gets the {@link Record} for the given index from the dataset.
*
* @param manager the manager used to create the arrays
* @param index the index of the requested data item
* @return a {@link Record} that contains the data and label of the requested data item. The
* data {@link NDList} contains three {@link NDArray}s representing the embedded title,
* context and question, which are named accordingly. The label {@link NDList} contains
* multiple {@link NDArray}s corresponding to each embedded answer.
*/
@Override
public Record get(NDManager manager, long index) {
NDList data = new NDList();
NDList labels = new NDList();
QuestionInfo questionInfo = questionInfoList.get(Math.toIntExact(index));
NDArray title = sourceTextData.getEmbedding(manager, questionInfo.titleIndex);
title.setName("title");
NDArray context = sourceTextData.getEmbedding(manager, questionInfo.contextIndex);
context.setName("context");
NDArray question = sourceTextData.getEmbedding(manager, questionInfo.questionIndex);
question.setName("question");
data.add(title);
data.add(context);
data.add(question);
for (Integer answerIndex : questionInfo.answerIndexList) {
labels.add(targetTextData.getEmbedding(manager, answerIndex));
}
return new Record(data, labels);
}
/**
* Returns the number of records available to be read in this {@code Dataset}. In this
     * implementation, the actual size of available records is the size of {@code
* questionInfoList}.
*
* @return the number of records available to be read in this {@code Dataset}
*/
@Override
protected long availableSize() {
return questionInfoList.size();
}
/**
     * Gets data from the SQuAD dataset. This method directly returns the whole dataset as a
     * single object.
     *
     * @return the parsed JSON structure of the dataset, e.g. {@code Map<String,
     *     List<Map<...>>>}
     * @throws IOException if the dataset file could not be read
*/
@Override
public Object getData() throws IOException {
Path usagePath = prepareUsagePath(null);
Object data;
try (BufferedReader reader = Files.newBufferedReader(usagePath)) {
data = JsonUtils.GSON.fromJson(reader, new TypeToken<Object>() {}.getType());
}
return data;
}
/**
     * Since a question might have no answer, we need extra logic to find the last index of the
     * answer in {@code targetTextData}. There are rarely many consecutive questions without
     * answers, so this logic is not costly.
*
* @param questionInfoIndex the last index of the record in {@code questionInfoList} that needs
* to be preprocessed
* @return the last index of the answer in {@code TargetTextData} that needs to be preprocessed
*/
private int getLastAnswerIndex(int questionInfoIndex) {
// Go backwards through the questionInfoList until it finds one with an answer
for (; questionInfoIndex >= 0; questionInfoIndex--) {
QuestionInfo questionInfo = questionInfoList.get(questionInfoIndex);
if (!questionInfo.answerIndexList.isEmpty()) {
return questionInfo.answerIndexList.get(questionInfo.answerIndexList.size() - 1);
}
}
// Could not find a QuestionInfo with an answer
return 0;
}
/**
* Performs pre-processing steps on text data such as tokenising, applying {@link
* ai.djl.modality.nlp.preprocess.TextProcessor}s, creating vocabulary, and word embeddings.
* Since the record number in this dataset is not equivalent to the length of {@code
* sourceTextData} and {@code targetTextData}, the limit should be processed.
*
* @param newTextData list of all unprocessed sentences in the dataset
* @param source whether the text data provided is source or target
* @throws EmbeddingException if there is an error while embedding input
*/
@Override
protected void preprocess(List<String> newTextData, boolean source) throws EmbeddingException {
TextData textData = source ? sourceTextData : targetTextData;
int index = (int) Math.min(limit, questionInfoList.size()) - 1;
int lastIndex =
source ? questionInfoList.get(index).questionIndex : getLastAnswerIndex(index);
textData.preprocess(manager, newTextData.subList(0, lastIndex + 1));
}
/** A builder for a {@link StanfordQuestionAnsweringDataset}. */
public static class Builder extends TextDataset.Builder<Builder> {
/** Constructs a new builder. */
public Builder() {
artifactId = ARTIFACT_ID;
}
/**
* Returns this {@link Builder} object.
*
* @return this {@code BaseBuilder}
*/
@Override
public Builder self() {
return this;
}
/**
* Builds the {@link StanfordQuestionAnsweringDataset}.
*
* @return the {@link StanfordQuestionAnsweringDataset}
*/
public StanfordQuestionAnsweringDataset build() {
return new StanfordQuestionAnsweringDataset(this);
}
MRL getMrl() {
return repository.dataset(NLP.ANY, groupId, artifactId, VERSION);
}
}
/**
 * This class stores the information for one question. {@code sourceTextData} stores not only
 * the questions, but also the titles and the contexts, and {@code targetTextData} stores both
 * correct and plausible answers. Because there are mapping relationships between the questions
 * and the other entries, this class is needed to assemble the right record.
*/
private static class QuestionInfo {
Integer questionIndex;
Integer titleIndex;
Integer contextIndex;
List<Integer> answerIndexList;
QuestionInfo(Integer questionIndex, Integer titleIndex, Integer contextIndex) {
this.questionIndex = questionIndex;
this.titleIndex = titleIndex;
this.contextIndex = contextIndex;
this.answerIndexList = new ArrayList<>();
}
void addAnswer(Integer answerIndex) {
this.answerIndexList.add(answerIndex);
}
}
}
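
// --- Illustrative usage sketch (not part of the original DJL source) ---
// A minimal example of building and reading this dataset; the batch size and
// limit are assumed values, and ProgressBar is ai.djl.training.util.ProgressBar.
//
// StanfordQuestionAnsweringDataset squad =
//         new StanfordQuestionAnsweringDataset.Builder()
//                 .setSampling(32, true) // batch size 32, shuffled
//                 .optLimit(100)         // embed only the first 100 questions
//                 .build();
// squad.prepare(new ProgressBar());
// try (NDManager manager = NDManager.newBaseManager()) {
//     Record record = squad.get(manager, 0);
//     NDList data = record.getData();     // embedded title, context, question
//     NDList labels = record.getLabels(); // one embedding per answer
// }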
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/TatoebaEnglishFrenchDataset.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application.NLP;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.training.dataset.Record;
import ai.djl.util.Progress;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
/**
 * {@code TatoebaEnglishFrenchDataset} is an English-French machine translation dataset from The
* Tatoeba Project (http://www.manythings.org/anki/).
*/
public class TatoebaEnglishFrenchDataset extends TextDataset {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "tatoeba-en-fr";
/**
* Creates a new instance of {@code TatoebaEnglishFrenchDataset}.
*
* @param builder the builder object to build from
*/
protected TatoebaEnglishFrenchDataset(Builder builder) {
super(builder);
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a new builder to build a {@link TatoebaEnglishFrenchDataset}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException, EmbeddingException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path usagePath;
switch (usage) {
case TRAIN:
usagePath = Paths.get("fra-eng-train.txt");
break;
case TEST:
usagePath = Paths.get("fra-eng-test.txt");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
usagePath = root.resolve(usagePath);
List<String> sourceTextData = new ArrayList<>();
List<String> targetTextData = new ArrayList<>();
try (BufferedReader reader = Files.newBufferedReader(usagePath)) {
String row;
while ((row = reader.readLine()) != null) {
String[] text = row.split("\t");
sourceTextData.add(text[0]);
targetTextData.add(text[1]);
}
}
preprocess(sourceTextData, true);
preprocess(targetTextData, false);
prepared = true;
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) {
NDList data = new NDList();
NDList labels = new NDList();
data.add(sourceTextData.getEmbedding(manager, index));
labels.add(targetTextData.getEmbedding(manager, index));
return new Record(data, labels);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return sourceTextData.getSize();
}
/** A builder for a {@link TatoebaEnglishFrenchDataset}. */
public static class Builder extends TextDataset.Builder<Builder> {
/** Constructs a new builder. */
public Builder() {
artifactId = ARTIFACT_ID;
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Builds the {@link TatoebaEnglishFrenchDataset}.
*
* @return the {@link TatoebaEnglishFrenchDataset}
*/
public TatoebaEnglishFrenchDataset build() {
return new TatoebaEnglishFrenchDataset(this);
}
MRL getMrl() {
return repository.dataset(NLP.ANY, groupId, artifactId, VERSION);
}
}
}
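
// --- Illustrative usage sketch (not part of the original DJL source) ---
// Builds the English-French dataset for the TEST split and reads one record;
// the batch size is an assumed value, and ProgressBar is
// ai.djl.training.util.ProgressBar.
//
// TatoebaEnglishFrenchDataset dataset =
//         TatoebaEnglishFrenchDataset.builder()
//                 .setSampling(32, true)
//                 .optUsage(Dataset.Usage.TEST)
//                 .build();
// dataset.prepare(new ProgressBar());
// try (NDManager manager = NDManager.newBaseManager()) {
//     Record record = dataset.get(manager, 0);
//     // data holds the embedded English sentence, labels the embedded French one
// }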
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/TextDataset.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.basicdataset.utils.TextData;
import ai.djl.basicdataset.utils.TextData.Configuration;
import ai.djl.engine.Engine;
import ai.djl.modality.nlp.DefaultVocabulary;
import ai.djl.modality.nlp.Vocabulary;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.modality.nlp.embedding.TextEmbedding;
import ai.djl.modality.nlp.embedding.TrainableWordEmbedding;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.RandomAccessDataset;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
/**
* {@code TextDataset} is an abstract dataset that can be used for datasets for natural language
* processing where either the source or target are text-based data.
*
* <p>The {@code TextDataset} fetches the data in the form of {@link String}, processes the data as
* required, and creates embeddings for the tokens. Embeddings can be either pre-trained or trained
* on the go. Pre-trained {@link TextEmbedding} must be set in the {@link Builder}. If no embeddings
 * are set, the dataset creates a {@link TrainableWordEmbedding} from the {@link Vocabulary}
 * created within the dataset.
*/
public abstract class TextDataset extends RandomAccessDataset {
protected TextData sourceTextData;
protected TextData targetTextData;
protected NDManager manager;
protected Usage usage;
protected MRL mrl;
protected boolean prepared;
protected List<Sample> samples;
/**
* Creates a new instance of {@link RandomAccessDataset} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
public TextDataset(Builder<?> builder) {
super(builder);
sourceTextData =
new TextData(
TextData.getDefaultConfiguration().update(builder.sourceConfiguration));
targetTextData =
new TextData(
TextData.getDefaultConfiguration().update(builder.targetConfiguration));
manager = builder.manager;
manager.setName("textDataset");
usage = builder.usage;
}
/**
* Gets the word embedding used while pre-processing the dataset. This method must be called
* after preprocess has been called on this instance.
*
* @param source whether to get source or target text embedding
* @return the text embedding
*/
public TextEmbedding getTextEmbedding(boolean source) {
TextData textData = source ? sourceTextData : targetTextData;
return textData.getTextEmbedding();
}
/**
* Gets the {@link DefaultVocabulary} built while preprocessing the text data.
*
* @param source whether to get source or target vocabulary
* @return the {@link DefaultVocabulary}
*/
public Vocabulary getVocabulary(boolean source) {
TextData textData = source ? sourceTextData : targetTextData;
return textData.getVocabulary();
}
/**
* Gets the raw textual input.
*
* @param index the index of the text input
* @param source whether to get text from source or target
* @return the raw text
*/
public String getRawText(long index, boolean source) {
TextData textData = source ? sourceTextData : targetTextData;
return textData.getRawText(index);
}
/**
* Gets the processed textual input.
*
* @param index the index of the text input
* @param source whether to get text from source or target
* @return the processed text
*/
public List<String> getProcessedText(long index, boolean source) {
TextData textData = source ? sourceTextData : targetTextData;
return textData.getProcessedText(index);
}
/**
* Returns a list of sample information.
*
* @return a list of sample information
*/
public List<Sample> getSamples() {
if (samples == null) {
samples = new ArrayList<>();
for (int i = 0; i < size(); i++) {
List<String> text = getProcessedText(i, true);
samples.add(new Sample(i, text.size()));
}
samples.sort(Comparator.comparingInt(o -> o.sentenceLength));
}
return samples;
}
/**
* Performs pre-processing steps on text data such as tokenising, applying {@link
* ai.djl.modality.nlp.preprocess.TextProcessor}s, creating vocabulary, and word embeddings.
*
* @param newTextData list of all unprocessed sentences in the dataset
* @param source whether the text data provided is source or target
* @throws EmbeddingException if there is an error while embedding input
*/
protected void preprocess(List<String> newTextData, boolean source) throws EmbeddingException {
TextData textData = source ? sourceTextData : targetTextData;
textData.preprocess(
manager, newTextData.subList(0, (int) Math.min(limit, newTextData.size())));
}
    /** A class that stores {@code TextDataset} sample information. */
public static final class Sample {
private int sentenceLength;
private long index;
/**
* Constructs a new {@code Sample} instance.
*
* @param index the index
* @param sentenceLength the sentence length
*/
public Sample(int index, int sentenceLength) {
this.index = index;
this.sentenceLength = sentenceLength;
}
/**
* Returns the sentence length.
*
* @return the sentence length
*/
public int getSentenceLength() {
return sentenceLength;
}
/**
* Returns the sample index.
*
* @return the sample index
*/
public long getIndex() {
return index;
}
}
/** Abstract Builder that helps build a {@link TextDataset}. */
public abstract static class Builder<T extends Builder<T>> extends BaseBuilder<T> {
TextData.Configuration sourceConfiguration = new Configuration();
TextData.Configuration targetConfiguration = new Configuration();
NDManager manager = Engine.getInstance().newBaseManager();
protected Repository repository;
protected String groupId;
protected String artifactId;
protected Usage usage;
/** Constructs a new builder. */
protected Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
usage = Usage.TRAIN;
}
/**
* Sets the {@link TextData.Configuration} to use for the source text data.
*
* @param sourceConfiguration the {@link TextData.Configuration}
* @return this builder
*/
public T setSourceConfiguration(Configuration sourceConfiguration) {
this.sourceConfiguration = sourceConfiguration;
return self();
}
/**
* Sets the {@link TextData.Configuration} to use for the target text data.
*
* @param targetConfiguration the {@link TextData.Configuration}
* @return this builder
*/
public T setTargetConfiguration(Configuration targetConfiguration) {
this.targetConfiguration = targetConfiguration;
return self();
}
/**
* Sets the optional manager for the dataset (default follows engine default).
*
* @param manager the manager
* @return this builder
*/
public T optManager(NDManager manager) {
this.manager = manager.newSubManager();
return self();
}
/**
* Sets the optional usage.
*
* @param usage the usage
* @return this builder
*/
public T optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public T optRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public T optGroupId(String groupId) {
this.groupId = groupId;
return self();
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public T optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return self();
}
}
}
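
// --- Illustrative usage sketch (not part of the original DJL source) ---
// Once prepare() has run on a concrete TextDataset (for example the
// TatoebaEnglishFrenchDataset), the vocabulary, embedding, and processed tokens
// can be inspected; "dataset" is assumed to be such a prepared instance.
//
// Vocabulary vocab = dataset.getVocabulary(true);           // source-side vocabulary
// TextEmbedding embedding = dataset.getTextEmbedding(true); // source-side embedding
// String raw = dataset.getRawText(0, true);                 // first raw source sentence
// List<String> tokens = dataset.getProcessedText(0, true);  // its processed tokens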
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/UniversalDependenciesEnglishEWT.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application.NLP;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.training.dataset.Record;
import ai.djl.util.Progress;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
/**
* A Gold Standard Universal Dependencies Corpus for English, built over the source material of the
* English Web Treebank LDC2012T13.
*
* @see <a href="https://catalog.ldc.upenn.edu/LDC2012T13">English Web Treebank LDC2012T13</a>
*/
public class UniversalDependenciesEnglishEWT extends TextDataset {
private static final String VERSION = "2.0";
private static final String ARTIFACT_ID = "universal-dependencies-en-ewt";
private List<List<Integer>> universalPosTags;
/**
     * Creates a new instance of {@code UniversalDependenciesEnglishEWT}.
*
* @param builder the builder object to build from
*/
protected UniversalDependenciesEnglishEWT(Builder builder) {
super(builder);
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a new builder to build a {@link UniversalDependenciesEnglishEWT}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/**
* Prepares the dataset for use with tracked progress. In this method the TXT file will be
* parsed. The texts will be added to {@code sourceTextData} and the Universal POS tags will be
* added to {@code universalPosTags}. Only {@code sourceTextData} will then be preprocessed.
*
* @param progress the progress tracker
* @throws IOException for various exceptions depending on the dataset
* @throws EmbeddingException if there are exceptions during the embedding process
*/
@Override
public void prepare(Progress progress) throws IOException, EmbeddingException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path usagePath = null;
switch (usage) {
case TRAIN:
usagePath = Paths.get("en-ud-v2/en-ud-v2/en-ud-tag.v2.train.txt");
break;
case TEST:
usagePath = Paths.get("en-ud-v2/en-ud-v2/en-ud-tag.v2.test.txt");
break;
case VALIDATION:
usagePath = Paths.get("en-ud-v2/en-ud-v2/en-ud-tag.v2.dev.txt");
break;
            default:
                throw new UnsupportedOperationException("Unsupported usage type.");
}
usagePath = root.resolve(usagePath);
StringBuilder sourceTextDatum = new StringBuilder();
List<String> sourceTextData = new ArrayList<>();
universalPosTags = new ArrayList<>();
List<Integer> universalPosTag = new ArrayList<>();
try (BufferedReader reader = Files.newBufferedReader(usagePath)) {
String row;
while ((row = reader.readLine()) != null) {
if (("").equals(row)) {
sourceTextData.add(sourceTextDatum.toString());
universalPosTags.add(universalPosTag);
sourceTextDatum.delete(0, sourceTextDatum.length());
universalPosTag = new ArrayList<>();
continue;
}
String[] splits = row.split("\t");
if (sourceTextDatum.length() != 0) {
sourceTextDatum.append(' ');
}
sourceTextDatum.append(splits[0]);
universalPosTag.add(UniversalPosTag.valueOf(splits[1]).ordinal());
}
}
preprocess(sourceTextData, true);
prepared = true;
}
    /**
     * Gets the {@link Record} for the given index from the dataset.
     *
     * @param manager the manager used to create the arrays
     * @param index the index of the requested data item
     * @return a {@link Record} that contains the data and label of the requested data item. The
     *     data {@link NDList} contains one {@link NDArray} representing the text embedding. The
     *     label {@link NDList} contains one {@link NDArray} with the indices of the Universal
     *     POS tags of each token. For the index of each Universal POS tag, see the enum class
     *     {@link UniversalPosTag}.
     */
@Override
public Record get(NDManager manager, long index) {
NDList data = new NDList(sourceTextData.getEmbedding(manager, index));
NDList labels =
new NDList(
manager.create(
universalPosTags.get(Math.toIntExact(index)).stream()
.mapToInt(Integer::intValue)
.toArray())
.toType(DataType.INT32, false));
return new Record(data, labels);
}
/**
* Returns the number of records available to be read in this {@code Dataset}.
*
* @return the number of records available to be read in this {@code Dataset}
*/
@Override
protected long availableSize() {
return sourceTextData.getSize();
}
/** A builder for a {@link UniversalDependenciesEnglishEWT}. */
public static class Builder extends TextDataset.Builder<Builder> {
/** Constructs a new builder. */
public Builder() {
groupId = BasicDatasets.GROUP_ID + ".universal-dependencies";
artifactId = ARTIFACT_ID;
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Builds the {@link UniversalDependenciesEnglishEWT}.
*
* @return the {@link UniversalDependenciesEnglishEWT}
*/
public UniversalDependenciesEnglishEWT build() {
return new UniversalDependenciesEnglishEWT(this);
}
MRL getMrl() {
return repository.dataset(NLP.ANY, groupId, artifactId, VERSION);
}
}
/**
* An enum class for Universal POS tags which mark the core part-of-speech categories.
*
* @see <a href="https://universaldependencies.org/u/pos/">Universal POS tags</a>
*/
enum UniversalPosTag {
ADJ,
ADV,
INTJ,
NOUN,
PROPN,
VERB,
ADP,
AUX,
CCONJ,
DET,
NUM,
PART,
PRON,
SCONJ,
PUNCT,
SYM,
X;
}
}
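
// --- Illustrative usage sketch (not part of the original DJL source) ---
// Builds the POS-tagging dataset and reads one record; the label NDArray holds
// the UniversalPosTag ordinals as INT32 values. The batch size is an assumed
// value, and ProgressBar is ai.djl.training.util.ProgressBar.
//
// UniversalDependenciesEnglishEWT dataset =
//         UniversalDependenciesEnglishEWT.builder()
//                 .setSampling(32, true)
//                 .build();
// dataset.prepare(new ProgressBar());
// try (NDManager manager = NDManager.newBaseManager()) {
//     Record record = dataset.get(manager, 0);
//     NDArray tagIndices = record.getLabels().singletonOrThrow();
// }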
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/WikiText2.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.nlp;
import ai.djl.Application;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.training.dataset.Batch;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RawDataset;
import ai.djl.translate.TranslateException;
import ai.djl.util.Progress;
import java.io.IOException;
import java.nio.file.Path;
/**
* The WikiText language modeling dataset is a collection of over 100 million tokens extracted from
* the set of verified Good and Featured articles on Wikipedia.
*/
public class WikiText2 implements RawDataset<Path> {
private static final String VERSION = "1.0";
private static final String ARTIFACT_ID = "wikitext-2";
private Dataset.Usage usage;
private Path root;
private MRL mrl;
private boolean prepared;
WikiText2(Builder builder) {
this.usage = builder.usage;
mrl = builder.getMrl();
}
/**
* Creates a builder to build a {@link WikiText2}.
*
* @return a new {@link WikiText2.Builder} object
*/
public static Builder builder() {
return new Builder();
}
/**
* Prepares the dataset for use with tracked progress.
*
* @param progress the progress tracker
* @throws IOException for various exceptions depending on the dataset
*/
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Artifact.Item item;
item = artifact.getFiles().get("wikitext-2");
String path;
switch (usage) {
case TRAIN:
path = "wikitext-2/wiki.train.tokens";
break;
case TEST:
path = "wikitext-2/wiki.test.tokens";
break;
case VALIDATION:
path = "wikitext-2/wiki.valid.tokens";
break;
default:
throw new UnsupportedOperationException("Unsupported usage type.");
}
root = mrl.getRepository().getFile(item, path).toAbsolutePath();
prepared = true;
}
/**
     * Fetches an iterator that can iterate through the {@link Dataset}. This method is not
     * implemented for the WikiText2 dataset because the dataset is not suitable for iteration.
     * If the method is called, it directly returns {@code null}.
     *
     * @param manager the manager used to create the arrays
     * @return always {@code null}, as iteration is not supported for this dataset
*/
@Override
public Iterable<Batch> getData(NDManager manager) throws IOException, TranslateException {
return null;
}
/**
     * Gets data from the WikiText2 dataset. This method directly returns the path to the whole
     * dataset file.
*
* @return a {@link Path} object locating the WikiText2 dataset file
*/
@Override
public Path getData() throws IOException {
prepare(null);
return root;
}
/** A builder to construct a {@link WikiText2} . */
public static final class Builder {
Repository repository;
String groupId;
String artifactId;
Dataset.Usage usage;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Dataset.Usage.TRAIN;
}
/**
* Sets the optional repository for the dataset.
*
* @param repository the new repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
* @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets the optional usage for the dataset.
*
* @param usage the usage
* @return this builder
*/
public Builder optUsage(Dataset.Usage usage) {
this.usage = usage;
return this;
}
/**
* Builds a new {@link WikiText2} object.
*
* @return the new {@link WikiText2} object
*/
public WikiText2 build() {
return new WikiText2(this);
}
MRL getMrl() {
return repository.dataset(Application.NLP.ANY, groupId, artifactId, VERSION);
}
}
}
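
// --- Illustrative usage sketch (not part of the original DJL source) ---
// WikiText2 is a RawDataset, so instead of iterating over batches one fetches
// the path to the raw token file; getData() prepares the dataset on first use.
// BufferedReader and Files are the usual java.io/java.nio.file classes.
//
// WikiText2 wikiText2 = WikiText2.builder().optUsage(Dataset.Usage.TRAIN).build();
// Path tokenFile = wikiText2.getData();
// try (BufferedReader reader = Files.newBufferedReader(tokenFile)) {
//     String firstLine = reader.readLine();
// }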
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/nlp/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains a library of built-in datasets for {@link ai.djl.Application.NLP}. */
package ai.djl.basicdataset.nlp;
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/AirfoilRandomAccess.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.Application.Tabular;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.basicdataset.tabular.utils.Feature;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.util.Progress;
import org.apache.commons.csv.CSVFormat;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
/**
* Airfoil Self-Noise Data Set from <a
* href="https://archive.ics.uci.edu/ml/datasets/Airfoil+Self-Noise">https://archive.ics.uci.edu/ml/datasets/Airfoil+Self-Noise</a>.
*
 * <p>1503 instances, 6 attributes
*/
public final class AirfoilRandomAccess extends CsvDataset {
private static final String ARTIFACT_ID = "airfoil";
private static final String VERSION = "1.0";
private static final String[] COLUMNS = {
"freq", "aoa", "chordlen", "freestreamvel", "ssdt", "ssoundpres"
};
private MRL mrl;
private Usage usage;
private boolean prepared;
/**
* Creates an instance of {@code RandomAccessDataset} with the arguments in {@link Builder}.
*
* @param builder a builder with the required arguments
*/
AirfoilRandomAccess(Builder builder) {
super(builder);
usage = builder.usage;
mrl = builder.getMrl();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path csvFile;
switch (usage) {
case TRAIN:
csvFile = root.resolve("airfoil_self_noise.dat");
break;
case TEST:
throw new UnsupportedOperationException("Test data not available.");
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
csvUrl = csvFile.toUri().toURL();
super.prepare(progress);
prepared = true;
}
/** {@inheritDoc} */
@Override
public List<String> getColumnNames() {
return Arrays.asList(COLUMNS).subList(0, 5);
}
/**
* Creates a builder to build a {@link AirfoilRandomAccess}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** A builder to construct a {@link AirfoilRandomAccess}. */
public static final class Builder extends CsvBuilder<Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
boolean normalize;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
csvFormat =
CSVFormat.TDF
.builder()
.setHeader(COLUMNS)
.setIgnoreHeaderCase(true)
.setTrim(true)
.get();
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the new usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return this;
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return this;
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
         * Sets whether to normalize the dataset.
         *
         * @param normalize true to normalize the dataset
         * @return this builder
*/
public Builder optNormalize(boolean normalize) {
this.normalize = normalize;
return this;
}
/**
* Returns the available features of this dataset.
*
* @return a list of feature names
*/
public List<String> getAvailableFeatures() {
return Arrays.asList(COLUMNS);
}
/**
* Adds a feature to the features set.
*
* @param name the name of the feature
* @return this builder
*/
public Builder addFeature(String name) {
return addFeature(new Feature(name, true));
}
/** {@inheritDoc} */
@Override
public AirfoilRandomAccess build() {
if (features.isEmpty()) {
for (int i = 0; i < 5; ++i) {
addNumericFeature(COLUMNS[i], normalize);
}
}
if (labels.isEmpty()) {
addNumericLabel("ssoundpres", normalize);
}
return new AirfoilRandomAccess(this);
}
MRL getMrl() {
return repository.dataset(Tabular.ANY, groupId, artifactId, VERSION);
}
}
}
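
// --- Illustrative usage sketch (not part of the original DJL source) ---
// With no features configured, build() selects the five numeric input columns
// and the "ssoundpres" label automatically; the batch size is an assumed value,
// and ProgressBar is ai.djl.training.util.ProgressBar.
//
// AirfoilRandomAccess airfoil =
//         AirfoilRandomAccess.builder()
//                 .optNormalize(true) // normalize features and label
//                 .setSampling(32, true)
//                 .build();
// airfoil.prepare(new ProgressBar());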
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/AmesRandomAccess.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.Application.Tabular;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.util.JsonUtils;
import ai.djl.util.Progress;
import org.apache.commons.csv.CSVFormat;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
* Ames house pricing dataset from
* https://www.kaggle.com/c/house-prices-advanced-regression-techniques/data.
*
* <p>80 features
*
* <p>Training Set: 1460 Records
*
* <p>Test Set: 1459 Records
*
 * <p>Features can be enabled/disabled, and one-hot encoding can be set for categorical variables.
*
* <p>Call {@link Builder#addAllFeatures()} to include all features from the dataset. The label is a
* numeric column named "saleprice".
*/
public class AmesRandomAccess extends CsvDataset {
private static final String ARTIFACT_ID = "ames";
private static final String VERSION = "1.0";
private Usage usage;
private MRL mrl;
private boolean prepared;
AmesRandomAccess(Builder builder) {
super(builder);
usage = builder.usage;
mrl = builder.getMrl();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path dir = mrl.getRepository().getResourceDirectory(artifact);
Path root = dir.resolve("house-prices-advanced-regression-techniques");
Path csvFile;
switch (usage) {
case TRAIN:
csvFile = root.resolve("train.csv");
break;
case TEST:
csvFile = root.resolve("test.csv");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
csvUrl = csvFile.toUri().toURL();
super.prepare(progress);
prepared = true;
}
/**
* Creates a builder to build a {@link AmesRandomAccess}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** A builder to construct a {@link AmesRandomAccess}. */
public static final class Builder extends CsvBuilder<Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
AmesFeatures af;
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
csvFormat =
CSVFormat.DEFAULT
.builder()
.setHeader()
.setSkipHeaderRecord(true)
.setIgnoreHeaderCase(true)
.setTrim(true)
.get();
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the new usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return self();
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return self();
}
/**
* Adds a feature to the features set.
*
* @param name the name of the feature
* @return this builder
*/
public Builder addFeature(String name) {
return addFeature(name, false);
}
/**
* Adds a feature to the features set with onehot encoding.
*
* @param name the name of the feature
* @param onehotEncode true if use onehot encoding
* @return this builder
*/
public Builder addFeature(String name, boolean onehotEncode) {
parseFeatures();
if (af.categorical.contains(name)) {
return addCategoricalFeature(name, onehotEncode);
}
return addNumericFeature(name);
}
/**
* Adds all features to the features set.
*
* @return this builder
*/
public Builder addAllFeatures() {
if (features.isEmpty()) {
parseFeatures();
for (String name : af.featureArray) {
addFeature(name);
}
}
if (labels.isEmpty()) {
addNumericLabel("saleprice");
}
return this;
}
/**
* Returns the available features of this dataset.
*
* @return a list of feature names
*/
public List<String> getAvailableFeatures() {
parseFeatures();
return af.featureArray;
}
/**
* Builds the new {@link AmesRandomAccess}.
*
* @return the new {@link AmesRandomAccess}
*/
@Override
public AmesRandomAccess build() {
return new AmesRandomAccess(this);
}
private void parseFeatures() {
if (af == null) {
try (InputStream is =
Objects.requireNonNull(
AmesRandomAccess.class.getResourceAsStream("ames.json"));
Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8)) {
af = JsonUtils.GSON.fromJson(reader, AmesFeatures.class);
} catch (IOException e) {
throw new AssertionError("Failed to read ames.json from classpath", e);
}
}
}
MRL getMrl() {
return repository.dataset(Tabular.ANY, groupId, artifactId, VERSION);
}
}
private static final class AmesFeatures {
List<String> featureArray;
Set<String> categorical;
/**
* Sets the feature array.
*
* @param featureArray the feature array
*/
public void setFeatureArray(List<String> featureArray) {
this.featureArray = featureArray;
}
/**
* Sets the categorical value.
*
* @param categorical the categorical value
*/
public void setCategorical(Set<String> categorical) {
this.categorical = categorical;
}
}
}
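
// --- Illustrative usage sketch (not part of the original DJL source) ---
// addAllFeatures() pulls the feature list from ames.json and adds "saleprice"
// as the numeric label; the batch size is an assumed value, and ProgressBar is
// ai.djl.training.util.ProgressBar.
//
// AmesRandomAccess ames =
//         AmesRandomAccess.builder()
//                 .addAllFeatures()
//                 .setSampling(32, true)
//                 .build();
// ames.prepare(new ProgressBar());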
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/CsvDataset.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.util.Progress;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.zip.GZIPInputStream;
/** {@code CsvDataset} represents a dataset stored in a .csv file. */
public class CsvDataset extends TabularDataset {
protected URL csvUrl;
protected CSVFormat csvFormat;
protected List<CSVRecord> csvRecords;
protected CsvDataset(CsvBuilder<?> builder) {
super(builder);
csvUrl = builder.csvUrl;
csvFormat = builder.csvFormat;
}
/** {@inheritDoc} */
@Override
public String getCell(long rowIndex, String featureName) {
CSVRecord record = csvRecords.get(Math.toIntExact(rowIndex));
return record.get(featureName);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return csvRecords.size();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
try (Reader reader = new InputStreamReader(getCsvStream(), StandardCharsets.UTF_8)) {
CSVParser csvParser = CSVParser.parse(reader, csvFormat);
csvRecords = csvParser.getRecords();
}
prepareFeaturizers();
}
private InputStream getCsvStream() throws IOException {
if (csvUrl.getFile().endsWith(".gz")) {
return new GZIPInputStream(csvUrl.openStream());
}
return new BufferedInputStream(csvUrl.openStream());
}
/**
     * Creates a builder to build a {@link CsvDataset}.
*
* @return a new builder
*/
public static CsvBuilder<?> builder() {
return new CsvBuilder<>();
}
/**
* Returns the column names of the CSV file.
*
* @return a list of column name
*/
public List<String> getColumnNames() {
if (csvRecords.isEmpty()) {
return Collections.emptyList();
}
return csvRecords.get(0).getParser().getHeaderNames();
}
/** Used to build a {@link CsvDataset}. */
public static class CsvBuilder<T extends CsvBuilder<T>> extends TabularDataset.BaseBuilder<T> {
protected URL csvUrl;
protected CSVFormat csvFormat;
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
protected T self() {
return (T) this;
}
/**
* Sets the optional CSV file path.
*
* @param csvFile the CSV file path
* @return this builder
*/
public T optCsvFile(Path csvFile) {
try {
this.csvUrl = csvFile.toAbsolutePath().toUri().toURL();
} catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid file path: " + csvFile, e);
}
return self();
}
/**
* Sets the optional CSV file URL.
*
* @param csvUrl the CSV file URL
* @return this builder
*/
public T optCsvUrl(String csvUrl) {
try {
this.csvUrl = new URL(csvUrl);
} catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid url: " + csvUrl, e);
}
return self();
}
/**
* Sets the CSV file format.
*
* @param csvFormat the {@code CSVFormat}
* @return this builder
*/
public T setCsvFormat(CSVFormat csvFormat) {
this.csvFormat = csvFormat;
return self();
}
/**
* Builds the new {@link CsvDataset}.
*
* @return the new {@link CsvDataset}
*/
public CsvDataset build() {
return new CsvDataset(this);
}
}
}
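
// --- Illustrative usage sketch (not part of the original DJL source) ---
// Builds a CsvDataset over a local file; the path and the column names "x1",
// "x2", and "y" are hypothetical, Paths is java.nio.file.Paths, and
// ProgressBar is ai.djl.training.util.ProgressBar.
//
// CsvDataset dataset =
//         CsvDataset.builder()
//                 .optCsvFile(Paths.get("data/example.csv"))
//                 .setCsvFormat(
//                         CSVFormat.DEFAULT.builder().setHeader().setSkipHeaderRecord(true).get())
//                 .addNumericFeature("x1")
//                 .addNumericFeature("x2")
//                 .addNumericLabel("y")
//                 .setSampling(32, true)
//                 .build();
// dataset.prepare(new ProgressBar());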
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/DailyDelhiClimate.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.Application.Tabular;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.basicdataset.tabular.utils.Feature;
import ai.djl.basicdataset.tabular.utils.Featurizers;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.util.Progress;
import org.apache.commons.csv.CSVFormat;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Daily Delhi climate dataset from <a
* href="https://www.kaggle.com/datasets/sumanthvrao/daily-climate-time-series-data">https://www.kaggle.com/datasets/sumanthvrao/daily-climate-time-series-data</a>.
*
 * <p>This dataset is intended for developers who want to train weather-forecasting models for the
 * Indian climate. It provides data from 1st January 2013 to 24th April 2017 for the city of
 * Delhi, India. The 4 parameters are meantemp, humidity, wind_speed, and meanpressure.
*/
public class DailyDelhiClimate extends CsvDataset {
private static final String ARTIFACT_ID = "daily-delhi-climate";
private static final String VERSION = "3.0";
private Usage usage;
private MRL mrl;
private boolean prepared;
DailyDelhiClimate(Builder builder) {
super(builder);
usage = builder.usage;
mrl = builder.getMrl();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path root = mrl.getRepository().getResourceDirectory(artifact);
Path csvFile;
switch (usage) {
case TRAIN:
csvFile = root.resolve("DailyDelhiClimateTrain.csv");
break;
case TEST:
csvFile = root.resolve("DailyDelhiClimateTest.csv");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available.");
}
csvUrl = csvFile.toUri().toURL();
super.prepare(progress);
prepared = true;
}
/**
* Creates a builder to build a {@link DailyDelhiClimate}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** A builder to construct a {@link DailyDelhiClimate}. */
public static final class Builder extends CsvBuilder<Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
List<String> featureArray =
new ArrayList<>(
Arrays.asList(
"date", "meantemp", "humidity", "wind_speed", "meanpressure"));
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
csvFormat =
CSVFormat.DEFAULT
.builder()
.setHeader()
.setSkipHeaderRecord(true)
.setIgnoreHeaderCase(true)
.setTrim(true)
.get();
}
/**
         * Returns this {@code Builder} object.
*
* @return this {@code BaseBuilder}
*/
@Override
public Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the new usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
* Sets optional groupId.
*
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return self();
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return self();
}
/**
* Adds a feature to the features set.
*
* @param name the name of the feature
* @return this builder
*/
public Builder addFeature(String name) {
if ("date".equals(name)) {
return addFeature(
new Feature(name, Featurizers.getEpochDayFeaturizer("yyyy-MM-dd")));
} else {
return addNumericFeature(name);
}
}
/**
* Returns the available features of this dataset.
*
* @return a list of feature names
*/
public List<String> getAvailableFeatures() {
return featureArray;
}
/**
* Builds the new {@link DailyDelhiClimate}.
*
* @return the new {@link DailyDelhiClimate}
*/
@Override
public DailyDelhiClimate build() {
if (features.isEmpty()) {
for (String name : featureArray) {
addFeature(name);
}
}
return new DailyDelhiClimate(this);
}
MRL getMrl() {
return repository.dataset(Tabular.ANY, groupId, artifactId, VERSION);
}
}
}
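
// --- Illustrative usage sketch (not part of the original DJL source) ---
// Selects the date (featurized as epoch day) and the mean temperature; the
// batch size is an assumed value, and ProgressBar is
// ai.djl.training.util.ProgressBar.
//
// DailyDelhiClimate climate =
//         DailyDelhiClimate.builder()
//                 .addFeature("date")
//                 .addFeature("meantemp")
//                 .setSampling(32, false)
//                 .build();
// climate.prepare(new ProgressBar());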
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/ListFeatures.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/** An extension of the {@link ArrayList} for use in the {@link TabularTranslator}. */
public class ListFeatures extends ArrayList<String> {
private static final long serialVersionUID = 1L;
/**
* Constructs a {@code ListFeatures} instance.
*
* @see ArrayList#ArrayList()
*/
public ListFeatures() {}
/**
* Constructs a {@code ListFeatures} instance.
*
* @param initialCapacity the initial capacity of the list
* @throws IllegalArgumentException if the specified initial capacity is negative
* @see ArrayList#ArrayList(int)
*/
public ListFeatures(int initialCapacity) {
super(initialCapacity);
}
/**
* Constructs a {@code ListFeatures} instance from a source list.
*
* @param source the source list
*/
@SuppressWarnings("this-escape")
public ListFeatures(List<String> source) {
super(source.size());
addAll(source);
}
/**
* Constructs a {@code ListFeatures} instance.
*
* @param c the collection whose elements are to be placed into this list
* @throws NullPointerException if the specified collection is null
* @see ArrayList#ArrayList(Collection)
*/
public ListFeatures(Collection<? extends String> c) {
super(c);
}
}
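
// --- Illustrative usage sketch (not part of the original DJL source) ---
// ListFeatures carries positional (column-ordered) feature values into a
// TabularTranslator-backed predictor; the values below are hypothetical, and
// Arrays is java.util.Arrays.
//
// ListFeatures input = new ListFeatures(Arrays.asList("5.1", "3.5", "1.4", "0.2"));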
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/MapFeatures.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** An extension of the {@link ConcurrentHashMap} for use in the {@link TabularTranslator}. */
public class MapFeatures extends ConcurrentHashMap<String, String> {
private static final long serialVersionUID = 1L;
/**
* Constructs a {@code MapFeatures} instance.
*
* @see ConcurrentHashMap#ConcurrentHashMap()
*/
public MapFeatures() {}
/**
* Constructs a {@code MapFeatures} instance.
*
* @param initialCapacity The implementation performs internal sizing to accommodate this many
* elements.
* @throws IllegalArgumentException if the initial capacity of elements is negative
* @see ConcurrentHashMap#ConcurrentHashMap(int)
*/
public MapFeatures(int initialCapacity) {
super(initialCapacity);
}
/**
* Constructs a {@code MapFeatures} instance.
*
* @param m the map
* @see ConcurrentHashMap#ConcurrentHashMap(Map)
*/
public MapFeatures(Map<? extends String, ? extends String> m) {
super(m);
}
/**
* Constructs a {@code MapFeatures} instance.
*
* @param initialCapacity the initial capacity. The implementation performs internal sizing to
* accommodate this many elements, given the specified load factor.
* @param loadFactor the load factor (table density) for establishing the initial table size
* @throws IllegalArgumentException if the initial capacity of elements is negative or the load
* factor is nonpositive
* @see ConcurrentHashMap#ConcurrentHashMap(int, float)
*/
public MapFeatures(int initialCapacity, float loadFactor) {
super(initialCapacity, loadFactor);
}
/**
* Constructs a {@link MapFeatures}.
*
* @param initialCapacity the initial capacity. The implementation performs internal sizing to
* accommodate this many elements, given the specified load factor.
* @param loadFactor the load factor (table density) for establishing the initial table size
* @param concurrencyLevel the estimated number of concurrently updating threads. The
* implementation may use this value as a sizing hint.
* @throws IllegalArgumentException if the initial capacity is negative or the load factor or
* concurrencyLevel are nonpositive
* @see ConcurrentHashMap#ConcurrentHashMap(int, float, int)
*/
public MapFeatures(int initialCapacity, float loadFactor, int concurrencyLevel) {
super(initialCapacity, loadFactor, concurrencyLevel);
}
/**
     * Creates a {@link MapFeatures} instance from a source map.
     *
     * @param source the source map
* @return a new {@link MapFeatures}
*/
public static MapFeatures fromMap(Map<String, String> source) {
MapFeatures map = new MapFeatures(source.size());
map.putAll(source);
return map;
}
}
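
// --- Illustrative usage sketch (not part of the original DJL source) ---
// MapFeatures carries named feature values into a TabularTranslator-backed
// predictor; the column names and values below are hypothetical.
//
// MapFeatures input = new MapFeatures();
// input.put("meantemp", "21.5");
// input.put("humidity", "58.0");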
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/MovieLens100k.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.Application;
import ai.djl.basicdataset.BasicDatasets;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.util.Progress;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** Movielens 100k movie reviews dataset from https://grouplens.org/datasets/movielens/100k/. */
public final class MovieLens100k extends CsvDataset {
private static final String ARTIFACT_ID = "movielens-100k";
private static final String VERSION = "1.0";
private static final String[] USER_FEATURES = {
"user_id", "user_age", "user_gender", "user_occupation", "user_zipcode"
};
private static final String[] MOVIE_FEATURES = {
"movie_id",
"movie_title",
"movie_release_date",
"movie_video_release_date",
"imdb_url",
"unknown",
"action",
"adventure",
"animation",
"childrens",
"comedy",
"crime",
"documentary",
"drama",
"fantasy",
"film-noir",
"horror",
"musical",
"mystery",
"romance",
"sci-fi",
"thriller",
"war",
"western"
};
enum HeaderEnum {
user_id,
movie_id,
rating,
timestamp
}
private Usage usage;
private MRL mrl;
private boolean prepared;
private Map<String, Map<String, String>> userFeaturesMap;
private Map<String, Map<String, String>> movieFeaturesMap;
MovieLens100k(Builder builder) {
super(builder);
usage = builder.usage;
mrl = builder.getMrl();
}
/** {@inheritDoc} */
@Override
public String getCell(long rowIndex, String featureName) {
CSVRecord record = csvRecords.get(Math.toIntExact(rowIndex));
if (HeaderEnum.rating.toString().equals(featureName)) {
return record.get(HeaderEnum.rating);
}
if (featureName.startsWith("user")) {
String userId = record.get(HeaderEnum.user_id);
return userFeaturesMap.get(userId).get(featureName);
}
String movieId = record.get(HeaderEnum.movie_id);
return movieFeaturesMap.get(movieId).get(featureName);
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {
if (prepared) {
return;
}
Artifact artifact = mrl.getDefaultArtifact();
mrl.prepare(artifact, progress);
Path dir = mrl.getRepository().getResourceDirectory(artifact);
Path root = dir.resolve("ml-100k/ml-100k");
// The actual feature values to use for training/testing are stored in separate files
Path userFeaturesFile = root.resolve("u.user");
userFeaturesMap = prepareFeaturesMap(userFeaturesFile, USER_FEATURES);
Path movieFeaturesFile = root.resolve("u.item");
movieFeaturesMap = prepareFeaturesMap(movieFeaturesFile, MOVIE_FEATURES);
Path csvFile;
switch (usage) {
case TRAIN:
csvFile = root.resolve("ua.base");
break;
case TEST:
csvFile = root.resolve("ua.test");
break;
case VALIDATION:
default:
throw new UnsupportedOperationException("Validation data not available");
}
csvUrl = csvFile.toUri().toURL();
super.prepare(progress);
prepared = true;
}
private Map<String, Map<String, String>> prepareFeaturesMap(
Path featureFile, String[] featureNames) throws IOException {
URL featureFileUrl = featureFile.toUri().toURL();
CSVFormat format = CSVFormat.Builder.create(CSVFormat.newFormat('|')).get();
Reader reader =
new InputStreamReader(
new BufferedInputStream(featureFileUrl.openStream()),
StandardCharsets.UTF_8);
CSVParser csvParser = CSVParser.parse(reader, format);
List<CSVRecord> featureRecords = csvParser.getRecords();
Map<String, Map<String, String>> featuresMap = new ConcurrentHashMap<>();
for (CSVRecord record : featureRecords) {
Map<String, String> featureValues = new ConcurrentHashMap<>();
for (int i = 0; i < featureNames.length; i++) {
featureValues.put(featureNames[i], record.get(i));
}
featuresMap.put(record.get(0), featureValues);
}
return featuresMap;
}
/**
* Creates a builder to build a {@link MovieLens100k}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** A builder to construct a {@link MovieLens100k}. */
public static final class Builder extends CsvBuilder<Builder> {
Repository repository;
String groupId;
String artifactId;
Usage usage;
List<String> featureArray =
new ArrayList<>(
Arrays.asList(
"user_age",
"user_gender",
"user_occupation",
"user_zipcode",
"movie_title",
"movie_genres"));
List<String> movieGenres =
new ArrayList<>(
Arrays.asList(
"unknown",
"action",
"adventure",
"animation",
"childrens",
"comedy",
"crime",
"documentary",
"drama",
"fantasy",
"film-noir",
"horror",
"musical",
"mystery",
"romance",
"sci-fi",
"thriller",
"war",
"western"));
/** Constructs a new builder. */
Builder() {
repository = BasicDatasets.REPOSITORY;
groupId = BasicDatasets.GROUP_ID;
artifactId = ARTIFACT_ID;
usage = Usage.TRAIN;
csvFormat = CSVFormat.TDF.builder().setHeader(HeaderEnum.class).setQuote(null).get();
}
/** {@inheritDoc} */
@Override
public Builder self() {
return this;
}
/**
* Sets the optional usage.
*
* @param usage the new usage
* @return this builder
*/
public Builder optUsage(Usage usage) {
this.usage = usage;
return self();
}
/**
* Sets the optional repository.
*
* @param repository the repository
* @return this builder
*/
public Builder optRepository(Repository repository) {
this.repository = repository;
return self();
}
/**
         * Sets the optional groupId.
         *
         * @param groupId the groupId
* @return this builder
*/
public Builder optGroupId(String groupId) {
this.groupId = groupId;
return self();
}
/**
* Sets the optional artifactId.
*
* @param artifactId the artifactId
* @return this builder
*/
public Builder optArtifactId(String artifactId) {
if (artifactId.contains(":")) {
String[] tokens = artifactId.split(":");
groupId = tokens[0];
this.artifactId = tokens[1];
} else {
this.artifactId = artifactId;
}
return self();
}
/**
* Returns the available features of this dataset.
*
* @return a list of feature names
*/
public List<String> getAvailableFeatures() {
return featureArray;
}
/**
* Adds a feature to the features set.
*
* @param name the name of the feature
* @return this builder
*/
public Builder addFeature(String name) {
if (getAvailableFeatures().contains(name)) {
switch (name) {
case "user_age":
addNumericFeature(name);
break;
case "user_gender":
case "user_occupation":
addCategoricalFeature(name, true);
break;
case "user_zipcode":
case "movie_title":
addCategoricalFeature(name, false);
break;
case "movie_genres":
movieGenres.forEach(genre -> addNumericFeature(genre));
break;
default:
break;
}
} else {
throw new IllegalArgumentException(
String.format(
"Provided feature %s is not valid. Valid features are: %s",
name, featureArray));
}
return self();
}
/**
* Builds the new {@link MovieLens100k}.
*
* @return the new {@link MovieLens100k}
*/
@Override
public MovieLens100k build() {
if (features.isEmpty()) {
featureArray.forEach(feature -> addFeature(feature));
}
if (labels.isEmpty()) {
addCategoricalLabel("rating", true);
}
return new MovieLens100k(this);
}
MRL getMrl() {
return repository.dataset(Application.Tabular.ANY, groupId, artifactId, VERSION);
}
}
}
|
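A minimal usage sketch for the dataset above (not part of the library source; the class name and batch size are illustrative, and it assumes network access so the default DJL repository can download the artifact):

import ai.djl.basicdataset.tabular.MovieLens100k;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.util.ProgressBar;

public final class MovieLens100kExample {
    public static void main(String[] args) throws Exception {
        MovieLens100k dataset =
                MovieLens100k.builder()
                        .optUsage(Dataset.Usage.TRAIN) // resolves to the "ua.base" split
                        .addFeature("user_age")        // numeric feature
                        .addFeature("movie_genres")    // expands into one column per genre
                        .setSampling(32, true)         // batch size 32, shuffled
                        .build();                      // adds the "rating" label by default
        dataset.prepare(new ProgressBar());            // downloads and parses the files
        System.out.println("feature columns: " + dataset.getFeatureSize());
    }
}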
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/TabularDataset.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.basicdataset.tabular.utils.DynamicBuffer;
import ai.djl.basicdataset.tabular.utils.Feature;
import ai.djl.basicdataset.tabular.utils.Featurizers;
import ai.djl.basicdataset.tabular.utils.PreparedFeaturizer;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.dataset.Record;
import ai.djl.translate.TranslatorOptions;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/** An abstract class for creating tabular datasets. */
public abstract class TabularDataset extends RandomAccessDataset {
protected List<Feature> features;
protected List<Feature> labels;
/**
* Creates a new instance of {@link RandomAccessDataset} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
public TabularDataset(BaseBuilder<?> builder) {
super(builder);
features = builder.features;
labels = builder.labels;
if (features.isEmpty()) {
throw new IllegalArgumentException("Missing features.");
}
if (labels.isEmpty() && !builder.allowNoLabels) {
throw new IllegalArgumentException("Missing labels.");
}
}
/**
* Gets the feature size of current {@link TabularDataset}.
*
* @return the feature size
*/
public int getFeatureSize() {
return features.size();
}
/**
* Gets the label size of current {@link TabularDataset}.
*
     * @return the label size
*/
public int getLabelSize() {
return labels.size();
}
/**
* Returns the dataset features.
*
* @return the dataset features
*/
public List<Feature> getFeatures() {
return features;
}
/**
* Returns the dataset labels.
*
* @return the dataset labels
*/
public List<Feature> getLabels() {
return labels;
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) {
NDList data = getRowFeatures(manager, index, features);
NDList label;
if (labels.isEmpty()) {
label = new NDList();
} else {
label = getRowFeatures(manager, index, labels);
}
return new Record(data, label);
}
/**
     * Returns the raw string values of the designated features (either data or label features)
     * from a row.
*
* @param index the index of the requested data item
* @param selected the features to pull from the row
* @return the direct features
*/
public List<String> getRowDirect(long index, List<Feature> selected) {
List<String> results = new ArrayList<>(selected.size());
for (Feature feature : selected) {
results.add(getCell(index, feature.getName()));
}
return results;
}
/**
* Returns the designated features (either data or label features) from a row.
*
* @param manager the manager used to create the arrays
* @param index the index of the requested data item
* @param selected the features to pull from the row
* @return the features formatted as an {@link NDList}
*/
public NDList getRowFeatures(NDManager manager, long index, List<Feature> selected) {
DynamicBuffer bb = new DynamicBuffer();
for (Feature feature : selected) {
String name = feature.getName();
String value = getCell(index, name);
feature.getFeaturizer().featurize(bb, value);
}
FloatBuffer buf = bb.getBuffer();
return new NDList(manager.create(buf, new Shape(bb.getLength())));
}
/** Prepares the {@link ai.djl.basicdataset.tabular.utils.PreparedFeaturizer}s. */
protected void prepareFeaturizers() {
int availableSize = Math.toIntExact(availableSize());
List<Feature> featuresToPrepare = new ArrayList<>(features.size() + labels.size());
featuresToPrepare.addAll(features);
featuresToPrepare.addAll(labels);
for (Feature feature : featuresToPrepare) {
if (feature.getFeaturizer() instanceof PreparedFeaturizer) {
PreparedFeaturizer featurizer = (PreparedFeaturizer) feature.getFeaturizer();
List<String> inputs = new ArrayList<>(Math.toIntExact(availableSize));
for (int i = 0; i < availableSize; i++) {
inputs.add(getCell(i, feature.getName()));
}
featurizer.prepare(inputs);
}
}
}
/**
* Returns a cell in the dataset.
*
* @param rowIndex the row index or record index for the cell
* @param featureName the feature or column of the cell
* @return the value of the cell at that row and column
*/
public abstract String getCell(long rowIndex, String featureName);
/** {@inheritDoc} */
@Override
public TranslatorOptions matchingTranslatorOptions() {
return new TabularTranslator(features, labels).getExpansions();
}
/**
* Used to build a {@link TabularDataset}.
*
* @param <T> the builder type
*/
public abstract static class BaseBuilder<T extends BaseBuilder<T>>
extends RandomAccessDataset.BaseBuilder<T> {
protected List<Feature> features;
protected List<Feature> labels;
protected boolean allowNoLabels;
protected BaseBuilder() {
features = new ArrayList<>();
labels = new ArrayList<>();
}
/**
* Adds the features to the feature set.
*
* @param features the features
* @return this builder
*/
public T addFeature(Feature... features) {
Collections.addAll(this.features, features);
return self();
}
/**
* Adds a numeric feature to the feature set.
*
* @param name the feature name
* @return this builder
*/
public T addNumericFeature(String name) {
features.add(new Feature(name, true));
return self();
}
/**
* Adds a numeric feature to the feature set.
*
* @param name the feature name
* @param normalize true to normalize the column
* @return this builder
*/
public T addNumericFeature(String name, boolean normalize) {
features.add(new Feature(name, Featurizers.getNumericFeaturizer(normalize)));
return self();
}
/**
* Adds a categorical feature to the feature set.
*
* @param name the feature name
* @return this builder
*/
public T addCategoricalFeature(String name) {
features.add(new Feature(name, false));
return self();
}
/**
* Adds a categorical feature to the feature set.
*
* @param name the feature name
         * @param onehotEncode true to use one-hot encoding
* @return this builder
*/
public T addCategoricalFeature(String name, boolean onehotEncode) {
features.add(new Feature(name, Featurizers.getStringFeaturizer(onehotEncode)));
return self();
}
/**
         * Adds a categorical feature to the feature set with the specified mapping.
         *
         * @param name the feature name
         * @param map a map from categorical values to indices
         * @param onehotEncode true to use one-hot encoding
* @return this builder
*/
public T addCategoricalFeature(
String name, Map<String, Integer> map, boolean onehotEncode) {
features.add(new Feature(name, map, onehotEncode));
return self();
}
/**
* Adds the features to the label set.
*
* @param labels the labels
* @return this builder
*/
public T addLabel(Feature... labels) {
Collections.addAll(this.labels, labels);
return self();
}
/**
         * Adds a numeric feature to the label set.
*
* @param name the label name
* @return this builder
*/
public T addNumericLabel(String name) {
labels.add(new Feature(name, true));
return self();
}
/**
         * Adds a numeric feature to the label set.
*
* @param name the label name
* @param normalize true to normalize the column
* @return this builder
*/
public T addNumericLabel(String name, boolean normalize) {
labels.add(new Feature(name, Featurizers.getNumericFeaturizer(normalize)));
return self();
}
/**
* Adds a categorical feature to the label set.
*
* @param name the feature name
* @return this builder
*/
public T addCategoricalLabel(String name) {
labels.add(new Feature(name, false));
return self();
}
/**
* Adds a categorical feature to the label set.
*
* @param name the feature name
         * @param onehotEncode true to use one-hot encoding
* @return this builder
*/
public T addCategoricalLabel(String name, boolean onehotEncode) {
labels.add(new Feature(name, Featurizers.getStringFeaturizer(onehotEncode)));
return self();
}
/**
         * Adds a categorical feature to the label set with the specified mapping.
         *
         * @param name the feature name
         * @param map a map from categorical values to indices
         * @param onehotEncode true to use one-hot encoding
* @return this builder
*/
public T addCategoricalLabel(String name, Map<String, Integer> map, boolean onehotEncode) {
labels.add(new Feature(name, map, onehotEncode));
return self();
}
/**
* Indicates the dataset should not have any labels.
*
* @return this builder
*/
public T noLabels() {
allowNoLabels = true;
return self();
}
}
}
|
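To make the abstract contract above concrete, here is a hedged sketch of a small in-memory subclass; the class name and the three-column layout are invented for illustration, and it implements only the abstract method above plus the standard RandomAccessDataset hooks:

import ai.djl.basicdataset.tabular.TabularDataset;
import ai.djl.util.Progress;

public final class InMemoryTabularDataset extends TabularDataset {

    private final String[][] rows; // each row holds {age, gender, label}

    private InMemoryTabularDataset(Builder builder, String[][] rows) {
        super(builder);
        this.rows = rows;
    }

    /** {@inheritDoc} */
    @Override
    public String getCell(long rowIndex, String featureName) {
        String[] row = rows[Math.toIntExact(rowIndex)];
        switch (featureName) {
            case "age":
                return row[0];
            case "gender":
                return row[1];
            default: // "label"
                return row[2];
        }
    }

    /** {@inheritDoc} */
    @Override
    protected long availableSize() {
        return rows.length;
    }

    /** {@inheritDoc} */
    @Override
    public void prepare(Progress progress) {
        // nothing to download; the data is already in memory
    }

    /** A builder mirroring the BaseBuilder pattern above. */
    public static final class Builder extends TabularDataset.BaseBuilder<Builder> {

        /** {@inheritDoc} */
        @Override
        public Builder self() {
            return this;
        }

        public InMemoryTabularDataset build(String[][] rows) {
            return new InMemoryTabularDataset(this, rows);
        }
    }
}

// Usage (hypothetical data):
// InMemoryTabularDataset ds = new InMemoryTabularDataset.Builder()
//         .addNumericFeature("age")
//         .addCategoricalFeature("gender")
//         .addCategoricalLabel("label")
//         .setSampling(2, false)
//         .build(new String[][] {{"23", "F", "yes"}, {"31", "M", "no"}});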
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/TabularResults.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import java.util.List;
/** A list of results from running a tabular model. */
public class TabularResults {
private List<TabularResult> results;
/**
* Constructs a {@link TabularResults} with the given results.
*
* @param results the results
*/
public TabularResults(List<TabularResult> results) {
this.results = results;
}
/**
* Returns the result for the given feature index.
*
* @param index the feature/label index
* @return the result
*/
public TabularResult getFeature(int index) {
return results.get(index);
}
/**
* Returns the result for the given feature name.
*
* @param name the feature/label name
* @return the result
*/
public TabularResult getFeature(String name) {
for (TabularResult result : results) {
if (result.getName().equals(name)) {
return result;
}
}
throw new IllegalArgumentException(
"The TabularResults does not contain a result with name " + name);
}
/**
* Returns all of the {@link TabularResult}.
*
* @return all of the {@link TabularResult}
*/
public List<TabularResult> getAll() {
return results;
}
/**
* Returns the number of results.
*
* @return the number of results
*/
public int size() {
return results.size();
}
/** A single result corresponding to a single feature. */
public static final class TabularResult {
private String name;
private Object result;
/**
* Constructs the result.
*
* @param name the feature name
* @param result the computed feature result
*/
public TabularResult(String name, Object result) {
this.name = name;
this.result = result;
}
/**
* Returns the result (feature) name.
*
* @return the result (feature) name
*/
public String getName() {
return name;
}
/**
* Returns the computed result.
*
* @return the computed result
*/
public Object getResult() {
return result;
}
}
}
|
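A tiny hedged sketch of the container above; the feature names and values are invented:

import ai.djl.basicdataset.tabular.TabularResults;
import ai.djl.basicdataset.tabular.TabularResults.TabularResult;

import java.util.Arrays;

public final class TabularResultsExample {
    public static void main(String[] args) {
        TabularResults results =
                new TabularResults(
                        Arrays.asList(
                                new TabularResult("price", 12.5f),
                                new TabularResult("sold", Boolean.TRUE)));
        // The same entry can be looked up by name or by index
        System.out.println(results.getFeature("price").getResult()); // 12.5
        System.out.println(results.getFeature(0).getName());         // price
        System.out.println(results.size());                          // 2
    }
}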
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/TabularTranslator.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.Model;
import ai.djl.basicdataset.tabular.TabularResults.TabularResult;
import ai.djl.basicdataset.tabular.utils.DynamicBuffer;
import ai.djl.basicdataset.tabular.utils.Feature;
import ai.djl.basicdataset.tabular.utils.Featurizer;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.translate.TranslatorOptions;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/** A {@link Translator} that can be used for {@link ai.djl.Application.Tabular} tasks. */
public class TabularTranslator implements Translator<ListFeatures, TabularResults> {
private List<Feature> features;
private List<Feature> labels;
/**
* Constructs a {@code TabularTranslator} with the given features and labels.
*
* @param features the features for inputs
* @param labels the labels for outputs
*/
public TabularTranslator(List<Feature> features, List<Feature> labels) {
this.features = features;
this.labels = labels;
}
/**
* Constructs a tabular translator for a model.
*
* @param model the model
* @param arguments the arguments to build the translator with
*/
@SuppressWarnings("PMD.UnusedFormalParameter") // TODO: Remove when implementing function
public TabularTranslator(Model model, Map<String, ?> arguments) {
throw new UnsupportedOperationException(
"Constructing the TabularTranslator from arguments is not currently supported");
}
/** {@inheritDoc} */
@Override
public TabularResults processOutput(TranslatorContext ctx, NDList list) throws Exception {
List<TabularResult> results = new ArrayList<>(labels.size());
float[] data = list.head().toFloatArray();
int dataIndex = 0;
for (Feature label : labels) {
Featurizer featurizer = label.getFeaturizer();
int dataRequired = featurizer.dataRequired();
Object deFeaturized =
featurizer.deFeaturize(
Arrays.copyOfRange(data, dataIndex, dataIndex + dataRequired));
results.add(new TabularResult(label.getName(), deFeaturized));
dataIndex += dataRequired;
}
return new TabularResults(results);
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, ListFeatures input) throws Exception {
if (input.size() != features.size()) {
throw new IllegalArgumentException(
"The TabularTranslator expects "
+ features.size()
+ " arguments but received "
+ input.size());
}
DynamicBuffer bb = new DynamicBuffer();
for (int i = 0; i < features.size(); i++) {
String value = input.get(i);
features.get(i).getFeaturizer().featurize(bb, value);
}
FloatBuffer buf = bb.getBuffer();
return new NDList(ctx.getNDManager().create(buf, new Shape(bb.getLength())));
}
/** {@inheritDoc} */
@Override
public TranslatorOptions getExpansions() {
return new TabularTranslatorFactory().withTranslator(this);
}
/**
* Returns the features for the translator.
*
* @return the features for the translator
*/
public List<Feature> getFeatures() {
return features;
}
/**
* Returns the labels for the translator.
*
* @return the labels for the translator
*/
public List<Feature> getLabels() {
return labels;
}
}
|
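A hedged sketch of constructing the translator directly; the features, labels, and the commented predictor lines are illustrative (they assume a trained ai.djl.Model is already loaded):

import ai.djl.basicdataset.tabular.TabularTranslator;
import ai.djl.basicdataset.tabular.utils.Feature;

import java.util.Arrays;
import java.util.List;

public final class TabularTranslatorExample {
    public static void main(String[] args) {
        // The input order here must match the column order the model was trained with.
        List<Feature> features =
                Arrays.asList(new Feature("age", true), new Feature("gender", false));
        List<Feature> labels = Arrays.asList(new Feature("rating", true));
        TabularTranslator translator = new TabularTranslator(features, labels);
        System.out.println(translator.getFeatures().size() + " input features");
        // With a loaded model (hypothetical):
        // Predictor<ListFeatures, TabularResults> predictor = model.newPredictor(translator);
        // ListFeatures row = new ListFeatures();
        // row.add("42");
        // row.add("1");
        // TabularResults out = predictor.predict(row);
    }
}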
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/TabularTranslatorFactory.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular;
import ai.djl.Model;
import ai.djl.basicdataset.tabular.utils.Feature;
import ai.djl.modality.Classifications;
import ai.djl.ndarray.NDList;
import ai.djl.translate.ExpansionTranslatorFactory;
import ai.djl.translate.PostProcessor;
import ai.djl.translate.PreProcessor;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.lang.reflect.Type;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
/** A {@link ai.djl.translate.TranslatorFactory} to extend the {@link TabularTranslator}. */
public class TabularTranslatorFactory
extends ExpansionTranslatorFactory<ListFeatures, TabularResults> {
/** {@inheritDoc} */
@Override
protected Translator<ListFeatures, TabularResults> buildBaseTranslator(
Model model, Map<String, ?> arguments) {
return new TabularTranslator(model, arguments);
}
/** {@inheritDoc} */
@Override
public Class<ListFeatures> getBaseInputType() {
return ListFeatures.class;
}
/** {@inheritDoc} */
@Override
public Class<TabularResults> getBaseOutputType() {
return TabularResults.class;
}
/** {@inheritDoc} */
@Override
protected Map<Type, Function<PreProcessor<ListFeatures>, PreProcessor<?>>>
getPreprocessorExpansions() {
Map<Type, Function<PreProcessor<ListFeatures>, PreProcessor<?>>> expansions =
new ConcurrentHashMap<>();
expansions.put(MapFeatures.class, MapPreProcessor::new);
return expansions;
}
/** {@inheritDoc} */
@Override
protected Map<Type, Function<PostProcessor<TabularResults>, PostProcessor<?>>>
getPostprocessorExpansions() {
Map<Type, Function<PostProcessor<TabularResults>, PostProcessor<?>>> expansions =
new ConcurrentHashMap<>();
expansions.put(Classifications.class, ClassificationsTabularPostProcessor::new);
expansions.put(Float.class, RegressionTabularPostProcessor::new);
return expansions;
}
static final class MapPreProcessor implements PreProcessor<MapFeatures> {
private TabularTranslator preProcessor;
MapPreProcessor(PreProcessor<ListFeatures> preProcessor) {
if (!(preProcessor instanceof TabularTranslator)) {
throw new IllegalArgumentException(
"The MapPreProcessor for the TabularTranslatorFactory expects a"
+ " TabularTranslator, but received "
+ preProcessor.getClass().getName());
}
this.preProcessor = (TabularTranslator) preProcessor;
}
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, MapFeatures input) throws Exception {
ListFeatures list = new ListFeatures(preProcessor.getFeatures().size());
for (Feature feature : preProcessor.getFeatures()) {
if (input.containsKey(feature.getName())) {
list.add(input.get(feature.getName()));
} else {
throw new IllegalArgumentException(
"The input to the TabularTranslator is missing the feature: "
+ feature.getName());
}
}
return preProcessor.processInput(ctx, list);
}
}
static final class ClassificationsTabularPostProcessor
implements PostProcessor<Classifications> {
private PostProcessor<TabularResults> postProcessor;
ClassificationsTabularPostProcessor(PostProcessor<TabularResults> postProcessor) {
this.postProcessor = postProcessor;
}
/** {@inheritDoc} */
@Override
public Classifications processOutput(TranslatorContext ctx, NDList list) throws Exception {
TabularResults results = postProcessor.processOutput(ctx, list);
if (results.size() != 1) {
throw new IllegalStateException(
"The ClassificationsTabularPostProcessor expected the model to produce one"
+ " output, but instead it produced "
+ results.size());
}
Object result = results.getFeature(0).getResult();
if (result instanceof Classifications) {
return (Classifications) result;
}
throw new IllegalStateException(
"The ClassificationsTabularPostProcessor expected the model to produce a"
+ " Classifications, but instead it produced "
+ result.getClass().getName());
}
}
static final class RegressionTabularPostProcessor implements PostProcessor<Float> {
private PostProcessor<TabularResults> postProcessor;
RegressionTabularPostProcessor(PostProcessor<TabularResults> postProcessor) {
this.postProcessor = postProcessor;
}
/** {@inheritDoc} */
@Override
public Float processOutput(TranslatorContext ctx, NDList list) throws Exception {
TabularResults results = postProcessor.processOutput(ctx, list);
if (results.size() != 1) {
throw new IllegalStateException(
"The RegressionTabularPostProcessor expected the model to produce one"
+ " output, but instead it produced "
+ results.size());
}
Object result = results.getFeature(0).getResult();
if (result instanceof Float) {
return (Float) result;
}
throw new IllegalStateException(
"The RegressionTabularPostProcessor expected the model to produce a float, but"
+ " instead it produced "
+ result.getClass().getName());
}
}
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains a library of built-in datasets for {@link ai.djl.Application.Tabular}. */
package ai.djl.basicdataset.tabular;
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/utils/DynamicBuffer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular.utils;
import java.nio.FloatBuffer;
/** A float buffer that can dynamically change its capacity. */
public class DynamicBuffer {
private FloatBuffer buffer;
private int length;
/** Constructs a new instance of {@code DynamicBuffer}. */
public DynamicBuffer() {
buffer = FloatBuffer.allocate(128);
}
/**
* Writes the given float into this buffer at the current position.
*
* @param f the float to be written
* @return this buffer
*/
public DynamicBuffer put(float f) {
++length;
buffer.put(f);
if (buffer.capacity() == length) {
FloatBuffer buf = buffer;
buf.rewind();
buffer = FloatBuffer.allocate(length * 2);
buffer.put(buf);
}
return this;
}
/**
* Returns a {@code FloatBuffer} that contains all the data.
*
* @return a {@code FloatBuffer}
*/
public FloatBuffer getBuffer() {
buffer.rewind();
buffer.limit(length);
return buffer;
}
/**
* Returns the buffer size.
*
* @return the buffer size
*/
public int getLength() {
return length;
}
}
|
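A short sketch of the grow-on-write behavior above: the buffer starts at 128 floats and doubles whenever it fills, while getBuffer() exposes only the written region.

import ai.djl.basicdataset.tabular.utils.DynamicBuffer;

import java.nio.FloatBuffer;

public final class DynamicBufferExample {
    public static void main(String[] args) {
        DynamicBuffer db = new DynamicBuffer();
        for (int i = 0; i < 200; i++) { // crosses the initial 128-float capacity
            db.put(i);
        }
        FloatBuffer buf = db.getBuffer();
        System.out.println(db.getLength());  // 200 floats written
        System.out.println(buf.remaining()); // 200: the limit is set to the written length
    }
}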
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/utils/Feature.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular.utils;
import ai.djl.basicdataset.tabular.utils.Featurizer.DataFeaturizer;
import java.util.Map;
/** A class that contains a feature name and its {@code Featurizer}. */
public final class Feature {
String name;
Featurizer featurizer;
/**
* Constructs a {@code Feature} instance.
*
* @param name the feature name
* @param featurizer the {@code Featurizer}
*/
public Feature(String name, Featurizer featurizer) {
this.name = name;
this.featurizer = featurizer;
}
/**
* Constructs a {@code Feature} instance.
*
* @param name the feature name
* @param featurizer the {@code Featurizer}
*/
public Feature(String name, DataFeaturizer featurizer) {
this.name = name;
this.featurizer = featurizer;
}
/**
* Constructs a {@code Feature} instance.
*
* @param name the feature name
* @param numeric true if input is numeric data
*/
public Feature(String name, boolean numeric) {
this.name = name;
if (numeric) {
featurizer = Featurizers.getNumericFeaturizer();
} else {
featurizer = Featurizers.getStringFeaturizer();
}
}
/**
* Constructs a {@code Feature} instance.
*
* @param name the feature name
     * @param map a map from categorical values to indices
     * @param onehotEncode true to use one-hot encoding
*/
public Feature(String name, Map<String, Integer> map, boolean onehotEncode) {
this.name = name;
this.featurizer = Featurizers.getStringFeaturizer(map, onehotEncode);
}
/**
* Returns the feature name.
*
* @return the feature name
*/
public String getName() {
return name;
}
/**
* Returns the {@code Featurizer}.
*
* @return the {@code Featurizer}
*/
public Featurizer getFeaturizer() {
return featurizer;
}
}
|
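A sketch of the three construction styles above; the feature names and the gender mapping are invented:

import ai.djl.basicdataset.tabular.utils.Feature;

import java.util.HashMap;
import java.util.Map;

public final class FeatureExample {
    public static void main(String[] args) {
        Feature age = new Feature("age", true);    // numeric featurizer
        Feature city = new Feature("city", false); // string featurizer (one-hot, prepared)
        Map<String, Integer> genders = new HashMap<>();
        genders.put("F", 0);
        genders.put("M", 1);
        Feature gender = new Feature("gender", genders, true); // fixed one-hot mapping
        System.out.println(age.getName() + ", " + city.getName() + ", " + gender.getName());
    }
}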
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/utils/Featurizer.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular.utils;
/** An interface that converts a String into numeric data. */
public interface Featurizer {
/**
* Puts encoded data into the float buffer.
*
* @param buf the float buffer to be filled
* @param input the string input
*/
void featurize(DynamicBuffer buf, String input);
/**
* Returns the length of the data array required by {@link #deFeaturize(float[])}.
*
* @return the length of the data array required by {@link #deFeaturize(float[])}
*/
int dataRequired();
/**
* Converts the output data for a label back into the Java type.
*
     * @param data the data vector corresponding to the feature
* @return a Java type (depending on the {@link Featurizer}) representing the data.
*/
Object deFeaturize(float[] data);
/**
* A {@link Featurizer} that only supports the data featurize operations, but not the full
* deFeaturize operations used by labels.
*/
interface DataFeaturizer extends Featurizer {
/** {@inheritDoc} */
@Override
default int dataRequired() {
throw new IllegalStateException(
"DataFeaturizers only support featurize, not deFeaturize");
}
/** {@inheritDoc} */
@Override
default Object deFeaturize(float[] data) {
throw new IllegalStateException(
"DataFeaturizers only support featurize, not deFeaturize");
}
}
}
|
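A hedged sketch of implementing the interface above: a custom featurizer (the class name is invented) that encodes "yes"/"no" strings as 1/0 and decodes the model output back to a Boolean.

import ai.djl.basicdataset.tabular.utils.DynamicBuffer;
import ai.djl.basicdataset.tabular.utils.Featurizer;

public final class YesNoFeaturizer implements Featurizer {

    /** {@inheritDoc} */
    @Override
    public void featurize(DynamicBuffer buf, String input) {
        buf.put("yes".equalsIgnoreCase(input) ? 1 : 0);
    }

    /** {@inheritDoc} */
    @Override
    public int dataRequired() {
        return 1; // deFeaturize consumes a single float
    }

    /** {@inheritDoc} */
    @Override
    public Object deFeaturize(float[] data) {
        return data[0] >= 0.5f; // threshold the model output back to a Boolean
    }
}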
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/utils/Featurizers.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular.utils;
import ai.djl.modality.Classifications;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
/** A utility class that provides helper functions to create a {@link Featurizer}. */
public final class Featurizers {
private static final Featurizer NUMERIC_FEATURIZER = new NumericFeaturizer();
private Featurizers() {}
/**
* Returns the default numeric {@link Featurizer}.
*
* @return the default numeric {@link Featurizer}
*/
public static Featurizer getNumericFeaturizer() {
return getNumericFeaturizer(false);
}
/**
* Returns the default numeric {@link Featurizer}.
*
* @param normalize true to normalize (with mean and std) the values
* @return the default numeric {@link Featurizer}
*/
public static Featurizer getNumericFeaturizer(boolean normalize) {
if (normalize) {
return new NormalizedNumericFeaturizer();
} else {
return NUMERIC_FEATURIZER;
}
}
/**
* Returns the default String {@link Featurizer}.
*
* @return the default String {@link Featurizer}
*/
public static Featurizer getStringFeaturizer() {
return getStringFeaturizer(true);
}
/**
* Returns the default String {@link Featurizer}.
*
* @param onehotEncode true to use onehot encoding
* @return the default String {@link Featurizer}
*/
public static Featurizer getStringFeaturizer(boolean onehotEncode) {
if (onehotEncode) {
return new PreparedOneHotStringFeaturizer();
} else {
return new StringFeaturizer();
}
}
/**
* Returns a new instance of String {@link Featurizer}.
*
     * @param map a map from categorical values to indices
* @param onehotEncode true to use onehot encoding
* @return a new instance of String {@link Featurizer}
*/
public static Featurizer getStringFeaturizer(Map<String, Integer> map, boolean onehotEncode) {
if (onehotEncode) {
return new OneHotStringFeaturizer(map);
} else {
return new StringFeaturizer(map);
}
}
/**
* Constructs an {@link EpochDayFeaturizer} for representing dates using the epoch day (number
* of days since 1970-01-01).
*
* @param datePattern the pattern that dates are found in the data table column
* @return a new instance of {@link EpochDayFeaturizer}
*/
public static Featurizer getEpochDayFeaturizer(String datePattern) {
return new EpochDayFeaturizer(datePattern);
}
private static final class NumericFeaturizer implements Featurizer {
/** {@inheritDoc} */
@Override
public void featurize(DynamicBuffer buf, String input) {
buf.put(Float.parseFloat(input));
}
/** {@inheritDoc} */
@Override
public int dataRequired() {
return 1;
}
/** {@inheritDoc} */
@Override
public Object deFeaturize(float[] data) {
return data[0];
}
}
private static final class NormalizedNumericFeaturizer implements PreparedFeaturizer {
private float mean;
private float std;
/** {@inheritDoc} */
@Override
public void featurize(DynamicBuffer buf, String input) {
float value = (Float.parseFloat(input) - mean) / std;
buf.put(value);
}
/** {@inheritDoc} */
@Override
public void prepare(List<String> inputs) {
calculateMean(inputs);
calculateStd(inputs);
}
private void calculateMean(List<String> inputs) {
double sum = 0;
for (String input : inputs) {
sum += Float.parseFloat(input);
}
mean = (float) (sum / inputs.size());
}
private void calculateStd(List<String> inputs) {
double sum = 0;
for (String input : inputs) {
sum += Math.pow(Float.parseFloat(input) - mean, 2);
}
std = (float) Math.sqrt(sum / inputs.size());
}
/** {@inheritDoc} */
@Override
public int dataRequired() {
return 1;
}
/** {@inheritDoc} */
@Override
public Object deFeaturize(float[] data) {
return data[0];
}
}
private abstract static class BaseStringFeaturizer implements Featurizer {
protected Map<String, Integer> map;
protected List<String> classNames;
public BaseStringFeaturizer(Map<String, Integer> map) {
this.map = map;
if (map != null) {
buildClassNames();
}
}
/** {@inheritDoc} */
@Override
public int dataRequired() {
return map.size();
}
/** {@inheritDoc} */
@Override
public Object deFeaturize(float[] data) {
List<Double> probabilities = new ArrayList<>(data.length);
for (Float d : data) {
probabilities.add((double) d);
}
return new Classifications(classNames, probabilities);
}
protected final void buildClassNames() {
classNames = Arrays.asList(new String[map.size()]);
for (Map.Entry<String, Integer> entry : map.entrySet()) {
classNames.set(entry.getValue(), entry.getKey());
}
}
}
private static class OneHotStringFeaturizer extends BaseStringFeaturizer {
public OneHotStringFeaturizer(Map<String, Integer> map) {
super(map);
}
/** {@inheritDoc} */
@Override
public void featurize(DynamicBuffer buf, String input) {
for (int i = 0; i < map.size(); ++i) {
buf.put(i == map.get(input) ? 1 : 0);
}
}
}
private static final class PreparedOneHotStringFeaturizer extends OneHotStringFeaturizer
implements PreparedFeaturizer {
public PreparedOneHotStringFeaturizer() {
super(null);
}
/** {@inheritDoc} */
@Override
public void prepare(List<String> inputs) {
map = new ConcurrentHashMap<>();
TreeSet<String> uniqueInputs = new TreeSet<>(inputs);
for (String input : uniqueInputs) {
if (!map.containsKey(input)) {
map.put(input, map.size());
}
}
buildClassNames();
}
}
private static final class StringFeaturizer extends BaseStringFeaturizer {
private boolean autoMap;
StringFeaturizer() {
super(new HashMap<>());
this.autoMap = true;
}
StringFeaturizer(Map<String, Integer> map) {
super(map);
}
/** {@inheritDoc} */
@Override
public void featurize(DynamicBuffer buf, String input) {
Integer index = map.get(input);
if (index != null) {
buf.put(index);
return;
}
if (!autoMap) {
throw new IllegalArgumentException("Value: " + input + " not found in the map.");
}
int value = map.size();
map.put(input, value);
buf.put(value);
}
/** {@inheritDoc} */
@Override
public Object deFeaturize(float[] data) {
if (classNames.size() != map.size()) {
// May have to rebuild class names first if new ones were added
buildClassNames();
}
return super.deFeaturize(data);
}
}
/**
     * A featurizer for date-typed features that uses the epoch day (the number of days since
     * 1970-01-01).
*/
private static final class EpochDayFeaturizer implements Featurizer {
String datePattern;
/**
         * Constructs an {@code EpochDayFeaturizer}.
*
* @param datePattern the pattern that dates are found in the data table column
*/
EpochDayFeaturizer(String datePattern) {
this.datePattern = datePattern;
}
/**
         * Featurizes a date-typed value as its epoch day (the number of days since 1970-01-01)
         * and puts it into the float buffer, so that it can be consumed directly during
         * training.
*
* @param buf the float buffer to be filled
* @param input the date string in the format {@code yyyy-MM-dd}
*/
@Override
public void featurize(DynamicBuffer buf, String input) {
LocalDate ld = LocalDate.parse(input, DateTimeFormatter.ofPattern(datePattern));
long day = ld.toEpochDay();
buf.put(day);
}
/** {@inheritDoc} */
@Override
public int dataRequired() {
return 1;
}
/** {@inheritDoc} */
@Override
public Object deFeaturize(float[] data) {
return LocalDate.ofEpochDay(Math.round(data[0]));
}
}
}
|
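A sketch exercising the factory methods above. Note that the default one-hot string featurizer is a PreparedFeaturizer, so it must see the set of possible values before it can featurize; the values and date below are invented.

import ai.djl.basicdataset.tabular.utils.DynamicBuffer;
import ai.djl.basicdataset.tabular.utils.Featurizer;
import ai.djl.basicdataset.tabular.utils.Featurizers;
import ai.djl.basicdataset.tabular.utils.PreparedFeaturizer;

import java.util.Arrays;

public final class FeaturizersExample {
    public static void main(String[] args) {
        DynamicBuffer buf = new DynamicBuffer();

        Featurizer color = Featurizers.getStringFeaturizer(true); // one-hot, prepared
        ((PreparedFeaturizer) color).prepare(Arrays.asList("red", "green", "blue"));
        color.featurize(buf, "green"); // writes the 3-element one-hot vector 0, 1, 0

        Featurizer date = Featurizers.getEpochDayFeaturizer("yyyy-MM-dd");
        date.featurize(buf, "1970-01-11"); // writes 10.0 (days since the epoch)

        System.out.println(buf.getLength()); // 4 floats written in total
    }
}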
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/utils/PreparedFeaturizer.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.tabular.utils;
import java.util.List;
/** A {@link Featurizer} that must be prepared with the possible feature values before use. */
public interface PreparedFeaturizer extends Featurizer {
/**
* Prepares the featurizer with the list of possible inputs.
*
* @param inputs the possible inputs
*/
void prepare(List<String> inputs);
}
|
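A hedged sketch of a custom PreparedFeaturizer (the class name is invented): a min-max scaler that learns its range from the column values during prepare().

import ai.djl.basicdataset.tabular.utils.DynamicBuffer;
import ai.djl.basicdataset.tabular.utils.PreparedFeaturizer;

import java.util.List;

public final class MinMaxFeaturizer implements PreparedFeaturizer {

    private float min;
    private float max;

    /** {@inheritDoc} */
    @Override
    public void prepare(List<String> inputs) {
        min = Float.POSITIVE_INFINITY;
        max = Float.NEGATIVE_INFINITY;
        for (String input : inputs) {
            float v = Float.parseFloat(input);
            min = Math.min(min, v);
            max = Math.max(max, v);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void featurize(DynamicBuffer buf, String input) {
        float range = max - min;
        buf.put(range == 0 ? 0 : (Float.parseFloat(input) - min) / range);
    }

    /** {@inheritDoc} */
    @Override
    public int dataRequired() {
        return 1;
    }

    /** {@inheritDoc} */
    @Override
    public Object deFeaturize(float[] data) {
        return data[0] * (max - min) + min; // map the scaled value back to the raw range
    }
}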
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/tabular/utils/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains utilities used within datasets that are {@link ai.djl.Application.Tabular}. */
package ai.djl.basicdataset.tabular.utils;
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/utils/FixedBucketSampler.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.utils;
import ai.djl.basicdataset.nlp.TextDataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.dataset.Sampler;
import ai.djl.util.RandomUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* {@code FixedBucketSampler} is a {@code Sampler} to be used with {@link TextDataset}, and {@link
 * ai.djl.translate.PaddingStackBatchifier}. It groups text data of the same length and samples
 * them together so that the amount of padding required is minimized. It also makes sure that the
* sampling is random across epochs.
*/
public class FixedBucketSampler implements Sampler {
private static final Logger logger = LoggerFactory.getLogger(FixedBucketSampler.class);
private int numBuckets;
private int batchSize;
private boolean shuffle;
/**
* Constructs a new instance of {@link FixedBucketSampler} with the given number of buckets, and
* the given batch size.
*
* @param batchSize the batch size
* @param numBuckets the number of buckets
* @param shuffle whether to shuffle data randomly while sampling
*/
public FixedBucketSampler(int batchSize, int numBuckets, boolean shuffle) {
this.numBuckets = numBuckets;
this.batchSize = batchSize;
this.shuffle = shuffle;
if (batchSize == 1) {
logger.warn("FixedBucketSampler is not meaningful with batch size 1.");
}
}
/**
* Constructs a new instance of {@link FixedBucketSampler} with the given number of buckets, and
* the given batch size.
*
* @param batchSize the batch size
* @param numBuckets the number of buckets
*/
public FixedBucketSampler(int batchSize, int numBuckets) {
this(batchSize, numBuckets, true);
}
/**
     * Constructs a new instance of {@link FixedBucketSampler} with the given batch size and a
     * default of 10 buckets.
*
* @param batchSize the batch size
*/
public FixedBucketSampler(int batchSize) {
this(batchSize, 10);
}
/** {@inheritDoc} */
@Override
public Iterator<List<Long>> sample(RandomAccessDataset dataset) {
if (!(dataset instanceof TextDataset)) {
throw new IllegalArgumentException(
"FixedBucketSampler can only be used with TextDataset");
}
return new Iterate((TextDataset) dataset);
}
/** {@inheritDoc} */
@Override
public int getBatchSize() {
return batchSize;
}
private class Iterate implements Iterator<List<Long>> {
private List<List<TextDataset.Sample>> buckets;
private List<int[]> bucketBatch;
private int current;
public Iterate(TextDataset dataset) {
buckets = new ArrayList<>(numBuckets);
bucketBatch = new ArrayList<>();
List<TextDataset.Sample> samples = dataset.getSamples();
int min = samples.get(0).getSentenceLength();
int max = samples.get(samples.size() - 1).getSentenceLength();
int step = Math.max((1 + max - min) / numBuckets, 1);
Set<Integer> set = new HashSet<>(numBuckets);
for (int i = 0; i < numBuckets; ++i) {
set.add(Math.max(max - (numBuckets - i - 1) * step, min));
}
int[] bucketKeys = set.stream().mapToInt(Integer::intValue).toArray();
int index = 0;
List<TextDataset.Sample> list = new ArrayList<>();
for (TextDataset.Sample sample : samples) {
if (sample.getSentenceLength() > bucketKeys[index]) {
if (!list.isEmpty()) {
buckets.add(list);
list = new ArrayList<>();
}
++index;
}
list.add(sample);
}
if (!list.isEmpty()) {
buckets.add(list);
}
for (int i = 0; i < buckets.size(); ++i) {
List<TextDataset.Sample> bucket = buckets.get(i);
for (int j = 0; j < bucket.size(); j += batchSize) {
bucketBatch.add(new int[] {i, j});
}
}
if (shuffle) {
Collections.shuffle(bucketBatch, RandomUtils.RANDOM);
buckets.forEach(l -> Collections.shuffle(l, RandomUtils.RANDOM));
}
}
/** {@inheritDoc} */
@Override
public boolean hasNext() {
return current < bucketBatch.size();
}
/** {@inheritDoc} */
@Override
public List<Long> next() {
int[] batch = bucketBatch.get(current);
List<Long> ret = new ArrayList<>();
List<TextDataset.Sample> bucket = buckets.get(batch[0]);
int end = Math.min(bucket.size(), batch[1] + batchSize);
for (int i = batch[1]; i < end; ++i) {
ret.add(bucket.get(i).getIndex());
}
current++;
return ret;
}
}
}
|
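A short hedged sketch of constructing the sampler; the commented line shows how it would typically be attached to a TextDataset builder (the dataset class there is a stand-in):

import ai.djl.basicdataset.utils.FixedBucketSampler;
import ai.djl.training.dataset.Sampler;

public final class FixedBucketSamplerExample {
    public static void main(String[] args) {
        // batch size 32, 10 buckets, shuffled across epochs
        Sampler sampler = new FixedBucketSampler(32, 10, true);
        System.out.println(sampler.getBatchSize()); // 32
        // Typical wiring (hypothetical dataset):
        // TatoebaEnglishFrenchDataset.builder().setSampling(sampler).build();
    }
}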
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/utils/TextData.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.utils;
import ai.djl.basicdataset.nlp.TextDataset;
import ai.djl.modality.nlp.DefaultVocabulary;
import ai.djl.modality.nlp.Vocabulary;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.modality.nlp.embedding.TextEmbedding;
import ai.djl.modality.nlp.embedding.TrainableTextEmbedding;
import ai.djl.modality.nlp.embedding.TrainableWordEmbedding;
import ai.djl.modality.nlp.preprocess.LowerCaseConvertor;
import ai.djl.modality.nlp.preprocess.PunctuationSeparator;
import ai.djl.modality.nlp.preprocess.SimpleTokenizer;
import ai.djl.modality.nlp.preprocess.TextProcessor;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.nn.AbstractBlock;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
/**
* {@link TextData} is a utility for managing textual data within a {@link
* ai.djl.training.dataset.Dataset}.
*
* <p>See {@link TextDataset} for an example.
*/
public class TextData {
private List<NDArray> textEmbeddingList;
private List<String> rawText;
private List<TextProcessor> textProcessors;
private List<String> reservedTokens;
private TextEmbedding textEmbedding;
private Vocabulary vocabulary;
private String unknownToken;
private int embeddingSize;
private int size;
/**
* Constructs a new {@link TextData}.
*
* @param config the configuration for the {@link TextData}
*/
public TextData(Configuration config) {
this.textProcessors = config.textProcessors;
this.textEmbedding = config.textEmbedding;
this.vocabulary = config.vocabulary;
this.embeddingSize = config.embeddingSize;
this.unknownToken = config.unknownToken;
this.reservedTokens = config.reservedTokens;
}
/**
* Returns a good default {@link Configuration} to use for the constructor with defaults.
*
* @return a good default {@link Configuration} to use for the constructor with defaults
*/
public static Configuration getDefaultConfiguration() {
List<TextProcessor> defaultTextProcessors =
Arrays.asList(
new SimpleTokenizer(),
new LowerCaseConvertor(Locale.ENGLISH),
new PunctuationSeparator());
return new TextData.Configuration()
.setEmbeddingSize(15)
.setTextProcessors(defaultTextProcessors)
.setUnknownToken("<unk>")
.setReservedTokens(Arrays.asList("<bos>", "<eos>", "<pad>"));
}
/**
     * Preprocesses the text data into {@link NDArray}s using the data from the dataset.
     *
     * @param manager the {@link NDManager} used to create the embedding arrays
* @param newTextData the data from the dataset
* @throws EmbeddingException if there is an error while embedding input
*/
public void preprocess(NDManager manager, List<String> newTextData) throws EmbeddingException {
rawText = newTextData;
List<List<String>> textData = new ArrayList<>();
for (String textDatum : newTextData) {
List<String> tokens = Collections.singletonList(textDatum);
for (TextProcessor processor : textProcessors) {
tokens = processor.preprocess(tokens);
}
textData.add(tokens);
}
if (vocabulary == null) {
DefaultVocabulary.Builder vocabularyBuilder = DefaultVocabulary.builder();
vocabularyBuilder
.optMinFrequency(3)
.optReservedTokens(reservedTokens)
.optUnknownToken(unknownToken);
for (List<String> tokens : textData) {
vocabularyBuilder.add(tokens);
}
vocabulary = vocabularyBuilder.build();
}
if (textEmbedding == null) {
textEmbedding =
new TrainableTextEmbedding(
new TrainableWordEmbedding(vocabulary, embeddingSize));
}
size = textData.size();
textEmbeddingList = new ArrayList<>();
for (int i = 0; i < size; i++) {
List<String> tokenizedTextDatum = textData.get(i);
for (int j = 0; j < tokenizedTextDatum.size(); j++) {
tokenizedTextDatum.set(
j, vocabulary.getToken(vocabulary.getIndex(tokenizedTextDatum.get(j))));
}
textData.set(i, tokenizedTextDatum);
if (textEmbedding instanceof AbstractBlock) {
textEmbeddingList.add(
manager.create(textEmbedding.preprocessTextToEmbed(tokenizedTextDatum)));
} else {
textEmbeddingList.add(textEmbedding.embedText(manager, tokenizedTextDatum));
}
}
}
/**
* Sets the text processors.
*
* @param textProcessors the new textProcessors
*/
public void setTextProcessors(List<TextProcessor> textProcessors) {
this.textProcessors = textProcessors;
}
/**
* Sets the textEmbedding to embed the data with.
*
* @param textEmbedding the textEmbedding
*/
public void setTextEmbedding(TextEmbedding textEmbedding) {
this.textEmbedding = textEmbedding;
}
/**
* Gets the {@link TextEmbedding} used to embed the data with.
*
* @return the {@link TextEmbedding}
*/
public TextEmbedding getTextEmbedding() {
return textEmbedding;
}
/**
* Sets the embedding size.
*
* @param embeddingSize the embedding size
*/
public void setEmbeddingSize(int embeddingSize) {
this.embeddingSize = embeddingSize;
}
/**
* Gets the {@link DefaultVocabulary} built while preprocessing the text data.
*
* @return the {@link DefaultVocabulary}
*/
public Vocabulary getVocabulary() {
if (vocabulary == null) {
throw new IllegalStateException(
"This method must be called after preprocess is called on this object");
}
return vocabulary;
}
/**
* Gets the text embedding for the given index of the text input.
*
* @param manager the manager for the embedding array
* @param index the index of the text input
* @return the {@link NDArray} containing the text embedding
*/
public NDArray getEmbedding(NDManager manager, long index) {
NDArray embedding = textEmbeddingList.get(Math.toIntExact(index)).duplicate();
embedding.attach(manager);
return embedding;
}
/**
* Gets the raw textual input.
*
* @param index the index of the text input
* @return the raw text
*/
public String getRawText(long index) {
return rawText.get(Math.toIntExact(index));
}
/**
* Gets the textual input after preprocessing.
*
* @param index the index of the text input
* @return the list of processed tokens
*/
public List<String> getProcessedText(long index) {
List<String> tokens = Collections.singletonList(getRawText(index));
for (TextProcessor processor : textProcessors) {
tokens = processor.preprocess(tokens);
}
return tokens;
}
/**
* Returns the size of the data.
*
* @return the size of the data
*/
public int getSize() {
return size;
}
/**
* The configuration for creating a {@link TextData} value in a {@link
* ai.djl.training.dataset.Dataset}.
*/
public static final class Configuration {
private List<TextProcessor> textProcessors;
private TextEmbedding textEmbedding;
private Vocabulary vocabulary;
private Integer embeddingSize;
private String unknownToken;
private List<String> reservedTokens;
/**
* Sets the {@link TextProcessor}s to use for the text data.
*
* @param textProcessors the {@link TextProcessor}s
* @return this configuration
*/
public Configuration setTextProcessors(List<TextProcessor> textProcessors) {
this.textProcessors = textProcessors;
return this;
}
/**
* Sets the {@link TextEmbedding} to use to embed the text data.
*
* @param textEmbedding the {@link TextEmbedding}
* @return this configuration
*/
public Configuration setTextEmbedding(TextEmbedding textEmbedding) {
this.textEmbedding = textEmbedding;
return this;
}
/**
* Sets the {@link Vocabulary} to use to hold the text data.
*
* @param vocabulary the {@link Vocabulary}
* @return this configuration
*/
public Configuration setVocabulary(Vocabulary vocabulary) {
this.vocabulary = vocabulary;
return this;
}
/**
* Sets the size for new {@link TextEmbedding}s.
*
* @param embeddingSize the embedding size
* @return this configuration
*/
public Configuration setEmbeddingSize(int embeddingSize) {
this.embeddingSize = embeddingSize;
return this;
}
/**
* Sets the default unknown token.
*
* @param unknownToken the {@link String} value of unknown token
* @return this configuration
*/
public Configuration setUnknownToken(String unknownToken) {
this.unknownToken = unknownToken;
return this;
}
/**
* Sets the list of reserved tokens.
*
         * @param reservedTokens the list of reserved tokens
* @return this configuration
*/
public Configuration setReservedTokens(List<String> reservedTokens) {
this.reservedTokens = reservedTokens;
return this;
}
/**
* Updates this {@link Configuration} with the non-null values from another configuration.
*
* @param other the other configuration to use to update this
* @return this configuration after updating
*/
public Configuration update(Configuration other) {
textProcessors = other.textProcessors != null ? other.textProcessors : textProcessors;
textEmbedding = other.textEmbedding != null ? other.textEmbedding : textEmbedding;
vocabulary = other.vocabulary != null ? other.vocabulary : vocabulary;
embeddingSize = other.embeddingSize != null ? other.embeddingSize : embeddingSize;
unknownToken = other.unknownToken != null ? other.unknownToken : unknownToken;
reservedTokens = other.reservedTokens != null ? other.reservedTokens : reservedTokens;
return this;
}
}
}
|
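A hedged sketch of the preprocess flow using only the methods above; the sentences are invented, and with only two sentences most tokens fall below the default min-frequency threshold and map to the unknown token.

import ai.djl.basicdataset.utils.TextData;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;

import java.util.Arrays;

public final class TextDataExample {
    public static void main(String[] args) throws Exception {
        try (NDManager manager = NDManager.newBaseManager()) {
            TextData text = new TextData(TextData.getDefaultConfiguration());
            text.preprocess(
                    manager, Arrays.asList("Hello world.", "DJL makes deep learning simple."));
            System.out.println(text.getProcessedText(0)); // tokens after processing
            NDArray embedded = text.getEmbedding(manager, 0);
            System.out.println(embedded.getShape());
        }
    }
}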
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/utils/ThrowingFunction.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.basicdataset.utils;
/**
* Represents a function that accepts one argument, produces a result, and may throw an Exception.
*
* <p>This is a <a href="package-summary.html">functional interface</a> whose functional method is
* {@link #apply(Object)}.
*
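* <p>For example (illustrative), a file-reading function can surface its checked {@code
* IOException} directly:
*
* <pre>{@code
* ThrowingFunction<Path, String, IOException> reader =
*         path -> new String(Files.readAllBytes(path), StandardCharsets.UTF_8);
* }</pre>
*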
* @param <T> the type of the input to the function
* @param <R> the type of the result of the function
* @param <E> the type of the Exception that can be thrown
*/
@FunctionalInterface
public interface ThrowingFunction<T, R, E extends Exception> {
/**
* Applies this function to the given argument.
*
* @param t the function argument
* @return the function result
* @throws E Throws Exception E
*/
R apply(T t) throws E;
}
|
0
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset
|
java-sources/ai/djl/basicdataset/0.34.0/ai/djl/basicdataset/utils/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains utilities used within the basic datasets. */
package ai.djl.basicdataset.utils;
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/Performance.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.zero;
/**
* Describes the speed/accuracy tradeoff.
*
* <p>In deep learning, it is often possible to improve the accuracy of a model by using a larger
* model. However, this then results in slower latency and worse throughput. So, there is a tradeoff
* between the choices of speed and accuracy.
*/
public enum Performance {
/** Fast prioritizes speed over accuracy. */
FAST,
/** Balanced has a more even tradeoff of speed and accuracy. */
BALANCED,
/** Accurate prioritizes accuracy over speed. */
ACCURATE;
/**
* Returns the value matching this.
*
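* <p>For example (illustrative), an epoch count could be selected as:
*
* <pre>{@code
* int epochs = performance.switchPerformance(10, 20, 40);
* }</pre>
*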
* @param fast the value to return if this is fast
* @param balanced the value to return if this is balanced
* @param accurate the value to return if this is accurate
* @param <T> the value type
* @return the value matching this
*/
public <T> T switchPerformance(T fast, T balanced, T accurate) {
switch (this) {
case FAST:
return fast;
case BALANCED:
return balanced;
case ACCURATE:
return accurate;
default:
throw new IllegalArgumentException("Unknown performance");
}
}
}
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/RequireZoo.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.zero;
import ai.djl.engine.Engine;
import ai.djl.repository.zoo.ModelZoo;
/**
* A set of utilities for requiring a {@link ModelZoo}.
*
* <p>Throws an exception if the {@link ModelZoo} is not available.
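*
* <p>For example (illustrative):
*
* <pre>{@code
* // fails fast with an IllegalStateException if the PyTorch zoo or engine is absent
* RequireZoo.pytorch();
* }</pre>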
*/
public final class RequireZoo {
private RequireZoo() {}
/** Requires {@code ai.djl.basicmodelzoo.BasicModelZoo}. */
public static void basic() {
if (!ModelZoo.hasModelZoo("ai.djl.zoo")) {
throw new IllegalStateException(
"The basic model zoo is required, but not found.Please install it by following"
+ " https://docs.djl.ai/model-zoo/index.html#installation");
}
}
/** Requires {@code ai.djl.mxnet.zoo.MxModelZoo}. */
public static void mxnet() {
if (!ModelZoo.hasModelZoo("ai.djl.mxnet")) {
throw new IllegalStateException(
"The MXNet model zoo is required, but not found.Please install it by following"
+ " https://docs.djl.ai/master/engines/mxnet/mxnet-model-zoo/index.html#installation");
}
if (!Engine.hasEngine("MXNet")) {
throw new IllegalStateException(
"The MXNet engine is required, but not found.Please install it by following"
+ " https://docs.djl.ai/master/engines/mxnet/mxnet-engine/index.html#installation");
}
}
/** Requires {@code ai.djl.pytorch.zoo.PtModelZoo}. */
public static void pytorch() {
if (!ModelZoo.hasModelZoo("ai.djl.pytorch")) {
throw new IllegalStateException(
"The PyTorch model zoo is required, but not found.Please install it by"
+ " following"
+ " https://docs.djl.ai/master/pytorch/pytorch-model-zoo/index.html#installation");
}
if (!Engine.hasEngine("PyTorch")) {
throw new IllegalStateException(
"The PyTorch engine is required, but not found.Please install it by following"
+ " https://docs.djl.ai/master/pytorch/pytorch-engine/index.html#installation");
}
}
/** Requires {@code ai.djl.tensorflow.zoo.TfModelZoo}. */
public static void tensorflow() {
if (!ModelZoo.hasModelZoo("ai.djl.tensorflow")) {
throw new IllegalStateException(
"The TensorFlow model zoo is required, but not found.Please install it by"
+ " following"
+ " https://docs.djl.ai/master/engines/tensorflow/tensorflow-model-zoo/index.html#installation");
}
if (!Engine.hasEngine("TensorFlow")) {
throw new IllegalStateException(
"The TensorFlow engine is required, but not found.Please install it by"
+ " following"
+ " https://docs.djl.ai/master/engines/tensorflow/tensorflow-engine/index.html#installation");
}
}
}
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains a wrapper over DJL that requires zero deep-learning knowledge.
*
* <p><a href="https://docs.djl.ai/master/zero/index.html">See more details</a>.
*/
package ai.djl.zero;
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/cv/ImageClassification.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.zero.cv;
import ai.djl.Application.CV;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.basicdataset.cv.classification.ImageClassificationDataset;
import ai.djl.basicdataset.cv.classification.ImageNet;
import ai.djl.basicdataset.cv.classification.Mnist;
import ai.djl.basicmodelzoo.cv.classification.MobileNetV2;
import ai.djl.basicmodelzoo.cv.classification.ResNetV1;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.zero.Performance;
import ai.djl.zero.RequireZoo;
import java.io.IOException;
import java.util.List;
/** ImageClassification takes an image and classifies the main subject of the image. */
public final class ImageClassification {
private ImageClassification() {}
/**
* Returns a pretrained and ready to use image classification model from our model zoo.
*
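* <p>A usage sketch (illustrative; {@code image} is assumed to be an already-loaded {@link
* ai.djl.modality.cv.Image}):
*
* <pre>{@code
* try (ZooModel<Image, Classifications> model =
*                 ImageClassification.pretrained(Image.class, Classes.DIGITS, Performance.FAST);
*         Predictor<Image, Classifications> predictor = model.newPredictor()) {
*     Classifications result = predictor.predict(image);
* }
* }</pre>
*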
* @param input the input class between {@link ai.djl.modality.cv.Image}, {@link
* java.nio.file.Path}, {@link java.net.URL}, and {@link java.io.InputStream}
* @param classes the {@link Classes} that the image is classified into
* @param performance the performance tradeoff (see {@link Performance})
* @param <I> the input type
* @return the model as a {@link ZooModel} with the {@link Translator} included
* @throws MalformedModelException if the model zoo model is broken
* @throws ModelNotFoundException if the model could not be found
* @throws IOException if the model could not be loaded
*/
public static <I> ZooModel<I, Classifications> pretrained(
Class<I> input, Classes classes, Performance performance)
throws MalformedModelException, ModelNotFoundException, IOException {
Criteria.Builder<I, Classifications> criteria =
Criteria.builder()
.setTypes(input, Classifications.class)
.optApplication(CV.IMAGE_CLASSIFICATION);
switch (classes) {
case IMAGENET:
RequireZoo.mxnet();
String layers = performance.switchPerformance("18", "50", "152");
criteria.optGroupId("ai.djl.mxnet")
.optArtifactId("resnet")
.optFilter("dataset", "imagenet")
.optFilter("layers", layers);
break;
case DIGITS:
RequireZoo.basic();
criteria.optGroupId("ai.djl.zoo")
.optArtifactId("mlp")
.optFilter("dataset", "mnist");
break;
default:
throw new IllegalArgumentException("Unknown classes");
}
return criteria.build().loadModel();
}
/**
* Trains the recommended image classification model on a custom dataset.
*
* <p>In order to train on a custom dataset, you must create a custom {@link
* ImageClassificationDataset} to load your data.
*
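* <p>A usage sketch (illustrative; {@code myDataset} stands in for your custom dataset):
*
* <pre>{@code
* ZooModel<Image, Classifications> model =
*         ImageClassification.train(myDataset, Performance.BALANCED);
* model.save(Paths.get("build/model"), "imageClassification");
* }</pre>
*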
* @param dataset the data to train with
* @param performance to determine the desired model tradeoffs
* @return the model as a {@link ZooModel} with the {@link Translator} included
* @throws IOException if the dataset could not be loaded
* @throws TranslateException if the translator has errors
*/
public static ZooModel<Image, Classifications> train(
ImageClassificationDataset dataset, Performance performance)
throws IOException, TranslateException {
int channels = dataset.getImageChannels();
int width =
dataset.getImageWidth()
.orElseThrow(
() ->
new IllegalArgumentException(
"The dataset must have a fixed image width"));
int height =
dataset.getImageHeight()
.orElseThrow(
() ->
new IllegalArgumentException(
"The dataset must have a fixed image height"));
Shape imageShape = new Shape(channels, height, width);
List<String> classes = dataset.getClasses();
Dataset[] splitDataset = dataset.randomSplit(8, 2);
Dataset trainDataset = splitDataset[0];
Dataset validateDataset = splitDataset[1];
// Determine the layers based on performance
int numLayers = performance.switchPerformance(18, 50, 152);
Block block;
if (performance.equals(Performance.FAST)) {
// for small and fast cases, build a MobileNetV2
block = MobileNetV2.builder().setOutSize(classes.size()).build();
} else {
// for large cases, build a ResNet
block =
ResNetV1.builder()
.setImageShape(imageShape)
.setNumLayers(numLayers)
.setOutSize(classes.size())
.build();
}
Model model = Model.newInstance("ImageClassification");
model.setBlock(block);
TrainingConfig trainingConfig =
new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
.addEvaluator(new Accuracy())
.addTrainingListeners(TrainingListener.Defaults.basic());
try (Trainer trainer = model.newTrainer(trainingConfig)) {
trainer.initialize(new Shape(1).addAll(imageShape));
EasyTrain.fit(trainer, 35, trainDataset, validateDataset);
}
Translator<Image, Classifications> translator =
dataset.matchingTranslatorOptions().option(Image.class, Classifications.class);
return new ZooModel<>(model, translator);
}
/**
* The possible classes to classify the images into.
*
* <p>The classes available depends on the data that the model was trained with.
*/
public enum Classes {
/**
* ImageNet is a standard dataset of 1000 diverse classes.
*
* <p>The dataset can be found at {@link ImageNet}. You can <a
* href="https://djl-ai.s3.amazonaws.com/mlrepo/model/cv/image_classification/ai/djl/mxnet/synset.txt">view
* the list of classes here</a>.
*/
IMAGENET,
/**
* Classify images of the digits 0-9.
*
* <p>This contains models trained using the {@link Mnist} dataset.
*/
DIGITS
}
}
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/cv/ObjectDetection.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.zero.cv;
import ai.djl.Model;
import ai.djl.basicdataset.cv.ObjectDetectionDataset;
import ai.djl.basicmodelzoo.cv.object_detection.ssd.SingleShotDetection;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.SingleShotDetectionTranslator;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.SequentialBlock;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.evaluator.BoundingBoxError;
import ai.djl.training.evaluator.SingleShotDetectionAccuracy;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.SingleShotDetectionLoss;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.zero.Performance;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/** ObjectDetection takes an image and extracts one or more main subjects from the image. */
public final class ObjectDetection {
private ObjectDetection() {}
/**
* Trains the recommended object detection model on a custom dataset. Currently, trains a
* SingleShotDetection Model.
*
* <p>In order to train on a custom dataset, you must create a custom {@link
* ObjectDetectionDataset} to load your data.
*
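* <p>A usage sketch (illustrative; {@code myDataset} stands in for your custom dataset):
*
* <pre>{@code
* ZooModel<Image, DetectedObjects> model = ObjectDetection.train(myDataset, Performance.FAST);
* try (Predictor<Image, DetectedObjects> predictor = model.newPredictor()) {
*     DetectedObjects detection = predictor.predict(image);
* }
* }</pre>
*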
* @param dataset the data to train with
* @param performance to determine the desired model tradeoffs
* @return the model as a {@link ZooModel} with the {@link Translator} included
* @throws IOException if the dataset could not be loaded
* @throws TranslateException if the translator has errors
*/
public static ZooModel<Image, DetectedObjects> train(
ObjectDetectionDataset dataset, Performance performance)
throws IOException, TranslateException {
List<String> classes = dataset.getClasses();
int channels = dataset.getImageChannels();
int width =
dataset.getImageWidth()
.orElseThrow(
() ->
new IllegalArgumentException(
"The dataset must have a fixed image width"));
int height =
dataset.getImageHeight()
.orElseThrow(
() ->
new IllegalArgumentException(
"The dataset must have a fixed image height"));
Shape imageShape = new Shape(channels, height, width);
Dataset[] splitDataset = dataset.randomSplit(8, 2);
Dataset trainDataset = splitDataset[0];
Dataset validateDataset = splitDataset[1];
Block block = getSsdTrainBlock(classes.size());
Model model = Model.newInstance("ObjectDetection");
model.setBlock(block);
TrainingConfig trainingConfig =
new DefaultTrainingConfig(new SingleShotDetectionLoss())
.addEvaluator(new SingleShotDetectionAccuracy("classAccuracy"))
.addEvaluator(new BoundingBoxError("boundingBoxError"))
.addTrainingListeners(TrainingListener.Defaults.basic());
try (Trainer trainer = model.newTrainer(trainingConfig)) {
trainer.initialize(new Shape(1).addAll(imageShape));
EasyTrain.fit(trainer, 50, trainDataset, validateDataset);
}
Translator<Image, DetectedObjects> translator =
SingleShotDetectionTranslator.builder()
.addTransform(new ToTensor())
.optSynset(classes)
.optThreshold(0.6f)
.build();
return new ZooModel<>(model, translator);
}
private static Block getSsdTrainBlock(int numClasses) {
int[] numFilters = {16, 32, 64};
SequentialBlock baseBlock = new SequentialBlock();
for (int numFilter : numFilters) {
baseBlock.add(SingleShotDetection.getDownSamplingBlock(numFilter));
}
List<List<Float>> sizes = new ArrayList<>();
List<List<Float>> ratios = new ArrayList<>();
for (int i = 0; i < 5; i++) {
ratios.add(Arrays.asList(1f, 2f, 0.5f));
}
sizes.add(Arrays.asList(0.2f, 0.272f));
sizes.add(Arrays.asList(0.37f, 0.447f));
sizes.add(Arrays.asList(0.54f, 0.619f));
sizes.add(Arrays.asList(0.71f, 0.79f));
sizes.add(Arrays.asList(0.88f, 0.961f));
return SingleShotDetection.builder()
.setNumClasses(numClasses)
.setNumFeatures(3)
.optGlobalPool(true)
.setRatios(ratios)
.setSizes(sizes)
.setBaseNetwork(baseBlock)
.build();
}
}
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/cv/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains pretrained models and training for Computer Vision ({@link ai.djl.Application.CV}) tasks.
*/
package ai.djl.zero.cv;
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/tabular/TabularRegression.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.zero.tabular;
import ai.djl.Model;
import ai.djl.basicdataset.tabular.ListFeatures;
import ai.djl.basicdataset.tabular.TabularDataset;
import ai.djl.basicmodelzoo.tabular.TabNet;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.TabNetRegressionLoss;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.zero.Performance;
import java.io.IOException;
/** TabularRegression takes an NDList as input and outputs an NDList (for supervised learning). */
public final class TabularRegression {
private TabularRegression() {}
/**
* Trains a Model on a custom dataset. Currently, trains a TabNet Model.
*
* <p>In order to train on a custom dataset, you must create a custom {@link TabularDataset} to
* load your data.
*
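* <p>A usage sketch (illustrative; {@code myDataset} stands in for your custom dataset):
*
* <pre>{@code
* ZooModel<ListFeatures, Float> model = TabularRegression.train(myDataset, Performance.BALANCED);
* try (Predictor<ListFeatures, Float> predictor = model.newPredictor()) {
*     ListFeatures features = new ListFeatures();
*     features.add("0.25");
*     Float prediction = predictor.predict(features);
* }
* }</pre>
*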
* @param dataset the data to train with
* @param performance to determine the desired model tradeoffs
* @return the model as a {@link ZooModel}
* @throws IOException if the dataset could not be loaded
* @throws TranslateException if the translator has errors
*/
public static ZooModel<ListFeatures, Float> train(
TabularDataset dataset, Performance performance)
throws IOException, TranslateException {
Dataset[] splitDataset = dataset.randomSplit(8, 2);
Dataset trainDataset = splitDataset[0];
Dataset validateDataset = splitDataset[1];
int featureSize = dataset.getFeatureSize();
int labelSize = dataset.getLabelSize();
Block block;
if (performance.equals(Performance.FAST)) {
// for fast cases, we set the number of independent layers and shared layers lower
block =
TabNet.builder()
.setInputDim(featureSize)
.setOutDim(labelSize)
.optNumIndependent(1)
.optNumShared(1)
.build();
} else if (performance.equals(Performance.BALANCED)) {
block = TabNet.builder().setInputDim(featureSize).setOutDim(labelSize).build();
} else {
// for accurate cases, we set the number of independent layers and shared layers higher
block =
TabNet.builder()
.setInputDim(featureSize)
.setOutDim(labelSize)
.optNumIndependent(4)
.optNumShared(4)
.build();
}
Model model = Model.newInstance("tabular");
model.setBlock(block);
TrainingConfig trainingConfig =
new DefaultTrainingConfig(new TabNetRegressionLoss())
.addTrainingListeners(TrainingListener.Defaults.basic());
try (Trainer trainer = model.newTrainer(trainingConfig)) {
trainer.initialize(new Shape(1, featureSize));
EasyTrain.fit(trainer, 20, trainDataset, validateDataset);
}
Translator<ListFeatures, Float> translator =
dataset.matchingTranslatorOptions().option(ListFeatures.class, Float.class);
return new ZooModel<>(model, translator);
}
}
|
0
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero
|
java-sources/ai/djl/djl-zero/0.34.0/ai/djl/zero/tabular/package-info.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains training for Tabular ({@link ai.djl.Application.Tabular}) tasks. */
package ai.djl.zero.tabular;
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/DlrEngine.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.engine;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.dlr.jni.JniUtils;
import ai.djl.dlr.jni.LibUtils;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.ndarray.NDManager;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.GradientCollector;
/**
* The {@code DlrEngine} is an implementation of the {@link Engine} based on the <a
* href="https://github.com/neo-ai/neo-ai-dlr">Neo DLR</a>.
*
* <p>To get an instance of the {@code DlrEngine} when it is not the default Engine, call {@link
* Engine#getEngine(String)} with the Engine name "DLR".
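*
* <p>For example (illustrative):
*
* <pre>{@code
* Engine engine = Engine.getEngine("DLR");
* NDManager manager = engine.newBaseManager();
* }</pre>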
*/
public final class DlrEngine extends Engine {
public static final String ENGINE_NAME = "DLR";
static final int RANK = 10;
private Engine alternativeEngine;
private boolean initialized;
private DlrEngine() {}
static Engine newInstance() {
try {
LibUtils.loadLibrary();
return new DlrEngine();
} catch (Throwable t) {
throw new EngineException("Failed to load DLR native library", t);
}
}
/** {@inheritDoc} */
@Override
public Engine getAlternativeEngine() {
if (!initialized && !Boolean.getBoolean("ai.djl.dlr.disable_alternative")) {
Engine engine = Engine.getInstance();
if (engine.getRank() < getRank()) {
// alternativeEngine should not have the same rank as DLR
alternativeEngine = engine;
}
initialized = true;
}
return alternativeEngine;
}
/** {@inheritDoc} */
@Override
public String getEngineName() {
return ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getRank() {
return RANK;
}
/** {@inheritDoc} */
@Override
public String getVersion() {
return JniUtils.getDlrVersion();
}
/** {@inheritDoc} */
@Override
public boolean hasCapability(String capability) {
return false;
}
/** {@inheritDoc} */
@Override
public SymbolBlock newSymbolBlock(NDManager manager) {
throw new UnsupportedOperationException("DLR does not support empty SymbolBlock");
}
/** {@inheritDoc} */
@Override
public Model newModel(String name, Device device) {
// Only support CPU for now
if (device != null && device != Device.cpu()) {
throw new IllegalArgumentException("DLR only support CPU");
}
return new DlrModel(name, newBaseManager(Device.cpu()));
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager() {
return newBaseManager(null);
}
/** {@inheritDoc} */
@Override
public NDManager newBaseManager(Device device) {
return DlrNDManager.getSystemManager().newSubManager(device);
}
/** {@inheritDoc} */
@Override
public GradientCollector newGradientCollector() {
throw new UnsupportedOperationException("Not supported for DLR");
}
/** {@inheritDoc} */
@Override
public void setRandomSeed(int seed) {
throw new UnsupportedOperationException("Not supported for DLR");
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/DlrEngineProvider.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.engine;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
/** {@code DlrEngineProvider} is the DLR implementation of {@link EngineProvider}. */
public class DlrEngineProvider implements EngineProvider {
private static volatile Engine engine; // NOPMD
/** {@inheritDoc} */
@Override
public String getEngineName() {
return DlrEngine.ENGINE_NAME;
}
/** {@inheritDoc} */
@Override
public int getEngineRank() {
return DlrEngine.RANK;
}
/** {@inheritDoc} */
@Override
public Engine getEngine() {
if (engine == null) {
synchronized (DlrEngineProvider.class) {
// double-checked locking: re-test after acquiring the class-level lock
if (engine == null) {
engine = DlrEngine.newInstance();
}
}
}
return engine;
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/DlrModel.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.engine;
import ai.djl.BaseModel;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.inference.Predictor;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.translate.Translator;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
/**
* {@code DlrModel} is the DLR implementation of {@link Model}.
*
* <p>DlrModel contains all the methods in Model to load and process a model. In addition, it
* provides DLR-specific functionality.
*/
public class DlrModel extends BaseModel {
/**
* Constructs a new DLR Model.
*
* @param name the model name
* @param manager the {@link NDManager} to hold the NDArray
*/
DlrModel(String name, NDManager manager) {
super(name);
this.manager = manager;
this.manager.setName("dlrModel");
// DLR only support float32
dataType = DataType.FLOAT32;
}
/** {@inheritDoc} */
@Override
public void load(Path modelPath, String prefix, Map<String, ?> options) throws IOException {
setModelDir(modelPath);
if (prefix == null) {
prefix = modelName;
}
if (block != null) {
throw new UnsupportedOperationException("DLR does not support dynamic blocks");
}
checkModelFiles(prefix);
}
/** {@inheritDoc} */
@Override
public <I, O> Predictor<I, O> newPredictor(Translator<I, O> translator, Device device) {
return new DlrPredictor<>(this, modelDir.toString(), device, translator);
}
private void checkModelFiles(String prefix) throws IOException {
String libExt;
String os = System.getProperty("os.name").toLowerCase();
if (os.startsWith("mac")) {
libExt = ".dylib";
} else if (os.startsWith("linux")) {
libExt = ".so";
} else if (os.startsWith("win")) {
libExt = ".dll";
} else {
throw new IllegalStateException("found unsupported os");
}
// TODO make the check platform independent
Path module = modelDir.resolve(prefix + libExt);
if (Files.notExists(module) || !Files.isRegularFile(module)) {
throw new FileNotFoundException("module file (.so/.dylib/.dll) is missing");
}
Path params = modelDir.resolve(prefix + ".params");
// check the params file itself, not the module file
if (Files.notExists(params) || !Files.isRegularFile(params)) {
throw new FileNotFoundException("params file (.params) is missing");
}
Path graph = modelDir.resolve(prefix + ".json");
if (Files.notExists(graph) || !Files.isRegularFile(graph)) {
throw new FileNotFoundException("graph file (.json) is missing");
}
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/DlrNDArray.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.engine;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrayAdapter;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import java.nio.ByteBuffer;
import java.util.UUID;
/** {@code DlrNDArray} is the DLR implementation of {@link NDArray}. */
public class DlrNDArray extends NDArrayAdapter {
private ByteBuffer data;
/**
* Constructs a DLR NDArray from a {@link DlrNDManager} (internal; use {@link NDManager}
* instead).
*
* @param manager the manager to attach the new array to
* @param alternativeManager the alternative manager to execute unsupported operation
* @param data the underlying data
* @param shape the shape of {@code DlrNDArray}
* @param dataType the {@link DataType} of the {@link NDArray}
*/
DlrNDArray(
DlrNDManager manager,
NDManager alternativeManager,
ByteBuffer data,
Shape shape,
DataType dataType) {
super(manager, alternativeManager, shape, dataType, UUID.randomUUID().toString());
this.data = data;
manager.attachInternal(uid, this);
}
/** {@inheritDoc} */
@Override
public void intern(NDArray replaced) {
this.data = ((DlrNDArray) replaced).data;
}
/** {@inheritDoc} */
@Override
public void detach() {
manager.detachInternal(getUid());
manager = DlrNDManager.getSystemManager();
}
/** {@inheritDoc} */
@Override
public ByteBuffer toByteBuffer() {
data.rewind();
return data;
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/DlrNDManager.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.engine;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.ndarray.BaseNDManager;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/** {@code DlrNDManager} is the DLR implementation of {@link NDManager}. */
public class DlrNDManager extends BaseNDManager {
private static final DlrNDManager SYSTEM_MANAGER = new SystemManager();
private DlrNDManager(NDManager parent, Device device) {
super(parent, device);
}
static DlrNDManager getSystemManager() {
return SYSTEM_MANAGER;
}
/** {@inheritDoc} */
@Override
public final Engine getEngine() {
return Engine.getEngine(DlrEngine.ENGINE_NAME);
}
/** {@inheritDoc} */
@Override
public ByteBuffer allocateDirect(int capacity) {
return ByteBuffer.allocateDirect(capacity).order(ByteOrder.nativeOrder());
}
/** {@inheritDoc} */
@Override
public DlrNDArray from(NDArray array) {
if (array == null || array instanceof DlrNDArray) {
return (DlrNDArray) array;
}
return (DlrNDArray) create(array.toByteBuffer(), array.getShape(), array.getDataType());
}
/** {@inheritDoc} */
@Override
public DlrNDManager newSubManager(Device dev) {
DlrNDManager manager = new DlrNDManager(this, dev);
attachInternal(manager.uid, manager);
return manager;
}
/** {@inheritDoc} */
@Override
public NDArray create(Buffer data, Shape shape, DataType dataType) {
if (dataType != DataType.FLOAT32) {
if (data instanceof ByteBuffer) {
return new DlrNDArray(this, alternativeManager, (ByteBuffer) data, shape, dataType);
}
if (alternativeManager != null) {
return alternativeManager.create(data, shape, dataType);
}
throw new UnsupportedOperationException("DlrNDArray only supports float32.");
}
int size = Math.toIntExact(shape.size());
BaseNDManager.validateBuffer(data, dataType, size);
if (data instanceof ByteBuffer) {
return new DlrNDArray(this, alternativeManager, (ByteBuffer) data, shape, dataType);
}
ByteBuffer bb = ByteBuffer.allocate(size * dataType.getNumOfBytes());
bb.asFloatBuffer().put((FloatBuffer) data);
bb.rewind();
return new DlrNDArray(this, alternativeManager, bb, shape, dataType);
}
/** {@inheritDoc} */
@Override
public void close() {
super.close();
if (alternativeManager != null) {
alternativeManager.close();
alternativeManager = null;
}
}
/** The SystemManager is the root {@link DlrNDManager} of which all others are children. */
private static final class SystemManager extends DlrNDManager implements SystemNDManager {
SystemManager() {
super(null, null);
}
/** {@inheritDoc} */
@Override
public void close() {}
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/DlrPredictor.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.engine;
import ai.djl.Device;
import ai.djl.dlr.jni.JniUtils;
import ai.djl.inference.Predictor;
import ai.djl.translate.Translator;
/**
* {@code DlrPredictor} is a special implementation of {@link Predictor} for DLR.
*
* <p>The native DLR library does not support multi-threading, so when creating a new
* DlrPredictor, we copy the DLR model handle to work around the issue.
*/
public class DlrPredictor<I, O> extends Predictor<I, O> {
/**
* Creates a new instance of {@code DlrPredictor}.
*
* @param model the model on which the predictions are based
* @param modelDir the path to the model artifacts
* @param device the device that the model use
* @param translator the translator to be used
*/
public DlrPredictor(
DlrModel model, String modelDir, Device device, Translator<I, O> translator) {
super(model, translator, device, false);
long modelHandle = JniUtils.createDlrModel(modelDir, device);
block = new DlrSymbolBlock((DlrNDManager) manager, modelHandle);
// disable cpu affinity by default
JniUtils.useDlrCpuAffinity(modelHandle, false);
}
/** {@inheritDoc} */
@Override
public void close() {
super.close();
((DlrSymbolBlock) block).close();
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/DlrSymbolBlock.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.engine;
import ai.djl.dlr.jni.JniUtils;
import ai.djl.ndarray.NDList;
import ai.djl.nn.AbstractSymbolBlock;
import ai.djl.nn.ParameterList;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.ParameterStore;
import ai.djl.util.PairList;
import java.util.concurrent.atomic.AtomicReference;
/**
* {@code DlrSymbolBlock} is the DLR implementation of {@link SymbolBlock}.
*
* <p>You can create a {@code DlrSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*/
public class DlrSymbolBlock extends AbstractSymbolBlock implements AutoCloseable {
private AtomicReference<Long> handle;
private DlrNDManager manager;
/**
* Constructs a {@code DlrSymbolBlock}.
*
* <p>You can create a {@code DlrSymbolBlock} using {@link ai.djl.Model#load(java.nio.file.Path,
* String)}.
*
* @param manager the manager to use for the block
* @param handle the handle for native DLR model
*/
public DlrSymbolBlock(DlrNDManager manager, long handle) {
this.handle = new AtomicReference<>(handle);
this.manager = manager;
}
/** {@inheritDoc} */
@Override
protected NDList forwardInternal(
ParameterStore parameterStore,
NDList inputs,
boolean training,
PairList<String, Object> params) {
long modelHandle = handle.get();
// TODO maybe verify the number of inputs
// currently we assume the order of the input NDList is the same
// as the model input
try (DlrNDManager sub = (DlrNDManager) manager.newSubManager()) {
for (int i = 0; i < inputs.size(); ++i) {
DlrNDArray array = sub.from(inputs.get(i));
JniUtils.setDlrInput(modelHandle, array, i);
}
}
JniUtils.runDlrModel(modelHandle);
return JniUtils.getDlrOutputs(modelHandle, inputs.head().getManager());
}
/** {@inheritDoc} */
@Override
public void close() {
Long pointer = handle.getAndSet(null);
if (pointer != null) {
JniUtils.deleteDlrModel(pointer);
}
}
/** {@inheritDoc} */
@Override
public ParameterList getDirectParameters() {
throw new UnsupportedOperationException("Not yet supported");
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/engine/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying DLR Engine. */
package ai.djl.dlr.engine;
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/jni/DlrLibrary.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.jni;
/** A class containing utilities to interact with the DLR Engine's JNI layer. */
@SuppressWarnings("MissingJavadocMethod")
final class DlrLibrary {
static final DlrLibrary LIB = new DlrLibrary();
private DlrLibrary() {}
native int getDlrNumInputs(long handle);
native int getDlrNumWeights(long handle);
native String getDlrInputName(long handle, int index);
native String getDlrWeightName(long handle, int index);
native void setDLRInput(long handle, String name, long[] shape, float[] input, int dim);
native long[] getDlrOutputShape(long handle, int index);
native float[] getDlrOutput(long handle, int index);
native int getDlrNumOutputs(long handle);
native long createDlrModel(String modelPath, int deviceType, int deviceId);
native void deleteDlrModel(long handle);
native void runDlrModel(long handle);
native String getDlrBackend(long handle);
native String getDlrVersion();
native void setDlrNumThreads(long handle, int threads);
native void useDlrCPUAffinity(long handle, boolean use);
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/jni/JniUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.jni;
import ai.djl.Device;
import ai.djl.dlr.engine.DlrNDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
/**
* A class containing utilities to interact with the DLR Engine's Java Native Interface (JNI)
* layer.
*/
@SuppressWarnings("MissingJavadocMethod")
public final class JniUtils {
private JniUtils() {}
public static void setDlrInput(long modelHandle, DlrNDArray input, int index) {
long[] shape = input.getShape().getShape();
float[] data = input.toFloatArray();
String name = DlrLibrary.LIB.getDlrInputName(modelHandle, index);
DlrLibrary.LIB.setDLRInput(modelHandle, name, shape, data, shape.length);
}
public static NDList getDlrOutputs(long modelHandle, NDManager manager) {
int numOutputs = DlrLibrary.LIB.getDlrNumOutputs(modelHandle);
NDList res = new NDList(numOutputs);
for (int i = 0; i < numOutputs; i++) {
float[] data = DlrLibrary.LIB.getDlrOutput(modelHandle, i);
long[] shape = DlrLibrary.LIB.getDlrOutputShape(modelHandle, i);
res.add(manager.create(data, new Shape(shape)));
}
return res;
}
public static long createDlrModel(String path, Device device) {
int deviceId = 0;
if (!device.equals(Device.cpu())) {
deviceId = device.getDeviceId();
}
return DlrLibrary.LIB.createDlrModel(path, mapDevice(device.getDeviceType()), deviceId);
}
public static void deleteDlrModel(long modelHandle) {
DlrLibrary.LIB.deleteDlrModel(modelHandle);
}
public static void runDlrModel(long modelHandle) {
DlrLibrary.LIB.runDlrModel(modelHandle);
}
public static void setDlrNumThreads(long modelHandle, int threads) {
DlrLibrary.LIB.setDlrNumThreads(modelHandle, threads);
}
public static void useDlrCpuAffinity(long modelHandle, boolean use) {
DlrLibrary.LIB.useDlrCPUAffinity(modelHandle, use);
}
public static String getDlrVersion() {
return DlrLibrary.LIB.getDlrVersion();
}
private static int mapDevice(String deviceType) {
if (Device.Type.CPU.equals(deviceType)) {
return 1;
} else if (Device.Type.GPU.equals(deviceType)) {
return 2;
} else {
throw new IllegalArgumentException("The device " + deviceType + " is not supported");
}
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/jni/LibUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.jni;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.Platform;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utilities for finding the DLR Engine binary on the System.
*
* <p>The Engine will be searched for in a variety of locations in the following order:
*
* <ol>
* <li>In the path specified by the DLR_LIBRARY_PATH environment variable
* </ol>
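*
* <p>For example (illustrative), a locally built library can be selected before the engine is
* initialized; DLR_LIBRARY_PATH is read both as an environment variable and as a system
* property:
*
* <pre>{@code
* System.setProperty("DLR_LIBRARY_PATH", "/opt/dlr/lib");
* }</pre>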
*/
@SuppressWarnings("MissingJavadocMethod")
public final class LibUtils {
private static final Logger logger = LoggerFactory.getLogger(LibUtils.class);
private static final String LIB_NAME = "djl_dlr";
private static final String NATIVE_LIB_NAME = "dlr";
private static final Pattern VERSION_PATTERN =
Pattern.compile("(\\d+\\.\\d+\\.\\d+(-[a-z]+)?)(-SNAPSHOT)?(-\\d+)?");
private LibUtils() {}
public static void loadLibrary() {
String libName = findNativeOverrideLibrary();
if (libName == null) {
libName = findNativeLibrary();
if (libName == null) {
throw new IllegalStateException("Native library not found");
}
}
Path nativeLibDir = Paths.get(libName).getParent();
if (nativeLibDir == null || !nativeLibDir.toFile().isDirectory()) {
throw new IllegalStateException("Native folder cannot be found");
}
String jniPath = copyJniLibraryFromClasspath(nativeLibDir);
logger.debug("Loading DLR native library from: {}", libName);
System.load(libName); // NOPMD
logger.debug("Loading DLR JNI library from: {}", jniPath);
System.load(jniPath); // NOPMD
}
private static synchronized String findNativeLibrary() {
Platform platform = Platform.detectPlatform("dlr");
if (platform.isPlaceholder()) {
return downloadDlr(platform);
}
return copyNativeLibraryFromClasspath(platform);
}
private static String copyNativeLibraryFromClasspath(Platform platform) {
Path tmp = null;
try {
String libName = System.mapLibraryName(NATIVE_LIB_NAME);
Path cacheDir = getCacheDir(platform);
Path path = cacheDir.resolve(libName);
if (Files.exists(path)) {
return path.toAbsolutePath().toString();
}
Path dlrCacheRoot = Utils.getEngineCacheDir("dlr");
Files.createDirectories(dlrCacheRoot);
tmp = Files.createTempDirectory(dlrCacheRoot, "tmp");
for (String file : platform.getLibraries()) {
String libPath = "native/lib/" + file;
logger.info("Extracting {} to cache ...", libPath);
try (InputStream is = ClassLoaderUtils.getResourceAsStream(libPath)) {
Files.copy(is, tmp.resolve(file), StandardCopyOption.REPLACE_EXISTING);
}
}
Utils.moveQuietly(tmp, cacheDir);
return path.toAbsolutePath().toString();
} catch (IOException e) {
throw new IllegalStateException("Failed to extract DLR native library", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static String findLibraryInPath(String libPath) {
String[] paths = libPath.split(File.pathSeparator);
List<String> mappedLibNames =
Collections.singletonList(System.mapLibraryName(NATIVE_LIB_NAME));
for (String path : paths) {
File p = new File(path);
if (!p.exists()) {
continue;
}
for (String name : mappedLibNames) {
if (p.isFile() && p.getName().endsWith(name)) {
return p.getAbsolutePath();
}
File file = new File(path, name);
if (file.exists() && file.isFile()) {
return file.getAbsolutePath();
}
}
}
return null;
}
private static String findNativeOverrideLibrary() {
String libPath = Utils.getEnvOrSystemProperty("DLR_LIBRARY_PATH");
if (libPath != null) {
String libName = findLibraryInPath(libPath);
if (libName != null) {
return libName;
}
}
libPath = System.getProperty("java.library.path");
if (libPath != null) {
return findLibraryInPath(libPath);
}
return null;
}
private static String copyJniLibraryFromClasspath(Path nativeDir) {
String name = System.mapLibraryName(LIB_NAME);
Platform platform = Platform.detectPlatform("dlr");
String classifier = platform.getClassifier();
String djlVersion = platform.getApiVersion();
Path path = nativeDir.resolve(djlVersion + '-' + name);
if (Files.exists(path)) {
return path.toAbsolutePath().toString();
}
Path tmp = null;
// both cpu & gpu share the same jnilib
String lib = "jnilib/" + classifier + '/' + name;
try (InputStream is = ClassLoaderUtils.getResourceAsStream(lib)) {
tmp = Files.createTempFile(nativeDir, "jni", "tmp");
Files.copy(is, tmp, StandardCopyOption.REPLACE_EXISTING);
Utils.moveQuietly(tmp, path);
return path.toAbsolutePath().toString();
} catch (IOException e) {
throw new IllegalStateException("Cannot copy jni files", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static String downloadDlr(Platform platform) {
String version = platform.getVersion();
String flavor = platform.getFlavor();
String os = platform.getOsPrefix();
String libName = System.mapLibraryName(NATIVE_LIB_NAME);
Path cacheDir = getCacheDir(platform);
Path path = cacheDir.resolve(libName);
if (Files.exists(path)) {
return path.toAbsolutePath().toString();
}
// if the file is not found, download the native library
Path dlrCacheRoot = Utils.getEngineCacheDir("dlr");
Matcher matcher = VERSION_PATTERN.matcher(version);
if (!matcher.matches()) {
throw new IllegalArgumentException("Unexpected version: " + version);
}
String link = "https://publish.djl.ai/dlr-" + matcher.group(1) + "/native";
Path tmp = null;
try (InputStream is = Utils.openUrl(link + "/files.txt")) {
Files.createDirectories(dlrCacheRoot);
List<String> lines = Utils.readLines(is);
if (flavor.startsWith("cu")
&& !lines.contains(flavor + '/' + os + "/native/lib/" + libName)) {
logger.warn("No matching cuda flavor for {} found: {}.", os, flavor);
// fallback to CPU
flavor = "cpu";
// check again
path = cacheDir.resolve(libName);
if (Files.exists(path)) {
return path.toAbsolutePath().toString();
}
}
tmp = Files.createTempDirectory(dlrCacheRoot, "tmp");
boolean found = false;
for (String line : lines) {
if (line.startsWith(os + '/' + flavor + '/')) {
found = true;
URL url = new URL(link + '/' + line);
String fileName = line.substring(line.lastIndexOf('/') + 1);
logger.info("Downloading {} ...", url);
try (InputStream fis = Utils.openUrl(url)) {
Files.copy(fis, tmp.resolve(fileName), StandardCopyOption.REPLACE_EXISTING);
}
}
}
if (!found) {
throw new IllegalStateException(
"No DLR native library matches your operating system: " + platform);
}
Utils.moveQuietly(tmp, cacheDir);
return path.toAbsolutePath().toString();
} catch (IOException e) {
throw new IllegalStateException("Failed to download DLR native library", e);
} finally {
if (tmp != null) {
Utils.deleteQuietly(tmp);
}
}
}
private static Path getCacheDir(Platform platform) {
String version = platform.getVersion();
String flavor = platform.getFlavor();
String classifier = platform.getClassifier();
Path cacheDir = Utils.getEngineCacheDir("dlr");
logger.debug("Using cache dir: {}", cacheDir);
return cacheDir.resolve(version + '-' + flavor + '-' + classifier);
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/jni/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes to interface with the underlying DLR Engine. */
package ai.djl.dlr.jni;
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/zoo/DlrModelZoo.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.zoo;
import ai.djl.Application.CV;
import ai.djl.dlr.engine.DlrEngine;
import ai.djl.repository.Repository;
import ai.djl.repository.zoo.ModelZoo;
import java.util.Collections;
import java.util.Set;
/** DlrModelZoo is a repository that contains all DLR models for DJL. */
public class DlrModelZoo extends ModelZoo {
private static final String DJL_REPO_URL = "https://mlrepo.djl.ai/";
private static final Repository REPOSITORY = Repository.newInstance("Dlr", DJL_REPO_URL);
public static final String GROUP_ID = "ai.djl.dlr";
DlrModelZoo() {
addModel(REPOSITORY.model(CV.IMAGE_CLASSIFICATION, GROUP_ID, "resnet", "0.0.1"));
}
/** {@inheritDoc} */
@Override
public String getGroupId() {
return GROUP_ID;
}
/** {@inheritDoc} */
@Override
public Set<String> getSupportedEngines() {
return Collections.singleton(DlrEngine.ENGINE_NAME);
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/zoo/DlrZooProvider.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.dlr.zoo;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooProvider;
/** A DLR model zoo provider that implements the {@link ai.djl.repository.zoo.ZooProvider} interface. */
public class DlrZooProvider implements ZooProvider {
/** {@inheritDoc} */
@Override
public ModelZoo getModelZoo() {
return new DlrModelZoo();
}
}
|
0
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr
|
java-sources/ai/djl/dlr/dlr-engine/0.20.0/ai/djl/dlr/zoo/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the built-in {@link ai.djl.dlr.zoo.DlrModelZoo}. */
package ai.djl.dlr.zoo;
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/ActionRecognition.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Application;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An example of inference using an action recognition model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/action_recognition.md">doc</a> for
* information about this example.
*/
public final class ActionRecognition {
private static final Logger logger = LoggerFactory.getLogger(ActionRecognition.class);
private ActionRecognition() {}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
Classifications classification = ActionRecognition.predict();
logger.info("{}", classification);
}
public static Classifications predict() throws IOException, ModelException, TranslateException {
Path imageFile = Paths.get("src/test/resources/action_discus_throw.png");
Image img = ImageFactory.getInstance().fromFile(imageFile);
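        // Select a model from the zoo by application and filters rather than an explicit artifact id.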
Criteria<Image, Classifications> criteria =
Criteria.builder()
.optApplication(Application.CV.ACTION_RECOGNITION)
.setTypes(Image.class, Classifications.class)
.optFilter("backbone", "inceptionv3")
.optFilter("dataset", "ucf101")
.optProgress(new ProgressBar())
.build();
try (ZooModel<Image, Classifications> inception = ModelZoo.loadModel(criteria)) {
try (Predictor<Image, Classifications> action = inception.newPredictor()) {
return action.predict(img);
}
}
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/BertQaInference.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Application;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.nlp.qa.QAInput;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An example of inference using BertQA.
*
* <p>See:
*
* <ul>
* <li>the <a href="https://github.com/awslabs/djl/blob/master/jupyter/BERTQA.ipynb">jupyter
* demo</a> with more information about BERT.
* <li>the <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/BERT_question_and_answer.md">docs</a>
* for information about running this example.
* </ul>
*/
public final class BertQaInference {
private static final Logger logger = LoggerFactory.getLogger(BertQaInference.class);
private BertQaInference() {}
public static void main(String[] args) throws IOException, TranslateException, ModelException {
String answer = BertQaInference.predict();
logger.info("Answer: {}", answer);
}
public static String predict() throws IOException, TranslateException, ModelException {
String question = "When did BBC Japan start broadcasting?";
String paragraph =
"BBC Japan was a general entertainment Channel.\n"
+ "Which operated between December 2004 and April 2006.\n"
+ "It ceased operations after its Japanese distributor folded.";
QAInput input = new QAInput(question, paragraph);
logger.info("Paragraph: {}", input.getParagraph());
logger.info("Question: {}", input.getQuestion());
Criteria<QAInput, String> criteria =
Criteria.builder()
.optApplication(Application.NLP.QUESTION_ANSWER)
.setTypes(QAInput.class, String.class)
.optFilter("backbone", "bert")
.optProgress(new ProgressBar())
.build();
try (ZooModel<QAInput, String> model = ModelZoo.loadModel(criteria)) {
try (Predictor<QAInput, String> predictor = model.newPredictor()) {
return predictor.predict(input);
}
}
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/ImageClassification.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Model;
import ai.djl.ModelException;
import ai.djl.basicmodelzoo.basic.Mlp;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.ImageClassificationTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An example of inference using an image classification model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/image_classification.md">doc</a>
* for information about this example.
*/
public final class ImageClassification {
private static final Logger logger = LoggerFactory.getLogger(ImageClassification.class);
private ImageClassification() {}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
Classifications classifications = ImageClassification.predict();
logger.info("{}", classifications);
}
public static Classifications predict() throws IOException, ModelException, TranslateException {
Path imageFile = Paths.get("src/test/resources/0.png");
Image img = ImageFactory.getInstance().fromFile(imageFile);
String modelName = "mlp";
try (Model model = Model.newInstance(modelName)) {
model.setBlock(new Mlp(28 * 28, 10, new int[] {128, 64}));
            // Assumes you have run the TrainMnist.java example and saved the model in the build/model folder.
Path modelDir = Paths.get("build/model");
model.load(modelDir);
List<String> classes =
IntStream.range(0, 10).mapToObj(String::valueOf).collect(Collectors.toList());
Translator<Image, Classifications> translator =
ImageClassificationTranslator.builder()
.addTransform(new ToTensor())
.optSynset(classes)
.build();
try (Predictor<Image, Classifications> predictor = model.newPredictor(translator)) {
return predictor.predict(img);
}
}
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/InstanceSegmentation.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Application;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An example of inference using an instance segmentation model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/instance_segmentation.md">doc</a>
* for information about this example.
*/
public final class InstanceSegmentation {
private static final Logger logger = LoggerFactory.getLogger(InstanceSegmentation.class);
private InstanceSegmentation() {}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
DetectedObjects detection = InstanceSegmentation.predict();
logger.info("{}", detection);
}
public static DetectedObjects predict() throws IOException, ModelException, TranslateException {
Path imageFile = Paths.get("src/test/resources/segmentation.jpg");
Image img = ImageFactory.getInstance().fromFile(imageFile);
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optApplication(Application.CV.INSTANCE_SEGMENTATION)
.setTypes(Image.class, DetectedObjects.class)
.optFilter("backbone", "resnet18")
.optFilter("flavor", "v1b")
.optFilter("dataset", "coco")
.optProgress(new ProgressBar())
.build();
try (ZooModel<Image, DetectedObjects> model = ModelZoo.loadModel(criteria)) {
try (Predictor<Image, DetectedObjects> predictor = model.newPredictor()) {
DetectedObjects detection = predictor.predict(img);
saveBoundingBoxImage(img, detection);
return detection;
}
}
}
private static void saveBoundingBoxImage(Image img, DetectedObjects detection)
throws IOException {
Path outputDir = Paths.get("build/output");
Files.createDirectories(outputDir);
        // Make an image copy with an alpha channel because the original image was a jpg
Image newImage = img.duplicate(Image.Type.TYPE_INT_ARGB);
newImage.drawBoundingBoxes(detection);
Path imagePath = outputDir.resolve("instances.png");
newImage.save(Files.newOutputStream(imagePath), "png");
logger.info("Segmentation result image has been saved in: {}", imagePath);
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/ListModels.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Application;
import ai.djl.repository.Artifact;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ModelZoo;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
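/** An example of listing the models available in the model zoo. */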
public final class ListModels {
private static final Logger logger = LoggerFactory.getLogger(ListModels.class);
private ListModels() {}
public static void main(String[] args) throws IOException, ModelNotFoundException {
Map<Application, List<Artifact>> models = ModelZoo.listModels();
models.forEach(
(app, list) -> {
String appName = app.getPath().replace('/', '.').toUpperCase();
list.forEach(artifact -> logger.info("{} {}", appName, artifact));
});
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/LoadModel.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Application;
import ai.djl.ModelException;
import ai.djl.examples.inference.benchmark.util.Arguments;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.transform.CenterCrop;
import ai.djl.modality.cv.transform.Resize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.ImageClassificationTranslator;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import java.io.IOException;
import java.nio.file.Path;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
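/** An example of loading an image classification model from the model zoo or a local model folder. */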
public final class LoadModel {
private static final Logger logger = LoggerFactory.getLogger(LoadModel.class);
private LoadModel() {}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
Options options = Arguments.getOptions();
try {
DefaultParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, args, null, false);
Arguments arguments = new Arguments(cmd);
Classifications classifications = predict(arguments);
logger.info("{}", classifications);
} catch (ParseException e) {
HelpFormatter formatter = new HelpFormatter();
formatter.setLeftPadding(1);
formatter.setWidth(120);
formatter.printHelp(e.getMessage(), options);
}
}
public static Classifications predict(Arguments arguments)
throws IOException, ModelException, TranslateException {
Path imageFile = arguments.getImageFile();
Image img = ImageFactory.getInstance().fromFile(imageFile);
String artifactId = arguments.getArtifactId();
Criteria.Builder<Image, Classifications> builder =
Criteria.builder()
.optApplication(Application.CV.IMAGE_CLASSIFICATION)
.setTypes(Image.class, Classifications.class)
.optArtifactId(artifactId)
.optFilters(arguments.getCriteria())
.optProgress(new ProgressBar());
if (artifactId.startsWith("ai.djl.localmodelzoo")) {
// load model from local folder
            // since a local pre-trained model doesn't have a translator defined,
// we need to supply a translator manually.
builder.optTranslator(getTranslator());
}
Criteria<Image, Classifications> criteria = builder.build();
try (ZooModel<Image, Classifications> model = ModelZoo.loadModel(criteria);
Predictor<Image, Classifications> predictor = model.newPredictor()) {
return predictor.predict(img);
}
}
private static Translator<Image, Classifications> getTranslator() {
        // This ImageClassificationTranslator is just a default; you need to
        // make proper changes to match your local model's behavior.
return ImageClassificationTranslator.builder()
.addTransform(new CenterCrop())
.addTransform(new Resize(224, 224))
.addTransform(new ToTensor())
.build();
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/ObjectDetection.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Application;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An example of inference using an object detection model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/object_detection.md">doc</a> for
* information about this example.
*/
public final class ObjectDetection {
private static final Logger logger = LoggerFactory.getLogger(ObjectDetection.class);
private ObjectDetection() {}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
DetectedObjects detection = ObjectDetection.predict();
logger.info("{}", detection);
}
public static DetectedObjects predict() throws IOException, ModelException, TranslateException {
Path imageFile = Paths.get("src/test/resources/dog_bike_car.jpg");
Image img = ImageFactory.getInstance().fromFile(imageFile);
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optApplication(Application.CV.OBJECT_DETECTION)
.setTypes(Image.class, DetectedObjects.class)
.optFilter("backbone", "resnet50")
.optProgress(new ProgressBar())
.build();
try (ZooModel<Image, DetectedObjects> model = ModelZoo.loadModel(criteria)) {
try (Predictor<Image, DetectedObjects> predictor = model.newPredictor()) {
DetectedObjects detection = predictor.predict(img);
saveBoundingBoxImage(img, detection);
return detection;
}
}
}
private static void saveBoundingBoxImage(Image img, DetectedObjects detection)
throws IOException {
Path outputDir = Paths.get("build/output");
Files.createDirectories(outputDir);
        // Make an image copy with an alpha channel because the original image was a jpg
Image newImage = img.duplicate(Image.Type.TYPE_INT_ARGB);
newImage.drawBoundingBoxes(detection);
Path imagePath = outputDir.resolve("detected-dog_bike_car.png");
// OpenJDK can't save jpg with alpha channel
newImage.save(Files.newOutputStream(imagePath), "png");
logger.info("Detected objects image has been saved in: {}", imagePath);
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/PoseEstimation.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference;
import ai.djl.Application;
import ai.djl.MalformedModelException;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Joints;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An example of inference using a pose estimation model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/pose_estimation.md">doc</a> for
* information about this example.
*/
public final class PoseEstimation {
private static final Logger logger = LoggerFactory.getLogger(PoseEstimation.class);
private PoseEstimation() {}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
Joints joints = PoseEstimation.predict();
logger.info("{}", joints);
}
public static Joints predict() throws IOException, ModelException, TranslateException {
Path imageFile = Paths.get("src/test/resources/pose_soccer.png");
Image img = ImageFactory.getInstance().fromFile(imageFile);
Image person = predictPersonInImage(img);
if (person == null) {
logger.warn("No person found in image.");
return null;
}
return predictJointsInPerson(person);
}
private static Image predictPersonInImage(Image img)
throws MalformedModelException, ModelNotFoundException, IOException,
TranslateException {
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optApplication(Application.CV.OBJECT_DETECTION)
.setTypes(Image.class, DetectedObjects.class)
.optFilter("size", "512")
.optFilter("backbone", "resnet50")
.optFilter("flavor", "v1")
.optFilter("dataset", "voc")
.optProgress(new ProgressBar())
.build();
DetectedObjects detectedObjects;
try (ZooModel<Image, DetectedObjects> ssd = ModelZoo.loadModel(criteria)) {
try (Predictor<Image, DetectedObjects> predictor = ssd.newPredictor()) {
detectedObjects = predictor.predict(img);
}
}
List<DetectedObjects.DetectedObject> items = detectedObjects.items();
for (DetectedObjects.DetectedObject item : items) {
if ("person".equals(item.getClassName())) {
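                // Bounding box coordinates are fractions of the image size;
                // scale by width and height to crop in pixels.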
Rectangle rect = item.getBoundingBox().getBounds();
int width = img.getWidth();
int height = img.getHeight();
return img.getSubimage(
(int) (rect.getX() * width),
(int) (rect.getY() * height),
(int) (rect.getWidth() * width),
(int) (rect.getHeight() * height));
}
}
return null;
}
private static Joints predictJointsInPerson(Image person)
throws MalformedModelException, ModelNotFoundException, IOException,
TranslateException {
Criteria<Image, Joints> criteria =
Criteria.builder()
.optApplication(Application.CV.POSE_ESTIMATION)
.setTypes(Image.class, Joints.class)
.optFilter("backbone", "resnet18")
.optFilter("flavor", "v1b")
.optFilter("dataset", "imagenet")
.build();
try (ZooModel<Image, Joints> pose = ModelZoo.loadModel(criteria)) {
try (Predictor<Image, Joints> predictor = pose.newPredictor()) {
Joints joints = predictor.predict(person);
saveJointsImage(person, joints);
return joints;
}
}
}
private static void saveJointsImage(Image img, Joints joints) throws IOException {
Path outputDir = Paths.get("build/output");
Files.createDirectories(outputDir);
img.drawJoints(joints);
Path imagePath = outputDir.resolve("joints.png");
// Must use png format because you can't save as jpg with an alpha channel
img.save(Files.newOutputStream(imagePath), "png");
logger.info("Pose image has been saved in: {}", imagePath);
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains examples of performing inference using pre-trained models. */
package ai.djl.examples.inference;
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark/Benchmark.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference.benchmark;
import ai.djl.ModelException;
import ai.djl.examples.inference.benchmark.util.AbstractBenchmark;
import ai.djl.examples.inference.benchmark.util.Arguments;
import ai.djl.inference.Predictor;
import ai.djl.metric.Metrics;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.listener.MemoryTrainingListener;
import ai.djl.translate.TranslateException;
import java.io.IOException;
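/** A benchmark that runs inference iterations sequentially on a single predictor. */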
public final class Benchmark extends AbstractBenchmark {
public static void main(String[] args) {
if (new Benchmark().runBenchmark(args)) {
System.exit(0); // NOPMD
}
System.exit(-1); // NOPMD
}
/** {@inheritDoc} */
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object predict(Arguments arguments, Metrics metrics, int iteration)
throws IOException, ModelException, TranslateException, ClassNotFoundException {
Object inputData = arguments.getInputData();
try (ZooModel<?, ?> model = loadModel(arguments, metrics)) {
Object predictResult = null;
try (Predictor predictor = model.newPredictor()) {
predictor.setMetrics(metrics); // Let predictor collect metrics
for (int i = 0; i < iteration; ++i) {
predictResult = predictor.predict(inputData);
progressBar.update(i);
MemoryTrainingListener.collectMemoryInfo(metrics);
}
}
return predictResult;
}
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark/MultithreadedBenchmark.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference.benchmark;
import ai.djl.ModelException;
import ai.djl.examples.inference.benchmark.util.AbstractBenchmark;
import ai.djl.examples.inference.benchmark.util.Arguments;
import ai.djl.inference.Predictor;
import ai.djl.metric.Metrics;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.listener.MemoryTrainingListener;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
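/** A benchmark that runs inference concurrently, using one predictor per worker thread. */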
public class MultithreadedBenchmark extends AbstractBenchmark {
private static final Logger logger = LoggerFactory.getLogger(MultithreadedBenchmark.class);
public static void main(String[] args) {
if (new MultithreadedBenchmark().runBenchmark(args)) {
System.exit(0); // NOPMD
}
System.exit(-1); // NOPMD
}
/** {@inheritDoc} */
@Override
public Object predict(Arguments arguments, Metrics metrics, int iteration)
throws IOException, ModelException, ClassNotFoundException {
Object inputData = arguments.getInputData();
ZooModel<?, ?> model = loadModel(arguments, metrics);
int numOfThreads = arguments.getThreads();
AtomicInteger counter = new AtomicInteger(iteration);
logger.info("Multithreaded inference with {} threads.", numOfThreads);
List<PredictorCallable> callables = new ArrayList<>(numOfThreads);
for (int i = 0; i < numOfThreads; ++i) {
callables.add(new PredictorCallable(model, inputData, metrics, counter, i, i == 0));
}
Object classification = null;
ExecutorService executorService = Executors.newFixedThreadPool(numOfThreads);
int successThreads = 0;
try {
            metrics.addMetric("mt_start", System.currentTimeMillis(), "millis");
List<Future<Object>> futures = executorService.invokeAll(callables);
for (Future<Object> future : futures) {
try {
classification = future.get();
++successThreads;
} catch (InterruptedException | ExecutionException e) {
logger.error("", e);
}
}
} catch (InterruptedException e) {
logger.error("", e);
} finally {
executorService.shutdown();
}
if (successThreads != numOfThreads) {
logger.error("Only {}/{} threads finished.", successThreads, numOfThreads);
}
return classification;
}
private static class PredictorCallable implements Callable<Object> {
@SuppressWarnings("rawtypes")
private Predictor predictor;
private Object inputData;
private Metrics metrics;
private String workerId;
private boolean collectMemory;
private AtomicInteger counter;
private int total;
private int steps;
public PredictorCallable(
ZooModel<?, ?> model,
Object inputData,
Metrics metrics,
AtomicInteger counter,
int workerId,
boolean collectMemory) {
this.predictor = model.newPredictor();
this.inputData = inputData;
this.metrics = metrics;
this.counter = counter;
this.workerId = String.format("%02d", workerId);
this.collectMemory = collectMemory;
predictor.setMetrics(metrics);
total = counter.get();
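            // Log completed requests every "steps" iterations: 1 for small runs,
            // otherwise a power of ten on the order of a tenth of the total.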
if (total < 10) {
steps = 1;
} else {
steps = (int) Math.pow(10, (int) (Math.log10(total)) - 1);
}
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public Object call() throws TranslateException {
Object result = null;
int count = 0;
int remaining;
while ((remaining = counter.decrementAndGet()) > 0) {
result = predictor.predict(inputData);
if (collectMemory) {
MemoryTrainingListener.collectMemoryInfo(metrics);
}
int processed = total - remaining;
logger.trace("Worker-{}: {} iteration finished.", workerId, ++count);
if (processed % steps == 0) {
logger.info("Completed {} requests", processed);
}
}
logger.debug("Worker-{}: finished.", workerId);
return result;
}
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains inference benchmarking examples and code.
*
* <p>See the inference benchmarking utilities in {@link ai.djl.examples.inference.benchmark.util}.
*/
package ai.djl.examples.inference.benchmark;
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark/util/AbstractBenchmark.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference.benchmark.util;
import ai.djl.Device;
import ai.djl.ModelException;
import ai.djl.engine.Engine;
import ai.djl.examples.inference.benchmark.MultithreadedBenchmark;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.listener.MemoryTrainingListener;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Batchifier;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.time.Duration;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Abstract class that encapsulates command line options for the example project. */
public abstract class AbstractBenchmark {
private static final Logger logger = LoggerFactory.getLogger(AbstractBenchmark.class);
private Object lastResult;
protected ProgressBar progressBar;
/**
     * Abstract predict method that must be implemented by subclasses.
*
* @param arguments command line arguments
* @param metrics {@link Metrics} to collect statistic information
     * @param iteration the number of prediction iterations to run
     * @return the prediction result
     * @throws IOException if an I/O error occurs when loading the model
     * @throws ModelException if the specified model is not found or there is a parameter error
     * @throws TranslateException if an error occurs when processing input or output
     * @throws ClassNotFoundException if the input or output class cannot be loaded
*/
protected abstract Object predict(Arguments arguments, Metrics metrics, int iteration)
throws IOException, ModelException, TranslateException, ClassNotFoundException;
/**
* Returns command line options.
*
     * <p>Child classes can override this method and return different command line options.
*
* @return command line options
*/
protected Options getOptions() {
return Arguments.getOptions();
}
/**
     * Parses the command line into arguments.
     *
     * <p>Child classes can override this method and return an extension of {@link Arguments}.
     *
     * @param cmd the list of arguments parsed against an {@link Options} descriptor
* @return parsed arguments
*/
protected Arguments parseArguments(CommandLine cmd) {
return new Arguments(cmd);
}
/**
     * Executes the example code.
     *
     * @param args the raw input arguments
     * @return {@code true} if the example execution completed successfully
*/
public final boolean runBenchmark(String[] args) {
Options options = getOptions();
try {
DefaultParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, args, null, false);
Arguments arguments = parseArguments(cmd);
long init = System.nanoTime();
String version = Engine.getInstance().getVersion();
long loaded = System.nanoTime();
logger.info(
String.format(
"Load library %s in %.3f ms.", version, (loaded - init) / 1_000_000f));
Duration duration = Duration.ofMinutes(arguments.getDuration());
if (arguments.getDuration() != 0) {
logger.info(
"Running {} on: {}, duration: {} minutes.",
getClass().getSimpleName(),
Device.defaultDevice(),
duration.toMinutes());
} else {
logger.info(
"Running {} on: {}.", getClass().getSimpleName(), Device.defaultDevice());
}
int numOfThreads = arguments.getThreads();
int iteration = arguments.getIteration();
if (this instanceof MultithreadedBenchmark) {
iteration = Math.max(iteration, numOfThreads * 2);
}
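            // Repeat the benchmark until the requested duration elapses;
            // a duration of 0 results in a single pass.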
while (!duration.isNegative()) {
Metrics metrics = new Metrics(); // Reset Metrics for each test loop.
progressBar = new ProgressBar("Iteration", iteration);
long begin = System.currentTimeMillis();
lastResult = predict(arguments, metrics, iteration);
if (metrics.hasMetric("mt_start")) {
begin = metrics.getMetric("mt_start").get(0).getValue().longValue();
}
long totalTime = System.currentTimeMillis() - begin;
logger.info("Inference result: {}", lastResult);
String throughput = String.format("%.2f", iteration * 1000d / totalTime);
logger.info(
"Throughput: {}, {} iteration / {} ms.", throughput, iteration, totalTime);
if (metrics.hasMetric("LoadModel")) {
long loadModelTime =
metrics.getMetric("LoadModel").get(0).getValue().longValue();
logger.info(
"Model loading time: {} ms.",
String.format("%.3f", loadModelTime / 1_000_000f));
}
if (metrics.hasMetric("Inference") && iteration > 1) {
float totalP50 =
metrics.percentile("Total", 50).getValue().longValue() / 1_000_000f;
float totalP90 =
metrics.percentile("Total", 90).getValue().longValue() / 1_000_000f;
float totalP99 =
metrics.percentile("Total", 99).getValue().longValue() / 1_000_000f;
float p50 =
metrics.percentile("Inference", 50).getValue().longValue() / 1_000_000f;
float p90 =
metrics.percentile("Inference", 90).getValue().longValue() / 1_000_000f;
float p99 =
metrics.percentile("Inference", 99).getValue().longValue() / 1_000_000f;
float preP50 =
metrics.percentile("Preprocess", 50).getValue().longValue()
/ 1_000_000f;
float preP90 =
metrics.percentile("Preprocess", 90).getValue().longValue()
/ 1_000_000f;
float preP99 =
metrics.percentile("Preprocess", 99).getValue().longValue()
/ 1_000_000f;
float postP50 =
metrics.percentile("Postprocess", 50).getValue().longValue()
/ 1_000_000f;
float postP90 =
metrics.percentile("Postprocess", 90).getValue().longValue()
/ 1_000_000f;
float postP99 =
metrics.percentile("Postprocess", 99).getValue().longValue()
/ 1_000_000f;
logger.info(
String.format(
"total P50: %.3f ms, P90: %.3f ms, P99: %.3f ms",
totalP50, totalP90, totalP99));
logger.info(
String.format(
"inference P50: %.3f ms, P90: %.3f ms, P99: %.3f ms",
p50, p90, p99));
logger.info(
String.format(
"preprocess P50: %.3f ms, P90: %.3f ms, P99: %.3f ms",
preP50, preP90, preP99));
logger.info(
String.format(
"postprocess P50: %.3f ms, P90: %.3f ms, P99: %.3f ms",
postP50, postP90, postP99));
if (Boolean.getBoolean("collect-memory")) {
float heap = metrics.percentile("Heap", 90).getValue().longValue();
float nonHeap = metrics.percentile("NonHeap", 90).getValue().longValue();
float cpu = metrics.percentile("cpu", 90).getValue().longValue();
float rss = metrics.percentile("rss", 90).getValue().longValue();
logger.info(String.format("heap P90: %.3f", heap));
logger.info(String.format("nonHeap P90: %.3f", nonHeap));
logger.info(String.format("cpu P90: %.3f", cpu));
logger.info(String.format("rss P90: %.3f", rss));
}
}
MemoryTrainingListener.dumpMemoryInfo(metrics, arguments.getOutputDir());
long delta = System.currentTimeMillis() - begin;
duration = duration.minus(Duration.ofMillis(delta));
if (!duration.isNegative()) {
                    logger.info("{} minutes left", duration.toMinutes());
}
}
return true;
} catch (ParseException e) {
HelpFormatter formatter = new HelpFormatter();
formatter.setLeftPadding(1);
formatter.setWidth(120);
formatter.printHelp(e.getMessage(), options);
} catch (Throwable t) {
logger.error("Unexpected error", t);
}
return false;
}
/**
     * Returns the last predict result.
     *
     * <p>This method is used for unit tests only.
*
* @return last predict result
*/
public Object getPredictResult() {
return lastResult;
}
@SuppressWarnings({"rawtypes", "unchecked"})
protected ZooModel<?, ?> loadModel(Arguments arguments, Metrics metrics)
throws ModelException, IOException, ClassNotFoundException {
long begin = System.nanoTime();
String artifactId = arguments.getArtifactId();
Class<?> input = arguments.getInputClass();
Class<?> output = arguments.getOutputClass();
Shape shape = arguments.getInputShape();
Criteria.Builder<?, ?> builder =
Criteria.builder()
.setTypes(input, output)
.optFilters(arguments.getCriteria())
.optArtifactId(artifactId)
.optProgress(new ProgressBar());
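        // When an input shape is given, install a synthetic translator that feeds an
        // all-ones tensor and returns a raw float array, bypassing real pre/post-processing.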
if (shape != null) {
builder.optTranslator(
new Translator() {
/** {@inheritDoc} */
@Override
public NDList processInput(TranslatorContext ctx, Object input) {
return new NDList(ctx.getNDManager().ones(shape));
}
/** {@inheritDoc} */
@Override
public Object processOutput(TranslatorContext ctx, NDList list) {
return list.get(0).toFloatArray();
}
/** {@inheritDoc} */
@Override
public Batchifier getBatchifier() {
return null;
}
});
}
ZooModel<?, ?> model = ModelZoo.loadModel(builder.build());
long delta = System.nanoTime() - begin;
logger.info(
"Model {} loaded in: {} ms.",
model.getName(),
String.format("%.3f", delta / 1_000_000f));
metrics.addMetric("LoadModel", delta);
return model;
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark/util/Arguments.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.inference.benchmark.util;
import ai.djl.engine.Engine;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Type;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
/** A class that represents the parsed command line arguments. */
public class Arguments {
private String modelDir;
private String artifactId;
private String imageFile;
private String outputDir;
private Map<String, String> criteria;
private int duration;
private int iteration;
private int threads;
private String inputClass;
private String outputClass;
private Shape inputShape;
public Arguments(CommandLine cmd) {
modelDir = cmd.getOptionValue("model-dir");
artifactId = cmd.getOptionValue("artifact-id");
outputDir = cmd.getOptionValue("output-dir");
imageFile = cmd.getOptionValue("image");
inputClass = cmd.getOptionValue("input-class");
outputClass = cmd.getOptionValue("output-class");
if (cmd.hasOption("duration")) {
duration = Integer.parseInt(cmd.getOptionValue("duration"));
}
iteration = 1;
if (cmd.hasOption("iteration")) {
iteration = Integer.parseInt(cmd.getOptionValue("iteration"));
}
if (cmd.hasOption("threads")) {
threads = Integer.parseInt(cmd.getOptionValue("threads"));
} else {
threads = Runtime.getRuntime().availableProcessors() * 2 - 1;
}
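        // Criteria are passed as a JSON map of filter key/value pairs,
        // e.g. {"backbone":"resnet50"} (illustrative values).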
if (cmd.hasOption("criteria")) {
Type type = new TypeToken<Map<String, String>>() {}.getType();
criteria = new Gson().fromJson(cmd.getOptionValue("criteria"), type);
}
if (cmd.hasOption("input-shape")) {
String shape = cmd.getOptionValue("input-shape");
String[] tokens = shape.split(",");
long[] shapes = Arrays.stream(tokens).mapToLong(Long::parseLong).toArray();
inputShape = new Shape(shapes);
}
}
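    /**
     * Returns the command line options understood by the benchmarks.
     *
     * <p>For example (hypothetical invocation): {@code -n ai.djl.mxnet:resnet -c 100 -t 4}.
     */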
public static Options getOptions() {
Options options = new Options();
options.addOption(
Option.builder("p")
.longOpt("model-dir")
.hasArg()
.argName("MODEL-DIR")
.desc("Path to the model directory.")
.build());
options.addOption(
Option.builder("n")
.longOpt("artifact-id")
.hasArg()
.argName("ARTIFACT-ID")
.desc("Model artifact id.")
.build());
options.addOption(
Option.builder("ic")
.longOpt("input-class")
.hasArg()
.argName("INPUT-CLASS")
.desc("Input class type.")
.build());
options.addOption(
Option.builder("is")
.longOpt("input-shape")
.hasArg()
.argName("INPUT-SHAPE")
.desc("Input data shape.")
.build());
options.addOption(
Option.builder("oc")
.longOpt("output-class")
.hasArg()
.argName("OUTPUT-CLASS")
.desc("Output class type.")
.build());
options.addOption(
Option.builder("i")
.longOpt("image")
.hasArg()
.argName("IMAGE")
.desc("Image file.")
.build());
options.addOptionGroup(
new OptionGroup()
.addOption(
Option.builder("d")
.longOpt("duration")
.hasArg()
.argName("DURATION")
.desc("Duration of the test in minutes.")
.build())
.addOption(
Option.builder("c")
.longOpt("iteration")
.hasArg()
.argName("ITERATION")
.desc("Number of total iterations.")
.build()));
options.addOption(
Option.builder("t")
.longOpt("threads")
.hasArg()
.argName("NUMBER_THREADS")
.desc("Number of inference threads.")
.build());
options.addOption(
Option.builder("o")
.longOpt("output-dir")
.hasArg()
.argName("OUTPUT-DIR")
.desc("Directory for output logs.")
.build());
options.addOption(
Option.builder("r")
.longOpt("criteria")
.hasArg()
.argName("CRITERIA")
.desc("The criteria used for the model.")
.build());
return options;
}
public int getDuration() {
return duration;
}
public Path getModelDir() throws IOException {
if (modelDir == null) {
throw new IOException("Please specify --model-dir");
}
Path path = Paths.get(modelDir);
if (Files.notExists(path)) {
throw new FileNotFoundException("model directory not found: " + modelDir);
}
return path;
}
public String getArtifactId() {
if (artifactId != null) {
return artifactId;
}
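        // No artifact id was given; fall back to a default resnet artifact for the active engine.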
switch (Engine.getInstance().getEngineName()) {
case "PyTorch":
return "ai.djl.pytorch:resnet";
case "TensorFlow":
return "ai.djl.tensorflow:resnet";
case "MXNet":
default:
return "ai.djl.mxnet:resnet";
}
}
public Path getImageFile() throws FileNotFoundException {
if (imageFile == null) {
Path path = Paths.get("src/test/resources/kitten.jpg");
if (Files.notExists(path)) {
throw new FileNotFoundException("Missing --image parameter.");
}
return path;
}
Path path = Paths.get(imageFile);
if (Files.notExists(path)) {
throw new FileNotFoundException("image file not found: " + imageFile);
}
return path;
}
public int getIteration() {
return iteration;
}
public int getThreads() {
return threads;
}
public String getOutputDir() {
return outputDir;
}
public Map<String, String> getCriteria() {
return criteria;
}
public Class<?> getInputClass() throws ClassNotFoundException {
if (inputClass == null) {
return Image.class;
}
return Class.forName(inputClass);
}
public Class<?> getOutputClass() throws ClassNotFoundException {
if (outputClass == null) {
if (artifactId != null && artifactId.contains("ssd")) {
return DetectedObjects.class;
}
return Classifications.class;
}
return Class.forName(outputClass);
}
public Object getInputData() throws IOException, ClassNotFoundException {
Class<?> klass = getInputClass();
if (klass == Image.class) {
return ImageFactory.getInstance().fromFile(getImageFile());
} else if (klass == float[].class || klass == NDList.class) {
// TODO: load data from input file
// Create empty NDArray from shape for now
return null;
}
throw new IllegalArgumentException("Unsupported input class: " + klass);
}
public Shape getInputShape() {
return inputShape;
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/inference/benchmark/util/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains utilities used for the inference benchmarking examples within the package {@link
* ai.djl.examples.inference.benchmark}.
*/
package ai.djl.examples.inference.benchmark.util;
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainCaptcha.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.basicdataset.CaptchaDataset;
import ai.djl.basicmodelzoo.cv.classification.ResNetV1;
import ai.djl.examples.training.util.Arguments;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.SequentialBlock;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.Dataset.Usage;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.SimpleCompositeLoss;
import ai.djl.training.loss.SoftmaxCrossEntropyLoss;
import ai.djl.training.util.ProgressBar;
import java.io.IOException;
import org.apache.commons.cli.ParseException;
/**
* An example of training a CAPTCHA solving model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/train_captcha.md">doc</a> for
* information about this example.
*/
public final class TrainCaptcha {
private TrainCaptcha() {}
public static void main(String[] args) throws IOException, ParseException {
TrainCaptcha.runExample(args);
}
public static TrainingResult runExample(String[] args) throws ParseException, IOException {
Arguments arguments = Arguments.parseArgs(args);
try (Model model = Model.newInstance("captcha")) {
model.setBlock(getBlock());
// get training and validation dataset
RandomAccessDataset trainingSet = getDataset(Usage.TRAIN, arguments);
RandomAccessDataset validateSet = getDataset(Usage.VALIDATION, arguments);
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
Shape inputShape =
new Shape(1, 1, CaptchaDataset.IMAGE_HEIGHT, CaptchaDataset.IMAGE_WIDTH);
// initialize trainer with proper input shape
trainer.initialize(inputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainingSet, validateSet);
return trainer.getTrainingResult();
}
}
}
private static DefaultTrainingConfig setupTrainingConfig(Arguments arguments) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener = new CheckpointsTrainingListener(outputDir);
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
float accuracy = result.getValidateEvaluation("acc_digit_0");
model.setProperty("Accuracy", String.format("%.5f", accuracy));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
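        // The model produces one output per CAPTCHA digit, so build a composite
        // loss with a softmax cross-entropy component per digit position.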
SimpleCompositeLoss loss = new SimpleCompositeLoss();
for (int i = 0; i < CaptchaDataset.CAPTCHA_LENGTH; i++) {
loss.addLoss(new SoftmaxCrossEntropyLoss("loss_digit_" + i), i);
}
DefaultTrainingConfig config =
new DefaultTrainingConfig(loss)
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
for (int i = 0; i < CaptchaDataset.CAPTCHA_LENGTH; i++) {
config.addEvaluator(new Accuracy("acc_digit_" + i, i));
}
return config;
}
private static RandomAccessDataset getDataset(Dataset.Usage usage, Arguments arguments)
throws IOException {
CaptchaDataset dataset =
CaptchaDataset.builder()
.optUsage(usage)
.setSampling(arguments.getBatchSize(), true)
.optLimit(arguments.getLimit())
.build();
dataset.prepare(new ProgressBar());
return dataset;
}
private static Block getBlock() {
Block resnet =
ResNetV1.builder()
.setNumLayers(50)
.setImageShape(
new Shape(
1, CaptchaDataset.IMAGE_HEIGHT, CaptchaDataset.IMAGE_WIDTH))
.setOutSize(CaptchaDataset.CAPTCHA_OPTIONS * CaptchaDataset.CAPTCHA_LENGTH)
.build();
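        // Append a block that reshapes the flat ResNet output and splits it into
        // CAPTCHA_LENGTH per-digit predictions.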
return new SequentialBlock()
.add(resnet)
.add(
resnetOutputList -> {
NDArray resnetOutput = resnetOutputList.singletonOrThrow();
NDList splitOutput =
resnetOutput
.reshape(
-1,
CaptchaDataset.CAPTCHA_LENGTH,
CaptchaDataset.CAPTCHA_OPTIONS)
.split(CaptchaDataset.CAPTCHA_LENGTH, 1);
NDList output = new NDList(CaptchaDataset.CAPTCHA_LENGTH);
for (NDArray outputDigit : splitOutput) {
output.add(outputDigit.squeeze(1));
}
return output;
});
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainMnist.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.basicdataset.Mnist;
import ai.djl.basicmodelzoo.basic.Mlp;
import ai.djl.examples.training.util.Arguments;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.util.ProgressBar;
import java.io.IOException;
import org.apache.commons.cli.ParseException;
/**
* An example of training an image classification (MNIST) model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/train_mnist_mlp.md">doc</a> for
* information about this example.
*/
public final class TrainMnist {
private TrainMnist() {}
public static void main(String[] args) throws IOException, ParseException {
TrainMnist.runExample(args);
}
public static TrainingResult runExample(String[] args) throws IOException, ParseException {
Arguments arguments = Arguments.parseArgs(args);
// Construct neural network
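        // an MLP with 28 * 28 = 784 inputs (the flattened image), hidden layers of
        // 128 and 64 units, and 10 output classes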
Block block =
new Mlp(
Mnist.IMAGE_HEIGHT * Mnist.IMAGE_WIDTH,
Mnist.NUM_CLASSES,
new int[] {128, 64});
try (Model model = Model.newInstance("mlp")) {
model.setBlock(block);
// get training and validation dataset
RandomAccessDataset trainingSet = getDataset(Dataset.Usage.TRAIN, arguments);
RandomAccessDataset validateSet = getDataset(Dataset.Usage.TEST, arguments);
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
                /*
                 * MNIST images are 28x28 grayscale, preprocessed into a 28 * 28 NDArray.
                 * The first axis is the batch axis; we can use 1 for initialization.
                 */
Shape inputShape = new Shape(1, Mnist.IMAGE_HEIGHT * Mnist.IMAGE_WIDTH);
// initialize trainer with proper input shape
trainer.initialize(inputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainingSet, validateSet);
return trainer.getTrainingResult();
}
}
}
private static DefaultTrainingConfig setupTrainingConfig(Arguments arguments) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener = new CheckpointsTrainingListener(outputDir);
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
float accuracy = result.getValidateEvaluation("Accuracy");
model.setProperty("Accuracy", String.format("%.5f", accuracy));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
return new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
.addEvaluator(new Accuracy())
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
}
private static RandomAccessDataset getDataset(Dataset.Usage usage, Arguments arguments)
throws IOException {
Mnist mnist =
Mnist.builder()
.optUsage(usage)
.setSampling(arguments.getBatchSize(), true)
.optLimit(arguments.getLimit())
.build();
mnist.prepare(new ProgressBar());
return mnist;
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainMnistWithLSTM.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.basicdataset.Mnist;
import ai.djl.examples.training.util.Arguments;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.core.Linear;
import ai.djl.nn.norm.BatchNorm;
import ai.djl.nn.recurrent.LSTM;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.initializer.XavierInitializer;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.util.ProgressBar;
import java.io.IOException;
import org.apache.commons.cli.ParseException;
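/** An example of training an image classification (MNIST) model using an LSTM-based network. */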
public final class TrainMnistWithLSTM {
private TrainMnistWithLSTM() {}
public static void main(String[] args) throws IOException, ParseException {
TrainMnistWithLSTM.runExample(args);
}
public static TrainingResult runExample(String[] args) throws IOException, ParseException {
Arguments arguments = Arguments.parseArgs(args);
try (Model model = Model.newInstance("lstm")) {
model.setBlock(getLSTMModel());
// get training and validation dataset
RandomAccessDataset trainingSet = getDataset(Dataset.Usage.TRAIN, arguments);
RandomAccessDataset validateSet = getDataset(Dataset.Usage.TEST, arguments);
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
                /*
                 * MNIST images are 28x28 grayscale, preprocessed into a 28 * 28 NDArray.
                 * The first axis is the batch axis; here a batch of 32 is used for
                 * initialization.
                 */
Shape inputShape = new Shape(32, 28, 28);
// initialize trainer with proper input shape
trainer.initialize(inputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainingSet, validateSet);
return trainer.getTrainingResult();
}
}
}
private static Block getLSTMModel() {
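        // Reshape the image batch into a (batch, time, channel) sequence, run it through an
        // LSTM, apply batch normalization, and project onto the 10 MNIST classes.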
SequentialBlock block = new SequentialBlock();
block.add(
inputs -> {
NDArray input = inputs.singletonOrThrow();
Shape inputShape = input.getShape();
long batchSize = inputShape.get(0);
long channel = inputShape.get(3);
long time = inputShape.size() / (batchSize * channel);
return new NDList(input.reshape(new Shape(batchSize, time, channel)));
});
block.add(
new LSTM.Builder().setStateSize(64).setNumStackedLayers(1).optDropRate(0).build());
block.add(BatchNorm.builder().optEpsilon(1e-5f).optMomentum(0.9f).build());
block.add(Linear.builder().setOutChannels(10).optFlatten(true).build());
return block;
}
public static DefaultTrainingConfig setupTrainingConfig(Arguments arguments) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener = new CheckpointsTrainingListener(outputDir);
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
float accuracy = result.getValidateEvaluation("Accuracy");
model.setProperty("Accuracy", String.format("%.5f", accuracy));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
return new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
.addEvaluator(new Accuracy())
.optInitializer(new XavierInitializer())
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
}
public static RandomAccessDataset getDataset(Dataset.Usage usage, Arguments arguments)
throws IOException {
Mnist mnist =
Mnist.builder()
.optUsage(usage)
.setSampling(arguments.getBatchSize(), false, true)
.optLimit(arguments.getLimit())
.build();
mnist.prepare(new ProgressBar());
return mnist;
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainPikachu.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.basicdataset.PikachuDetection;
import ai.djl.basicmodelzoo.cv.object_detection.ssd.SingleShotDetection;
import ai.djl.examples.training.util.Arguments;
import ai.djl.inference.Predictor;
import ai.djl.metric.Metrics;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.MultiBoxDetection;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.SingleShotDetectionTranslator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.LambdaBlock;
import ai.djl.nn.SequentialBlock;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.BoundingBoxError;
import ai.djl.training.evaluator.SingleShotDetectionAccuracy;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.SingleShotDetectionLoss;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Pipeline;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.cli.ParseException;
/**
* An example of training a simple Single Shot Detection (SSD) model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/train_pikachu_ssd.md">doc</a> for
* information about this example.
*/
public final class TrainPikachu {
private TrainPikachu() {}
public static void main(String[] args) throws IOException, ParseException {
TrainPikachu.runExample(args);
}
public static TrainingResult runExample(String[] args) throws IOException, ParseException {
Arguments arguments = Arguments.parseArgs(args);
try (Model model = Model.newInstance("pikachu-ssd")) {
model.setBlock(getSsdTrainBlock());
RandomAccessDataset trainingSet = getDataset(Dataset.Usage.TRAIN, arguments);
RandomAccessDataset validateSet = getDataset(Dataset.Usage.TEST, arguments);
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
Shape inputShape = new Shape(arguments.getBatchSize(), 3, 256, 256);
trainer.initialize(inputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainingSet, validateSet);
return trainer.getTrainingResult();
}
}
}
public static int predict(String outputDir, String imageFile)
throws IOException, MalformedModelException, TranslateException {
try (Model model = Model.newInstance("pikachu-ssd")) {
float detectionThreshold = 0.6f;
            // load parameters back into the original training block
model.setBlock(getSsdTrainBlock());
model.load(Paths.get(outputDir));
            // append prediction logic to the end of the training block once parameters are loaded
Block ssdTrain = model.getBlock();
model.setBlock(getSsdPredictBlock(ssdTrain));
Path imagePath = Paths.get(imageFile);
SingleShotDetectionTranslator translator =
SingleShotDetectionTranslator.builder()
.addTransform(new ToTensor())
.optSynset(Collections.singletonList("pikachu"))
.optThreshold(detectionThreshold)
.build();
try (Predictor<Image, DetectedObjects> predictor = model.newPredictor(translator)) {
Image image = ImageFactory.getInstance().fromFile(imagePath);
DetectedObjects detectedObjects = predictor.predict(image);
image.drawBoundingBoxes(detectedObjects);
Path out = Paths.get(outputDir).resolve("pikachu_output.png");
image.save(Files.newOutputStream(out), "png");
                // return the number of Pikachu detected
return detectedObjects.getNumberOfObjects();
}
}
}
private static RandomAccessDataset getDataset(Dataset.Usage usage, Arguments arguments)
throws IOException {
Pipeline pipeline = new Pipeline(new ToTensor());
PikachuDetection pikachuDetection =
PikachuDetection.builder()
.optUsage(usage)
.optLimit(arguments.getLimit())
.optPipeline(pipeline)
.setSampling(arguments.getBatchSize(), true)
.build();
pikachuDetection.prepare(new ProgressBar());
return pikachuDetection;
}
private static DefaultTrainingConfig setupTrainingConfig(Arguments arguments) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener = new CheckpointsTrainingListener(outputDir);
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
float accuracy = result.getValidateEvaluation("classAccuracy");
model.setProperty("ClassAccuracy", String.format("%.5f", accuracy));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
return new DefaultTrainingConfig(new SingleShotDetectionLoss())
.addEvaluator(new SingleShotDetectionAccuracy("classAccuracy"))
.addEvaluator(new BoundingBoxError("boundingBoxError"))
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
}
public static Block getSsdTrainBlock() {
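        // Base network: three down-sampling blocks with 16, 32, and 64 filters. SSD then
        // predicts at five scales, each with the anchor sizes and aspect ratios below.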
int[] numFilters = {16, 32, 64};
SequentialBlock baseBlock = new SequentialBlock();
for (int numFilter : numFilters) {
baseBlock.add(SingleShotDetection.getDownSamplingBlock(numFilter));
}
List<List<Float>> sizes = new ArrayList<>();
List<List<Float>> ratios = new ArrayList<>();
for (int i = 0; i < 5; i++) {
ratios.add(Arrays.asList(1f, 2f, 0.5f));
}
sizes.add(Arrays.asList(0.2f, 0.272f));
sizes.add(Arrays.asList(0.37f, 0.447f));
sizes.add(Arrays.asList(0.54f, 0.619f));
sizes.add(Arrays.asList(0.71f, 0.79f));
sizes.add(Arrays.asList(0.88f, 0.961f));
return SingleShotDetection.builder()
.setNumClasses(1)
.setNumFeatures(3)
.optGlobalPool(true)
.setRatios(ratios)
.setSizes(sizes)
.setBaseNetwork(baseBlock)
.build();
}
public static Block getSsdPredictBlock(Block ssdTrain) {
// add prediction process
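        // on top of the trained SSD: softmax the class scores, transpose them to
        // (batch, classes, anchors), and let MultiBoxDetection decode the anchor offsets
        // into final detections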
SequentialBlock ssdPredict = new SequentialBlock();
ssdPredict.add(ssdTrain);
ssdPredict.add(
new LambdaBlock(
output -> {
NDArray anchors = output.get(0);
NDArray classPredictions = output.get(1).softmax(-1).transpose(0, 2, 1);
NDArray boundingBoxPredictions = output.get(2);
MultiBoxDetection multiBoxDetection =
MultiBoxDetection.builder().build();
NDList detections =
multiBoxDetection.detection(
new NDList(
classPredictions,
boundingBoxPredictions,
anchors));
return detections.singletonOrThrow().split(new long[] {1, 2}, 2);
}));
return ssdPredict;
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainSentimentAnalysis.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Application;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.basicdataset.StanfordMovieReview;
import ai.djl.basicdataset.utils.FixedBucketSampler;
import ai.djl.basicdataset.utils.TextData;
import ai.djl.examples.training.util.Arguments;
import ai.djl.inference.Predictor;
import ai.djl.metric.Metrics;
import ai.djl.modality.nlp.embedding.EmbeddingException;
import ai.djl.modality.nlp.embedding.ModelZooTextEmbedding;
import ai.djl.modality.nlp.embedding.TextEmbedding;
import ai.djl.modality.nlp.preprocess.LowerCaseConvertor;
import ai.djl.modality.nlp.preprocess.PunctuationSeparator;
import ai.djl.modality.nlp.preprocess.SimpleTokenizer;
import ai.djl.modality.nlp.preprocess.TextProcessor;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.core.Linear;
import ai.djl.nn.recurrent.LSTM;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.SoftmaxCrossEntropyLoss;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Batchifier;
import ai.djl.translate.PaddingStackBatchifier;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.commons.cli.ParseException;
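/**
 * An example of training a sentiment analysis model on the Stanford Movie Review dataset,
 * using pre-trained GloVe word embeddings and a bidirectional LSTM.
 */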
public final class TrainSentimentAnalysis {
private static final List<TextProcessor> TEXT_PROCESSORS =
Arrays.asList(
new SimpleTokenizer(),
new LowerCaseConvertor(Locale.ENGLISH),
new PunctuationSeparator());
private static int paddingTokenValue;
private TrainSentimentAnalysis() {}
public static void main(String[] args)
throws IOException, ParseException, ModelNotFoundException, MalformedModelException,
TranslateException {
TrainSentimentAnalysis.runExample(args);
}
public static TrainingResult runExample(String[] args)
throws IOException, ParseException, ModelNotFoundException, MalformedModelException,
TranslateException {
Arguments arguments = Arguments.parseArgs(args);
ExecutorService executorService = Executors.newFixedThreadPool(8);
Criteria<String, NDList> criteria =
Criteria.builder()
.optApplication(Application.NLP.WORD_EMBEDDING)
.setTypes(String.class, NDList.class)
.optArtifactId("glove")
.optFilter("dimensions", "50")
.build();
try (Model model = Model.newInstance("stanfordSentimentAnalysis");
ZooModel<String, NDList> embedding = ModelZoo.loadModel(criteria)) {
ModelZooTextEmbedding modelZooTextEmbedding = new ModelZooTextEmbedding(embedding);
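            // use the embedding index of the unknown token "<unk>" as the padding value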
            paddingTokenValue =
                    modelZooTextEmbedding
                            .preprocessTextToEmbed(Collections.singletonList("<unk>"))[0];
            // get training and validation dataset
StanfordMovieReview trainingSet =
getDataset(embedding, Dataset.Usage.TRAIN, executorService, arguments);
StanfordMovieReview validateSet =
getDataset(embedding, Dataset.Usage.TEST, executorService, arguments);
model.setBlock(getModel(modelZooTextEmbedding));
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments, modelZooTextEmbedding);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
Shape encoderInputShape = new Shape(arguments.getBatchSize(), 10, 50);
// initialize trainer with proper input shape
trainer.initialize(encoderInputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainingSet, validateSet);
TrainingResult result = trainer.getTrainingResult();
try (Predictor<String, Boolean> predictor =
model.newPredictor(new MyTranslator(embedding))) {
List<String> sentences =
Arrays.asList(
"This movie was very good",
"This movie was terrible",
"The movie was not that great");
System.out.println(predictor.batchPredict(sentences)); // NOPMD
}
return result;
}
} finally {
executorService.shutdownNow();
}
}
private static Block getModel(ModelZooTextEmbedding modelZooTextEmbedding) {
return new SequentialBlock()
.add(
inputs -> {
try {
return new NDList(modelZooTextEmbedding.embedText(inputs.head()));
} catch (EmbeddingException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
})
.add(
LSTM.builder()
.setNumStackedLayers(2)
.setStateSize(100)
.setSequenceLength(false)
.optBidrectional(true)
.build())
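                // pool the bidirectional LSTM output by concatenating the first and last
                // time steps as the sentence representation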
.add(
x -> {
long sequenceLength = x.head().getShape().get(1);
NDArray ntc = x.head().transpose(1, 0, 2);
return new NDList(
NDArrays.concat(
new NDList(ntc.get(0), ntc.get(sequenceLength - 1)),
1));
})
.add(Linear.builder().setOutChannels(2).build());
}
public static DefaultTrainingConfig setupTrainingConfig(
Arguments arguments, ModelZooTextEmbedding embedding) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener = new CheckpointsTrainingListener(outputDir);
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
return new DefaultTrainingConfig(new SoftmaxCrossEntropyLoss())
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
}
public static StanfordMovieReview getDataset(
Model embeddingModel,
Dataset.Usage usage,
ExecutorService executorService,
Arguments arguments)
throws IOException {
StanfordMovieReview stanfordMovieReview =
StanfordMovieReview.builder()
.setSampling(new FixedBucketSampler(arguments.getBatchSize()))
.optDataBatchifier(
PaddingStackBatchifier.builder()
.optIncludeValidLengths(false)
.addPad(
0,
0,
(m) -> m.ones(new Shape(1)).mul(paddingTokenValue))
.build())
.setSourceConfiguration(
new TextData.Configuration()
.setTextEmbedding(new ModelZooTextEmbedding(embeddingModel))
.setTextProcessors(TEXT_PROCESSORS))
.setUsage(usage)
.optExecutor(executorService, 8)
.optLimit(arguments.getLimit())
.build();
stanfordMovieReview.prepare(new ProgressBar());
return stanfordMovieReview;
}
public static final class MyTranslator implements Translator<String, Boolean> {
private TextEmbedding textEmbedding;
private NDManager manager;
public MyTranslator(ZooModel<String, NDList> embeddingModel) {
textEmbedding = new ModelZooTextEmbedding(embeddingModel);
manager = embeddingModel.getNDManager();
}
@Override
public Boolean processOutput(TranslatorContext ctx, NDList list) {
long argmax = list.head().argMax().getLong();
return argmax == 1;
}
@Override
public NDList processInput(TranslatorContext ctx, String input) throws EmbeddingException {
List<String> tokens = Collections.singletonList(input);
for (TextProcessor processor : TEXT_PROCESSORS) {
tokens = processor.preprocess(tokens);
}
NDArray array = textEmbedding.embedText(manager, tokens);
return new NDList(array);
}
/** {@inheritDoc} */
@Override
public Batchifier getBatchifier() {
return PaddingStackBatchifier.builder()
.optIncludeValidLengths(false)
.addPad(0, 0, m -> m.ones(new Shape(1, 50)).mul(paddingTokenValue))
.build();
}
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainSeq2Seq.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.basicdataset.TatoebaEnglishFrenchDataset;
import ai.djl.basicdataset.TextDataset;
import ai.djl.basicdataset.utils.TextData.Configuration;
import ai.djl.basicmodelzoo.nlp.SimpleTextDecoder;
import ai.djl.basicmodelzoo.nlp.SimpleTextEncoder;
import ai.djl.examples.training.util.Arguments;
import ai.djl.metric.Metrics;
import ai.djl.modality.nlp.EncoderDecoder;
import ai.djl.modality.nlp.embedding.TextEmbedding;
import ai.djl.modality.nlp.embedding.TrainableTextEmbedding;
import ai.djl.modality.nlp.preprocess.LowerCaseConvertor;
import ai.djl.modality.nlp.preprocess.PunctuationSeparator;
import ai.djl.modality.nlp.preprocess.SimpleTokenizer;
import ai.djl.modality.nlp.preprocess.TextTerminator;
import ai.djl.modality.nlp.preprocess.TextTruncator;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.recurrent.LSTM;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.MaskedSoftmaxCrossEntropyLoss;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.PaddingStackBatchifier;
import java.io.IOException;
import java.util.Arrays;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.commons.cli.ParseException;
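/**
 * An example of training a sequence-to-sequence English-French machine translation model on
 * the Tatoeba dataset, using an LSTM-based encoder-decoder with trainable embeddings.
 */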
public final class TrainSeq2Seq {
private TrainSeq2Seq() {}
public static void main(String[] args) throws IOException, ParseException {
TrainSeq2Seq.runExample(args);
}
public static TrainingResult runExample(String[] args) throws IOException, ParseException {
Arguments arguments = Arguments.parseArgs(args);
ExecutorService executorService = Executors.newFixedThreadPool(8);
try (Model model = Model.newInstance("seq2seqMTEn-Fr")) {
// get training and validation dataset
TextDataset trainingSet =
getDataset(Dataset.Usage.TRAIN, arguments, executorService, null, null);
// Fetch TextEmbedding from dataset
TrainableTextEmbedding sourceEmbedding =
(TrainableTextEmbedding) trainingSet.getTextEmbedding(true);
TrainableTextEmbedding targetEmbedding =
(TrainableTextEmbedding) trainingSet.getTextEmbedding(false);
            // Validation must use the same embeddings as training
TextDataset validateDataset =
getDataset(
Dataset.Usage.TEST,
arguments,
executorService,
sourceEmbedding,
targetEmbedding);
// Build the model with the TextEmbedding so that embeddings can be trained
Block block =
getSeq2SeqModel(
sourceEmbedding,
targetEmbedding,
trainingSet.getVocabulary(false).getAllTokens().size());
model.setBlock(block);
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
                /*
                 * In a sequence-to-sequence model for machine translation, the decoder input
                 * must be staggered by one position with respect to the label during training.
                 */
Shape encoderInputShape = new Shape(arguments.getBatchSize(), 10);
Shape decoderInputShape = new Shape(arguments.getBatchSize(), 9);
// initialize trainer with proper input shape
trainer.initialize(encoderInputShape, decoderInputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainingSet, validateDataset);
return trainer.getTrainingResult();
} finally {
executorService.shutdownNow();
}
}
}
private static Block getSeq2SeqModel(
TrainableTextEmbedding sourceEmbedding,
TrainableTextEmbedding targetEmbedding,
int vocabSize) {
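        // The encoder embeds source tokens and runs them through a 2-layer LSTM; the decoder
        // mirrors it and projects each step's hidden state onto the target vocabulary.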
SimpleTextEncoder simpleTextEncoder =
new SimpleTextEncoder(
sourceEmbedding,
new LSTM.Builder()
.setStateSize(32)
.setNumStackedLayers(2)
.optDropRate(0)
.build());
SimpleTextDecoder simpleTextDecoder =
new SimpleTextDecoder(
targetEmbedding,
new LSTM.Builder()
.setStateSize(32)
.setNumStackedLayers(2)
.optDropRate(0)
.build(),
vocabSize);
return new EncoderDecoder(simpleTextEncoder, simpleTextDecoder);
}
public static DefaultTrainingConfig setupTrainingConfig(Arguments arguments) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener = new CheckpointsTrainingListener(outputDir);
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
float accuracy = result.getValidateEvaluation("Accuracy");
model.setProperty("Accuracy", String.format("%.5f", accuracy));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
return new DefaultTrainingConfig(new MaskedSoftmaxCrossEntropyLoss())
.addEvaluator(new Accuracy("Accuracy", 0, 2))
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
}
public static TextDataset getDataset(
Dataset.Usage usage,
Arguments arguments,
ExecutorService executorService,
TextEmbedding sourceEmbedding,
TextEmbedding targetEmbedding)
throws IOException {
long limit =
usage == Dataset.Usage.TRAIN ? arguments.getLimit() : arguments.getLimit() / 10;
TatoebaEnglishFrenchDataset.Builder datasetBuilder =
TatoebaEnglishFrenchDataset.builder()
.setSampling(arguments.getBatchSize(), true, false)
.optDataBatchifier(
PaddingStackBatchifier.builder()
.optIncludeValidLengths(true)
.addPad(0, 0, (m) -> m.zeros(new Shape(1)), 10)
.build())
.optLabelBatchifier(
PaddingStackBatchifier.builder()
.optIncludeValidLengths(true)
.addPad(0, 0, (m) -> m.ones(new Shape(1)), 10)
.build())
.optUsage(usage)
.optExecutor(executorService, 8)
.optLimit(limit);
Configuration sourceConfig =
new Configuration()
.setTextProcessors(
Arrays.asList(
new SimpleTokenizer(),
new LowerCaseConvertor(Locale.ENGLISH),
new PunctuationSeparator(),
new TextTruncator(10)));
Configuration targetConfig =
new Configuration()
.setTextProcessors(
Arrays.asList(
new SimpleTokenizer(),
new LowerCaseConvertor(Locale.FRENCH),
new PunctuationSeparator(),
new TextTruncator(8),
new TextTerminator()));
if (sourceEmbedding != null) {
sourceConfig.setTextEmbedding(sourceEmbedding);
} else {
sourceConfig.setEmbeddingSize(32);
}
if (targetEmbedding != null) {
targetConfig.setTextEmbedding(targetEmbedding);
} else {
targetConfig.setEmbeddingSize(32);
}
TatoebaEnglishFrenchDataset dataset =
datasetBuilder
.setSourceConfiguration(sourceConfig)
.setTargetConfiguration(targetConfig)
.build();
dataset.prepare(new ProgressBar());
return dataset;
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainWithHpo.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.basicdataset.Mnist;
import ai.djl.basicmodelzoo.basic.Mlp;
import ai.djl.examples.training.util.Arguments;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.hyperparameter.optimizer.HpORandom;
import ai.djl.training.hyperparameter.optimizer.HpOptimizer;
import ai.djl.training.hyperparameter.param.HpInt;
import ai.djl.training.hyperparameter.param.HpSet;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.util.ProgressBar;
import ai.djl.util.Pair;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
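/**
 * An example of hyperparameter optimization: a random search over the hidden layer count and
 * hidden layer size of an MLP trained on MNIST.
 */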
public final class TrainWithHpo {
private static final Logger logger = LoggerFactory.getLogger(TrainWithHpo.class);
private TrainWithHpo() {}
public static void main(String[] args) throws IOException, ParseException {
TrainWithHpo.runExample(args);
}
public static TrainingResult runExample(String[] args) throws IOException, ParseException {
Arguments arguments = Arguments.parseArgs(args);
// get training and validation dataset
RandomAccessDataset trainingSet = getDataset(Dataset.Usage.TRAIN, arguments);
RandomAccessDataset validateSet = getDataset(Dataset.Usage.TEST, arguments);
HpSet hyperParams =
new HpSet(
"hp",
Arrays.asList(
new HpInt("hiddenLayersSize", 10, 100),
new HpInt("hiddenLayersCount", 2, 10)));
HpOptimizer hpOptimizer = new HpORandom(hyperParams);
final int hyperparameterTests = 50;
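        // Random search: sample a candidate configuration, train a fresh model with it,
        // and report the validation loss back to the optimizer so that getBest() can
        // later return the strongest configuration.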
for (int i = 0; i < hyperparameterTests; i++) {
HpSet hpVals = hpOptimizer.nextConfig();
Pair<Model, TrainingResult> trained =
train(arguments, hpVals, trainingSet, validateSet);
trained.getKey().close();
float loss = trained.getValue().getValidateLoss();
hpOptimizer.update(hpVals, loss);
logger.info(
"--------- hp test {}/{} - Loss {} - {}", i, hyperparameterTests, loss, hpVals);
}
HpSet bestHpVals = hpOptimizer.getBest().getKey();
Pair<Model, TrainingResult> trained =
train(arguments, bestHpVals, trainingSet, validateSet);
TrainingResult result = trained.getValue();
float loss = result.getValidateLoss();
try (Model model = trained.getKey()) {
logger.info("--------- FINAL_HP - Loss {} - {}", loss, bestHpVals);
model.setProperty("Epoch", String.valueOf(result.getEpoch()));
model.setProperty(
"Accuracy", String.format("%.5f", result.getValidateEvaluation("Accuracy")));
model.setProperty("Loss", String.format("%.5f", loss));
model.save(Paths.get(arguments.getOutputDir()), "mlp");
}
return result;
}
private static Pair<Model, TrainingResult> train(
Arguments arguments,
HpSet hpVals,
RandomAccessDataset trainingSet,
RandomAccessDataset validateSet) {
// Construct neural network
int[] hidden = new int[(Integer) hpVals.getHParam("hiddenLayersCount").random()];
Arrays.fill(hidden, (Integer) hpVals.getHParam("hiddenLayersSize").random());
Block block = new Mlp(Mnist.IMAGE_HEIGHT * Mnist.IMAGE_WIDTH, Mnist.NUM_CLASSES, hidden);
Model model = Model.newInstance("mlp");
model.setBlock(block);
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
            /*
             * MNIST images are 28x28 grayscale, preprocessed into a 28 * 28 NDArray.
             * The first axis is the batch axis; we can use 1 for initialization.
             */
Shape inputShape = new Shape(1, Mnist.IMAGE_HEIGHT * Mnist.IMAGE_WIDTH);
// initialize trainer with proper input shape
trainer.initialize(inputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainingSet, validateSet);
TrainingResult result = trainer.getTrainingResult();
return new Pair<>(model, result);
}
}
private static DefaultTrainingConfig setupTrainingConfig(Arguments arguments) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener = new CheckpointsTrainingListener(outputDir);
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
float accuracy = result.getValidateEvaluation("Accuracy");
model.setProperty("Accuracy", String.format("%.5f", accuracy));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
return new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
.addEvaluator(new Accuracy())
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
}
private static RandomAccessDataset getDataset(Dataset.Usage usage, Arguments arguments)
throws IOException {
Mnist mnist =
Mnist.builder()
.optUsage(usage)
.setSampling(arguments.getBatchSize(), true)
.optLimit(arguments.getLimit())
.build();
mnist.prepare(new ProgressBar());
return mnist;
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/TrainWithOptimizers.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.basicdataset.Cifar10;
import ai.djl.basicmodelzoo.BasicModelZoo;
import ai.djl.basicmodelzoo.cv.classification.ResNetV1;
import ai.djl.examples.training.util.Arguments;
import ai.djl.metric.Metrics;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.transform.Normalize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.SymbolBlock;
import ai.djl.nn.core.Linear;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.listener.CheckpointsTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.optimizer.Optimizer;
import ai.djl.training.optimizer.learningrate.LearningRateTracker;
import ai.djl.training.optimizer.learningrate.MultiFactorTracker;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Pipeline;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
/** An example of training Cifar10 with a variety of optimizers. */
public final class TrainWithOptimizers {
private TrainWithOptimizers() {}
public static void main(String[] args)
throws IOException, ParseException, ModelNotFoundException, MalformedModelException {
TrainWithOptimizers.runExample(args);
}
public static TrainingResult runExample(String[] args)
throws IOException, ParseException, ModelNotFoundException, MalformedModelException {
Options options = OptimizerArguments.getOptions();
DefaultParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, args, null, false);
OptimizerArguments arguments = new OptimizerArguments(cmd);
try (Model model = getModel(arguments)) {
// get training dataset
RandomAccessDataset trainDataset = getDataset(Dataset.Usage.TRAIN, arguments);
RandomAccessDataset validationDataset = getDataset(Dataset.Usage.TEST, arguments);
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
                /*
                 * CIFAR10 images are 32x32 color, preprocessed into an NCHW NDArray.
                 * The first axis is the batch axis; we can use 1 for initialization.
                 */
Shape inputShape = new Shape(1, 3, Cifar10.IMAGE_HEIGHT, Cifar10.IMAGE_WIDTH);
// initialize trainer with proper input shape
trainer.initialize(inputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainDataset, validationDataset);
return trainer.getTrainingResult();
}
}
}
private static Model getModel(Arguments arguments)
throws IOException, ModelNotFoundException, MalformedModelException {
boolean isSymbolic = arguments.isSymbolic();
boolean preTrained = arguments.isPreTrained();
Map<String, String> options = arguments.getCriteria();
Criteria.Builder<Image, Classifications> builder =
Criteria.builder()
.setTypes(Image.class, Classifications.class)
.optProgress(new ProgressBar())
.optArtifactId("resnet");
if (isSymbolic) {
            // currently only the MXNet engine supports removeLastBlock
builder.optGroupId("ai.djl.mxnet");
if (options == null) {
builder.optFilter("layers", "50");
builder.optFilter("flavor", "v1");
} else {
builder.optFilters(options);
}
Model model = ModelZoo.loadModel(builder.build());
SequentialBlock newBlock = new SequentialBlock();
SymbolBlock block = (SymbolBlock) model.getBlock();
block.removeLastBlock();
newBlock.add(block);
            // the original model doesn't include a flatten layer,
            // so apply the flatten here
newBlock.add(Blocks.batchFlattenBlock());
newBlock.add(Linear.builder().setOutChannels(10).build());
model.setBlock(newBlock);
if (!preTrained) {
model.getBlock().clear();
}
return model;
}
// imperative resnet50
if (preTrained) {
builder.optGroupId(BasicModelZoo.GROUP_ID);
if (options == null) {
builder.optFilter("layers", "50");
builder.optFilter("flavor", "v1");
builder.optFilter("dataset", "cifar10");
} else {
builder.optFilters(options);
}
// load pre-trained imperative ResNet50 from DJL model zoo
return ModelZoo.loadModel(builder.build());
} else {
// construct new ResNet50 without pre-trained weights
Model model = Model.newInstance("resnetv1");
Block resNet50 =
ResNetV1.builder()
.setImageShape(new Shape(3, Cifar10.IMAGE_HEIGHT, Cifar10.IMAGE_WIDTH))
.setNumLayers(50)
.setOutSize(10)
.build();
model.setBlock(resNet50);
return model;
}
}
private static DefaultTrainingConfig setupTrainingConfig(OptimizerArguments arguments) {
String outputDir = arguments.getOutputDir();
CheckpointsTrainingListener listener =
new CheckpointsTrainingListener(outputDir, "resnetv1");
listener.setSaveModelCallback(
trainer -> {
TrainingResult result = trainer.getTrainingResult();
Model model = trainer.getModel();
float accuracy = result.getValidateEvaluation("Accuracy");
model.setProperty("Accuracy", String.format("%.5f", accuracy));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
});
return new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
.addEvaluator(new Accuracy())
.optOptimizer(setupOptimizer(arguments))
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(outputDir))
.addTrainingListeners(listener);
}
private static Optimizer setupOptimizer(OptimizerArguments arguments) {
String optimizerName = arguments.getOptimizer();
int batchSize = arguments.getBatchSize();
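        // "sgd" uses a multi-factor schedule: the learning rate starts at 1e-3 after a
        // 200-step warm-up and is multiplied by sqrt(0.1) at each listed epoch boundary
        // (epochs are converted to iteration counts via epoch * 60000 / batchSize).
        // "adam" falls back to the default Adam settings.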
switch (optimizerName) {
case "sgd":
                // epochs at which to change the learning rate
int[] epochs;
if (arguments.isPreTrained()) {
epochs = new int[] {2, 5, 8};
} else {
epochs = new int[] {20, 60, 90, 120, 180};
}
int[] steps = Arrays.stream(epochs).map(k -> k * 60000 / batchSize).toArray();
MultiFactorTracker learningRateTracker =
LearningRateTracker.multiFactorTracker()
.setSteps(steps)
.optBaseLearningRate(1e-3f)
.optFactor((float) Math.sqrt(.1f))
.optWarmUpBeginLearningRate(1e-4f)
.optWarmUpSteps(200)
.build();
return Optimizer.sgd()
.setLearningRateTracker(learningRateTracker)
.optWeightDecays(0.001f)
.optClipGrad(5f)
.build();
case "adam":
return Optimizer.adam().build();
default:
throw new IllegalArgumentException("Unknown optimizer");
}
}
private static RandomAccessDataset getDataset(Dataset.Usage usage, Arguments arguments)
throws IOException {
Pipeline pipeline =
new Pipeline(
new ToTensor(),
new Normalize(Cifar10.NORMALIZE_MEAN, Cifar10.NORMALIZE_STD));
Cifar10 cifar10 =
Cifar10.builder()
.optUsage(usage)
.setSampling(arguments.getBatchSize(), true)
.optLimit(arguments.getLimit())
.optPipeline(pipeline)
.build();
cifar10.prepare(new ProgressBar());
return cifar10;
}
private static class OptimizerArguments extends Arguments {
private String optimizer;
public OptimizerArguments(CommandLine cmd) {
super(cmd);
if (cmd.hasOption("optimizer")) {
optimizer = cmd.getOptionValue("optimizer");
} else {
optimizer = "adam";
}
}
public static Options getOptions() {
Options options = Arguments.getOptions();
options.addOption(
Option.builder("z")
.longOpt("optimizer")
.hasArg()
.argName("OPTIMIZER")
.desc("The optimizer to use.")
.build());
return options;
}
public String getOptimizer() {
return optimizer;
}
}
}
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains examples of training models. */
package ai.djl.examples.training;
|
0
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training
|
java-sources/ai/djl/examples/0.6.0/ai/djl/examples/training/transferlearning/TrainResnetWithCifar10.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.examples.training.transferlearning;
import ai.djl.Application;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.ModelException;
import ai.djl.basicdataset.Cifar10;
import ai.djl.basicmodelzoo.BasicModelZoo;
import ai.djl.basicmodelzoo.cv.classification.ResNetV1;
import ai.djl.examples.training.util.Arguments;
import ai.djl.inference.Predictor;
import ai.djl.metric.Metrics;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.transform.Normalize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.ImageClassificationTranslator;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.Blocks;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.SymbolBlock;
import ai.djl.nn.core.Linear;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.evaluator.Accuracy;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Pipeline;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An example of training an image classification (ResNet for Cifar10) model.
*
* <p>See this <a
* href="https://github.com/awslabs/djl/blob/master/examples/docs/train_cifar10_resnet.md">doc</a>
* for information about this example.
*/
public final class TrainResnetWithCifar10 {
private static final Logger logger = LoggerFactory.getLogger(TrainResnetWithCifar10.class);
private TrainResnetWithCifar10() {}
public static void main(String[] args)
throws ParseException, ModelException, IOException, TranslateException {
TrainResnetWithCifar10.runExample(args);
}
public static TrainingResult runExample(String[] args)
throws IOException, ParseException, ModelException, TranslateException {
Arguments arguments = Arguments.parseArgs(args);
try (Model model = getModel(arguments)) {
// get training dataset
RandomAccessDataset trainDataset = getDataset(Dataset.Usage.TRAIN, arguments);
RandomAccessDataset validationDataset = getDataset(Dataset.Usage.TEST, arguments);
// setup training configuration
DefaultTrainingConfig config = setupTrainingConfig(arguments);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
                /*
                 * CIFAR10 images are 32x32 color, preprocessed into an NCHW NDArray.
                 * The first axis is the batch axis; we can use 1 for initialization.
                 */
Shape inputShape = new Shape(1, 3, 32, 32);
// initialize trainer with proper input shape
trainer.initialize(inputShape);
EasyTrain.fit(trainer, arguments.getEpoch(), trainDataset, validationDataset);
TrainingResult result = trainer.getTrainingResult();
model.setProperty("Epoch", String.valueOf(result.getEpoch()));
model.setProperty(
"Accuracy",
String.format("%.5f", result.getValidateEvaluation("Accuracy")));
model.setProperty("Loss", String.format("%.5f", result.getValidateLoss()));
Path modelPath = Paths.get("build/model");
model.save(modelPath, "resnetv1");
Classifications classifications = testSaveParameters(model.getBlock(), modelPath);
logger.info("Predict result: {}", classifications.topK(3));
return result;
}
}
}
private static Model getModel(Arguments arguments)
throws IOException, ModelNotFoundException, MalformedModelException {
boolean isSymbolic = arguments.isSymbolic();
boolean preTrained = arguments.isPreTrained();
Map<String, String> options = arguments.getCriteria();
Criteria.Builder<Image, Classifications> builder =
Criteria.builder()
.optApplication(Application.CV.IMAGE_CLASSIFICATION)
.setTypes(Image.class, Classifications.class)
.optProgress(new ProgressBar())
.optArtifactId("resnet");
if (isSymbolic) {
// load the model
builder.optGroupId("ai.djl.mxnet");
if (options == null) {
builder.optFilter("layers", "50");
builder.optFilter("flavor", "v1");
} else {
builder.optFilters(options);
}
Model model = ModelZoo.loadModel(builder.build());
SequentialBlock newBlock = new SequentialBlock();
SymbolBlock block = (SymbolBlock) model.getBlock();
block.removeLastBlock();
newBlock.add(block);
            // the original model doesn't include a flatten layer,
            // so apply the flatten here
newBlock.add(Blocks.batchFlattenBlock());
newBlock.add(Linear.builder().setOutChannels(10).build());
model.setBlock(newBlock);
if (!preTrained) {
model.getBlock().clear();
}
return model;
}
// imperative resnet50
if (preTrained) {
builder.optGroupId(BasicModelZoo.GROUP_ID);
if (options == null) {
builder.optFilter("layers", "50");
builder.optFilter("flavor", "v1");
builder.optFilter("dataset", "cifar10");
} else {
builder.optFilters(options);
}
// load pre-trained imperative ResNet50 from DJL model zoo
return ModelZoo.loadModel(builder.build());
} else {
// construct new ResNet50 without pre-trained weights
Model model = Model.newInstance("resnetv1");
Block resNet50 =
ResNetV1.builder()
.setImageShape(new Shape(3, 32, 32))
.setNumLayers(50)
.setOutSize(10)
.build();
model.setBlock(resNet50);
return model;
}
}
private static Classifications testSaveParameters(Block block, Path path)
throws IOException, ModelException, TranslateException {
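        // Reload the saved parameters through the model zoo (a Criteria with an explicit
        // block and model name) and run one prediction to verify the checkpoint round-trips.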
URL synsetUrl =
new URL(
"https://mlrepo.djl.ai/model/cv/image_classification/ai/djl/mxnet/synset_cifar10.txt");
ImageClassificationTranslator translator =
ImageClassificationTranslator.builder()
.addTransform(new ToTensor())
.addTransform(new Normalize(Cifar10.NORMALIZE_MEAN, Cifar10.NORMALIZE_STD))
.optSynsetUrl(synsetUrl)
.optApplySoftmax(true)
.build();
Image img = ImageFactory.getInstance().fromUrl("src/test/resources/airplane1.png");
Criteria<Image, Classifications> criteria =
Criteria.builder()
.setTypes(Image.class, Classifications.class)
.optModelUrls(path.toUri().toString())
.optTranslator(translator)
.optBlock(block)
.optModelName("resnetv1")
.build();
try (ZooModel<Image, Classifications> model = ModelZoo.loadModel(criteria);
Predictor<Image, Classifications> predictor = model.newPredictor()) {
return predictor.predict(img);
}
}
private static DefaultTrainingConfig setupTrainingConfig(Arguments arguments) {
return new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
.addEvaluator(new Accuracy())
.optDevices(Device.getDevices(arguments.getMaxGpus()))
.addTrainingListeners(TrainingListener.Defaults.logging(arguments.getOutputDir()));
}
private static RandomAccessDataset getDataset(Dataset.Usage usage, Arguments arguments)
throws IOException {
Pipeline pipeline =
new Pipeline(
new ToTensor(),
new Normalize(Cifar10.NORMALIZE_MEAN, Cifar10.NORMALIZE_STD));
Cifar10 cifar10 =
Cifar10.builder()
.optUsage(usage)
.setSampling(arguments.getBatchSize(), true)
.optLimit(arguments.getLimit())
.optPipeline(pipeline)
.build();
cifar10.prepare(new ProgressBar());
return cifar10;
}
}
|