// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/RpcRepository.java
/*
* Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.Application;
import ai.djl.engine.rpc.RpcEngine;
import ai.djl.engine.rpc.RpcTranslatorFactory;
import ai.djl.repository.zoo.DefaultModelZoo;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A {@code RpcRepository} is a {@link Repository} backed by a remote model server.
*
* @see Repository
*/
public class RpcRepository extends AbstractRepository {
private String artifactId;
private String modelName;
RpcRepository(String name, URI uri) {
super(name, uri);
modelName = arguments.get("model_name");
artifactId = arguments.get("artifact_id");
if (artifactId == null) {
artifactId = "rpc";
}
if (modelName == null) {
modelName = artifactId;
}
arguments.put("translatorFactory", RpcTranslatorFactory.class.getName());
arguments.put("engine", RpcEngine.ENGINE_NAME);
arguments.put("djl_rpc_uri", uri.toString());
}
/** {@inheritDoc} */
@Override
public boolean isRemote() {
return true;
}
/** {@inheritDoc} */
@Override
public Metadata locate(MRL mrl) throws IOException {
return getMetadata();
}
/** {@inheritDoc} */
@Override
public Artifact resolve(MRL mrl, Map<String, String> filter) throws IOException {
List<Artifact> artifacts = locate(mrl).getArtifacts();
if (artifacts.isEmpty()) {
return null;
}
return artifacts.get(0);
}
/** {@inheritDoc} */
@Override
public List<MRL> getResources() {
MRL mrl = MRL.undefined(this, DefaultModelZoo.GROUP_ID, artifactId);
return Collections.singletonList(mrl);
}
/** {@inheritDoc} */
@Override
protected void download(Path tmp, URI baseUri, Artifact.Item item, Progress progress) {}
private synchronized Metadata getMetadata() {
Artifact artifact = new Artifact();
artifact.setName(modelName);
artifact.getArguments().putAll(arguments);
Map<String, Artifact.Item> files = new ConcurrentHashMap<>();
Artifact.Item item = new Artifact.Item();
item.setUri(uri.getPath());
item.setName(""); // avoid creating extra folder
item.setArtifact(artifact);
item.setSize(0);
files.put(artifactId, item);
artifact.setFiles(files);
Metadata metadata = new Metadata.MatchAllMetadata();
metadata.setArtifactId(artifactId);
metadata.setArtifacts(Collections.singletonList(artifact));
String hash = Utils.hash(uri.toString());
MRL mrl = model(Application.UNDEFINED, DefaultModelZoo.GROUP_ID, hash);
metadata.setRepositoryUri(mrl.toURI());
return metadata;
}
}

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/SimpleRepository.java
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.Application;
import ai.djl.repository.Artifact.Item;
import ai.djl.repository.zoo.DefaultModelZoo;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A {@code SimpleRepository} is a {@link Repository} containing only a single artifact without
* requiring a "metadata.json" file.
*
* @see Repository
*/
public class SimpleRepository extends AbstractRepository {
private static final Logger logger = LoggerFactory.getLogger(SimpleRepository.class);
private Path path;
private String artifactId;
private String modelName;
private boolean isRemote;
private Metadata metadata;
private boolean resolved;
/**
* (Internal) Constructs a SimpleRepository.
*
* <p>Use {@link Repository#newInstance(String, String)}.
*
* @param name the name of the repository
* @param uri the base URI of the repository
* @param path the path to the repository
*/
protected SimpleRepository(String name, URI uri, Path path) {
super(name, uri);
this.path = path;
isRemote = FilenameUtils.isArchiveFile(path.toString());
modelName = arguments.get("model_name");
artifactId = arguments.get("artifact_id");
if (artifactId == null) {
if (isRemote) {
artifactId = FilenameUtils.getNamePart(path.toFile().getName());
} else {
artifactId = path.toFile().getName();
}
}
if (modelName == null) {
modelName = artifactId;
}
}
/** {@inheritDoc} */
@Override
public boolean isRemote() {
return isRemote;
}
/** {@inheritDoc} */
@Override
public URI getBaseUri() {
return path.toUri();
}
/** {@inheritDoc} */
@Override
public Metadata locate(MRL mrl) throws IOException {
return getMetadata();
}
/** {@inheritDoc} */
@Override
public Artifact resolve(MRL mrl, Map<String, String> filter) throws IOException {
List<Artifact> artifacts = locate(mrl).getArtifacts();
if (artifacts.isEmpty()) {
return null;
}
return artifacts.get(0);
}
/** {@inheritDoc} */
@Override
public Path getResourceDirectory(Artifact artifact) throws IOException {
if (isRemote) {
return super.getResourceDirectory(artifact);
}
return path;
}
/** {@inheritDoc} */
@Override
protected void download(Path tmp, URI baseUri, Artifact.Item item, Progress progress)
throws IOException {
logger.debug("Extracting artifact: {} ...", path);
try (InputStream is = new BufferedInputStream(Files.newInputStream(path))) {
save(is, tmp, item, progress);
}
}
/** {@inheritDoc} */
@Override
public void prepare(Artifact artifact, Progress progress) throws IOException {
if (isRemote) {
super.prepare(artifact, progress);
} else {
logger.debug("Skip prepare for local repository.");
}
}
/** {@inheritDoc} */
@Override
public Path getCacheDirectory() throws IOException {
if (isRemote) {
return super.getCacheDirectory();
}
return path;
}
/** {@inheritDoc} */
@Override
protected URI resolvePath(Item item, String path) throws IOException {
if (isRemote) {
return super.resolvePath(item, path);
}
return this.path.resolve(item.getName()).toUri();
}
/** {@inheritDoc} */
@Override
public List<MRL> getResources() {
if (!Files.exists(path)) {
logger.debug("Specified path doesn't exists: {}", path.toAbsolutePath());
return Collections.emptyList();
}
MRL mrl = MRL.undefined(this, DefaultModelZoo.GROUP_ID, artifactId);
return Collections.singletonList(mrl);
}
private synchronized Metadata getMetadata() throws IOException {
if (resolved) {
return metadata;
}
resolved = true;
metadata = new Metadata.MatchAllMetadata();
metadata.setRepositoryUri(URI.create(""));
metadata.setArtifactId(artifactId);
if (!Files.exists(path)) {
logger.debug("Specified path doesn't exists: {}", path.toAbsolutePath());
return metadata;
}
Artifact artifact = new Artifact();
artifact.setName(modelName);
artifact.getArguments().putAll(arguments);
Map<String, Item> files = new ConcurrentHashMap<>();
if (isRemote) {
Artifact.Item item = new Artifact.Item();
String uri = path.toAbsolutePath().toUri().toString();
item.setUri(uri);
item.setName(""); // avoid creating extra folder
item.setArtifact(artifact);
item.setSize(Files.size(path));
files.put(artifactId, item);
artifact.setFiles(files);
String hash = Utils.hash(uri);
MRL mrl = model(Application.UNDEFINED, DefaultModelZoo.GROUP_ID, hash);
metadata.setRepositoryUri(mrl.toURI());
} else {
if (Files.isDirectory(path)) {
File[] fileList = path.toFile().listFiles();
if (fileList != null) {
for (File f : fileList) {
Item item = new Item();
item.setName(f.getName());
item.setSize(f.length());
item.setArtifact(artifact);
files.put(f.getName(), item);
}
}
} else {
logger.warn("Simple repository pointing to a non-archive file.");
}
}
artifact.setFiles(files);
metadata.setArtifacts(Collections.singletonList(artifact));
return metadata;
}
}
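
The constructor above is protected, so a `SimpleRepository` is normally obtained through `Repository.newInstance`. A minimal usage sketch, assuming a hypothetical local model directory `/tmp/my-model`:

```java
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;

public final class SimpleRepositoryExample {

    public static void main(String[] args) {
        // A plain directory (not an archive) resolves to a local, non-remote repository.
        // "/tmp/my-model" is a hypothetical path used only for illustration.
        Repository repo = Repository.newInstance("local", "/tmp/my-model");
        System.out.println(repo.isRemote()); // false for a plain directory

        // Each resource is exposed under the DefaultModelZoo group ID, with the
        // directory name as the default artifact ID (unless overridden by arguments).
        for (MRL mrl : repo.getResources()) {
            System.out.println(mrl.getGroupId() + ":" + mrl.getArtifactId());
        }
    }
}
```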

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/SimpleUrlRepository.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import ai.djl.Application;
import ai.djl.repository.zoo.DefaultModelZoo;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A {@code SimpleUrlRepository} is a {@link Repository} that contains an archive file from an HTTP URL.
*
* @see Repository
*/
public class SimpleUrlRepository extends AbstractRepository {
private static final Logger logger = LoggerFactory.getLogger(SimpleUrlRepository.class);
private String fileName;
private String artifactId;
private String modelName;
private Metadata metadata;
private boolean resolved;
SimpleUrlRepository(String name, URI uri, String fileName) {
super(name, uri);
this.fileName = fileName;
modelName = arguments.get("model_name");
artifactId = arguments.get("artifact_id");
if (artifactId == null) {
artifactId = FilenameUtils.getNamePart(fileName);
}
if (modelName == null) {
modelName = artifactId;
}
}
/** {@inheritDoc} */
@Override
public boolean isRemote() {
return true;
}
/** {@inheritDoc} */
@Override
public Metadata locate(MRL mrl) throws IOException {
return getMetadata();
}
/** {@inheritDoc} */
@Override
public Artifact resolve(MRL mrl, Map<String, String> filter) throws IOException {
List<Artifact> artifacts = locate(mrl).getArtifacts();
if (artifacts.isEmpty()) {
return null;
}
return artifacts.get(0);
}
/** {@inheritDoc} */
@Override
public List<MRL> getResources() {
try {
Metadata m = getMetadata();
if (m != null && !m.getArtifacts().isEmpty()) {
MRL mrl = MRL.undefined(this, m.getGroupId(), m.getArtifactId());
return Collections.singletonList(mrl);
}
} catch (IOException e) {
logger.warn("Failed to connect URL: {}", uri, e);
}
return Collections.emptyList();
}
/** {@inheritDoc} */
@Override
protected void download(Path tmp, URI baseUri, Artifact.Item item, Progress progress)
throws IOException {
logger.debug("Downloading artifact: {} ...", uri);
try (InputStream is = new BufferedInputStream(uri.toURL().openStream())) {
save(is, tmp, item, progress);
}
}
private synchronized Metadata getMetadata() throws IOException {
if (resolved) {
return metadata;
}
Artifact artifact = new Artifact();
artifact.setName(modelName);
artifact.getArguments().putAll(arguments);
Map<String, Artifact.Item> files = new ConcurrentHashMap<>();
Artifact.Item item = new Artifact.Item();
item.setUri(uri.getPath());
item.setName(""); // avoid creating extra folder
item.setExtension(FilenameUtils.getFileType(fileName));
item.setArtifact(artifact);
item.setSize(getContentLength());
files.put(artifactId, item);
artifact.setFiles(files);
metadata = new Metadata.MatchAllMetadata();
metadata.setArtifactId(artifactId);
metadata.setArtifacts(Collections.singletonList(artifact));
String hash = Utils.hash(uri.toString());
MRL mrl = model(Application.UNDEFINED, DefaultModelZoo.GROUP_ID, hash);
metadata.setRepositoryUri(mrl.toURI());
return metadata;
}
private long getContentLength() throws IOException {
String scheme = uri.getScheme();
if ("http".equalsIgnoreCase(scheme) || "https".equalsIgnoreCase(scheme)) {
HttpURLConnection conn = null;
try {
resolved = true;
conn = (HttpURLConnection) uri.toURL().openConnection();
conn.setRequestMethod("HEAD");
int code = conn.getResponseCode();
if (code != 200) {
logger.debug("Failed detect content length, error code: {}", code);
return -1;
}
return conn.getContentLength();
} finally {
if (conn != null) {
conn.disconnect();
}
}
}
return -1;
}
}
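
As a sketch, an http(s) URL pointing at an archive file is what typically resolves to this repository type via `Repository.newInstance`; the URL below is hypothetical, and the artifact ID defaults to the archive's base name:

```java
import ai.djl.repository.Repository;

public final class SimpleUrlRepositoryExample {

    public static void main(String[] args) {
        // Hypothetical archive URL; the artifact ID defaults to the base
        // name of the file, i.e. "resnet50".
        String url = "https://example.com/models/resnet50.tar.gz";
        Repository repo = Repository.newInstance("remote", url);
        System.out.println(repo.isRemote()); // always true for this repository type
    }
}
```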

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/Version.java
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
/**
* A {@link Version} is used for the versioning of an {@link Artifact}.
*
* <p>The version should be a series of "." separated numbers and strings. For a development
* version, the version should end with "-SNAPSHOT". The most common case is semantic versioning
* which follows the format "major.minor.incremental".
*/
public class Version implements Comparable<Version> {
private String version;
private boolean snapshot;
private List<Object> comparable;
/**
* Constructs a version with the version string.
*
* @param version the version string
*/
public Version(String version) {
this.version = version;
comparable = new ArrayList<>();
String[] parts = version.split("\\.", 5);
int length = parts.length;
snapshot = parts[length - 1].endsWith("-SNAPSHOT");
if (snapshot) {
parts[length - 1] = parts[length - 1].replaceAll("-SNAPSHOT", "");
}
for (String part : parts) {
Integer value = tryParseInt(part);
if (value != null) {
comparable.add(value);
} else {
comparable.add(part);
}
}
}
/** {@inheritDoc} */
@Override
public int compareTo(Version otherVersion) {
Comp comp = new Comp();
List<Object> other = otherVersion.comparable;
int currentSize = comparable.size();
int otherSize = other.size();
int size = Math.min(currentSize, otherSize);
for (int i = 0; i < size; ++i) {
int ret = comp.compare(comparable.get(i), other.get(i));
if (ret != 0) {
return ret;
}
}
return Integer.compare(currentSize, otherSize);
}
/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return compareTo((Version) o) == 0;
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return Objects.hash(comparable);
}
/**
* Returns the major version (assuming major.minor.incremental...) of the version.
*
* @return the major version
*/
public int getMajorVersion() {
return get(0);
}
/**
* Returns the minor version (assuming major.minor.incremental...) of the version.
*
* @return the minor version
*/
public int getMinorVersion() {
return get(1);
}
/**
* Returns the incremental version (assuming major.minor.incremental...) of the version.
*
* @return the incremental version
*/
public int getIncrementalVersion() {
return get(2);
}
/**
* Returns true if this is a snapshot version.
*
* @return true if this is a snapshot version
*/
public boolean isSnapshot() {
return snapshot;
}
/**
* Returns the dot separated part at a given index if it is an integer.
*
* @param index the index for dot separated parts (not characters in the string)
* @return the integer parsed to an int if it exists, or zero otherwise
*/
private int get(int index) {
if (comparable.size() > index) {
Object c = comparable.get(index);
if (c instanceof Integer) {
return (Integer) c;
}
}
return 0;
}
private static Integer tryParseInt(String s) {
try {
long longValue = Long.parseLong(s);
if (longValue > Integer.MAX_VALUE) {
return null;
}
return (int) longValue;
} catch (NumberFormatException e) {
return null;
}
}
/** {@inheritDoc} */
@Override
public String toString() {
return version;
}
/** A comparator for version parts, each of which is either an integer or a string. */
private static final class Comp implements Comparator<Object>, Serializable {
private static final long serialVersionUID = 1L;
/** {@inheritDoc} */
@Override
public int compare(Object o1, Object o2) {
if (o1 instanceof Integer) {
if (o2 instanceof Integer) {
return ((Integer) o1).compareTo((Integer) o2);
}
return -1;
}
if (o2 instanceof Integer) {
return 1;
}
return ((String) o1).compareTo((String) o2);
}
}
}
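
A short sketch of the ordering rules implemented above: parts are compared left to right, an integer part sorts before a string part at the same position, and "-SNAPSHOT" is stripped for comparison but reported by isSnapshot():

```java
import ai.djl.repository.Version;

public final class VersionExample {

    public static void main(String[] args) {
        Version v1 = new Version("0.33.0");
        Version v2 = new Version("0.34.0-SNAPSHOT");

        System.out.println(v1.compareTo(v2) < 0); // true: 33 < 34
        System.out.println(v2.isSnapshot());      // true
        System.out.println(v2.getMinorVersion()); // 34

        // At the same position, an integer part sorts before a string part.
        System.out.println(new Version("0.34.1").compareTo(new Version("0.34.x")) < 0); // true
    }
}
```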

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/VersionRange.java
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
/**
* A {@code VersionRange} is a set of {@link Restriction}s that match some {@link Version}s.
*
* <p>A {@code VersionRange} should be constructed using {@link VersionRange#parse(String)}. The
* format used by the version ranges matches the <a
* href="https://cwiki.apache.org/confluence/display/MAVENOLD/Dependency+Mediation+and+Conflict+Resolution#DependencyMediationandConflictResolution-DependencyVersionRanges">maven
* version range syntax</a>.
*/
public final class VersionRange {
private static final VersionRange ANY = new VersionRange(null, Collections.emptyList());
private Version recommendedVersion;
private List<Restriction> restrictions;
private VersionRange(Version recommendedVersion, List<Restriction> restrictions) {
this.recommendedVersion = recommendedVersion;
this.restrictions = restrictions;
}
/**
* Returns the recommended version in the range.
*
* @return the recommended version in the range
*/
public Version getRecommendedVersion() {
return recommendedVersion;
}
/**
* Returns the restrictions that compose the range.
*
* @return the restrictions that compose the range
*/
public List<Restriction> getRestrictions() {
return restrictions;
}
/**
* Creates a new version range from a string version range.
*
* @param spec the string version range
* @return the {@link VersionRange}
*/
public static VersionRange parse(String spec) {
if (spec == null || spec.isEmpty()) {
return ANY;
}
List<Restriction> restrictions = new ArrayList<>();
String process = spec;
Version version = null;
Version upperBound = null;
Version lowerBound = null;
while (process.startsWith("[") || process.startsWith("(")) {
int index1 = process.indexOf(')');
int index2 = process.indexOf(']');
int index = index2;
if (index2 < 0 || index1 < index2) {
if (index1 >= 0) {
index = index1;
}
}
if (index < 0) {
throw new IllegalArgumentException("Unbounded range: " + spec);
}
Restriction restriction = parseRestriction(process.substring(0, index + 1));
if (lowerBound == null) {
lowerBound = restriction.getLowerBound();
}
if (upperBound != null) {
if (restriction.getLowerBound() == null
|| restriction.getLowerBound().compareTo(upperBound) < 0) {
throw new IllegalArgumentException("Ranges overlap: " + spec);
}
}
restrictions.add(restriction);
upperBound = restriction.getUpperBound();
process = process.substring(index + 1).trim();
if (process.length() > 0 && process.startsWith(",")) {
process = process.substring(1).trim();
}
}
if (process.length() > 0) {
if (!restrictions.isEmpty()) {
throw new IllegalArgumentException(
"Only fully-qualified sets allowed in multiple set scenario: " + spec);
}
version = new Version(process);
restrictions.add(Restriction.EVERYTHING);
}
return new VersionRange(version, restrictions);
}
private static Restriction parseRestriction(String spec) {
boolean lowerBoundInclusive = spec.startsWith("[");
boolean upperBoundInclusive = spec.endsWith("]");
String process = spec.substring(1, spec.length() - 1).trim();
Restriction restriction;
int index = process.indexOf(',');
if (index < 0) {
if (!lowerBoundInclusive || !upperBoundInclusive) {
throw new IllegalArgumentException(
"Single version must be surrounded by []: " + spec);
}
Version version = new Version(process);
restriction = new Restriction(version, true, version, true);
} else {
String lowerBound = process.substring(0, index).trim();
String upperBound = process.substring(index + 1).trim();
if (lowerBound.equals(upperBound)) {
throw new IllegalArgumentException(
"Range cannot have identical boundaries: " + spec);
}
Version lowerVersion = null;
if (lowerBound.length() > 0) {
lowerVersion = new Version(lowerBound);
}
Version upperVersion = null;
if (upperBound.length() > 0) {
upperVersion = new Version(upperBound);
}
if (upperVersion != null
&& lowerVersion != null
&& upperVersion.compareTo(lowerVersion) < 0) {
throw new IllegalArgumentException("Range defies version ordering: " + spec);
}
restriction =
new Restriction(
lowerVersion, lowerBoundInclusive, upperVersion, upperBoundInclusive);
}
return restriction;
}
/**
* Filters the provided artifacts to those that match the version range.
*
* @param artifacts the artifacts to filter
* @return the filtered artifacts
*/
public List<Artifact> matches(List<Artifact> artifacts) {
return artifacts.stream().filter(this::contains).collect(Collectors.toList());
}
/**
* Returns true if a version falls within this range.
*
* @param version the version to test
* @return true if the version falls within this range
*/
public boolean contains(Version version) {
if (ANY == this) {
return true;
}
if (recommendedVersion != null) {
return recommendedVersion.equals(version);
}
for (Restriction restriction : restrictions) {
if (restriction.containsVersion(version)) {
return true;
}
}
return false;
}
/**
* Returns true if the artifact's version falls within this range.
*
* @param artifact the artifact to test
* @return true if the artifact's version falls within this range
*/
public boolean contains(Artifact artifact) {
return artifact.getVersion() == null || contains(artifact.getParsedVersion());
}
/** {@inheritDoc} */
@Override
public String toString() {
if (recommendedVersion != null) {
return recommendedVersion.toString();
}
StringBuilder buf = new StringBuilder();
for (Iterator<Restriction> i = restrictions.iterator(); i.hasNext(); ) {
Restriction r = i.next();
buf.append(r.toString());
if (i.hasNext()) {
buf.append(',');
}
}
return buf.toString();
}
}
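
A sketch of the Maven-style syntax accepted by parse: bracketed restrictions give inclusive [..] and exclusive (..) bounds, while a bare version becomes a "recommended" version that only matches itself:

```java
import ai.djl.repository.Version;
import ai.djl.repository.VersionRange;

public final class VersionRangeExample {

    public static void main(String[] args) {
        // [0.30.0,0.34.0) matches 0.30.0 (inclusive) up to, but excluding, 0.34.0.
        VersionRange range = VersionRange.parse("[0.30.0,0.34.0)");
        System.out.println(range.contains(new Version("0.33.0"))); // true
        System.out.println(range.contains(new Version("0.34.0"))); // false

        // A bare version is treated as a recommended version: it matches only itself.
        VersionRange pinned = VersionRange.parse("0.34.0");
        System.out.println(pinned.contains(new Version("0.34.0"))); // true
        System.out.println(pinned.contains(new Version("0.34.1"))); // false
    }
}
```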

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/package-info.java
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains a Maven-based Repository format for creating repositories of artifacts such as datasets
* and model zoos.
*
* <p>There are also helper classes for Model Zoos ({@link ai.djl.repository.zoo}).
*
* @see ai.djl.repository.Repository
*/
package ai.djl.repository;

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/BaseModelLoader.java
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.Model;
import ai.djl.engine.Engine;
import ai.djl.nn.Block;
import ai.djl.nn.BlockFactory;
import ai.djl.repository.Artifact;
import ai.djl.repository.MRL;
import ai.djl.translate.DefaultTranslatorFactory;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.Pair;
import ai.djl.util.Progress;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Type;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.Properties;
/** Shared code for the {@link ModelLoader} implementations. */
public class BaseModelLoader implements ModelLoader {
private static final Logger logger = LoggerFactory.getLogger(BaseModelLoader.class);
protected MRL mrl;
protected TranslatorFactory defaultFactory;
/**
* Constructs a {@link ModelLoader} given the repository, mrl, and version.
*
* @param mrl the mrl of the model to load
*/
public BaseModelLoader(MRL mrl) {
this.mrl = mrl;
defaultFactory = new DefaultTranslatorFactory();
}
/** {@inheritDoc} */
@Override
public String getGroupId() {
return mrl.getGroupId();
}
/** {@inheritDoc} */
@Override
public String getArtifactId() {
return mrl.getArtifactId();
}
/** {@inheritDoc} */
@Override
public Application getApplication() {
return mrl.getApplication();
}
/** {@inheritDoc} */
@Override
public MRL getMrl() {
return mrl;
}
/** {@inheritDoc} */
@Override
public <I, O> ZooModel<I, O> loadModel(Criteria<I, O> criteria)
throws IOException, ModelNotFoundException, MalformedModelException {
Artifact artifact = mrl.match(criteria.getFilters());
if (artifact == null) {
throw new ModelNotFoundException("No matching filter found");
}
Progress progress = criteria.getProgress();
Map<String, Object> arguments = artifact.getArguments(criteria.getArguments());
Map<String, String> options = artifact.getOptions(criteria.getOptions());
try {
TranslatorFactory factory = getTranslatorFactory(criteria, arguments);
Class<I> input = criteria.getInputClass();
Class<O> output = criteria.getOutputClass();
if (factory == null || !factory.isSupported(input, output)) {
factory = defaultFactory;
if (!factory.isSupported(input, output)) {
throw new ModelNotFoundException(getFactoryLookupErrorMessage(factory));
}
}
mrl.prepare(artifact, progress);
if (progress != null) {
progress.reset("Loading", 2);
progress.update(1);
}
Path modelPath = mrl.getRepository().getResourceDirectory(artifact);
Path modelDir = Files.isRegularFile(modelPath) ? modelPath.getParent() : modelPath;
if (modelDir == null) {
throw new AssertionError("Directory should not be null.");
}
modelDir = Utils.getNestedModelDir(modelDir);
loadServingProperties(modelDir, arguments, options);
Application application = criteria.getApplication();
if (application != Application.UNDEFINED) {
arguments.put("application", application.getPath());
}
String engine = criteria.getEngine();
if (engine == null) {
// get engine from serving.properties
engine = (String) arguments.get("engine");
}
// Use the engine specified in Criteria (or serving.properties) if there is one.
// Otherwise pick from the model zoo's supported engines, preferring the default
// engine but accepting any supported engine that is available.
// If neither is specified, or the model zoo is null, fall back to the default engine.
if (engine == null) {
ModelZoo modelZoo = ModelZoo.getModelZoo(mrl.getGroupId());
if (modelZoo != null) {
String defaultEngine = Engine.getDefaultEngineName();
for (String supportedEngine : modelZoo.getSupportedEngines()) {
if (supportedEngine.equals(defaultEngine)) {
engine = supportedEngine;
break;
} else if (Engine.hasEngine(supportedEngine)) {
engine = supportedEngine;
}
}
if (engine == null) {
throw new ModelNotFoundException(
"No supported engine available for model zoo: "
+ modelZoo.getGroupId());
}
}
}
if (engine != null && !Engine.hasEngine(engine)) {
throw new ModelNotFoundException(engine + " is not supported");
}
String modelName = criteria.getModelName();
if (modelName == null) {
modelName = options.get("modelName");
if (modelName == null) {
modelName = artifact.getName();
}
}
Model model =
createModel(
modelDir,
modelName,
criteria.getDevice(),
criteria.getBlock(),
arguments,
engine);
model.load(modelPath, null, options);
Translator<I, O> translator = factory.newInstance(input, output, model, arguments);
return new ZooModel<>(model, translator);
} catch (TranslateException e) {
throw new ModelNotFoundException("No matching translator found", e);
} finally {
if (progress != null) {
progress.end();
}
}
}
/** {@inheritDoc} */
@Override
public <I, O> boolean isDownloaded(Criteria<I, O> criteria)
throws IOException, ModelNotFoundException {
Artifact artifact = mrl.match(criteria.getFilters());
if (artifact == null) {
throw new ModelNotFoundException("No matching filter found");
}
return mrl.isPrepared(artifact);
}
/** {@inheritDoc} */
@Override
public <I, O> void downloadModel(Criteria<I, O> criteria, Progress progress)
throws IOException, ModelNotFoundException {
Artifact artifact = mrl.match(criteria.getFilters());
if (artifact == null) {
throw new ModelNotFoundException("No matching filter found");
}
mrl.prepare(artifact, progress);
}
protected Model createModel(
Path modelPath,
String name,
Device device,
Block block,
Map<String, Object> arguments,
String engine)
throws IOException {
Model model = Model.newInstance(name, device, engine);
if (block == null) {
Object bf = arguments.get("blockFactory");
if (bf instanceof BlockFactory) {
block = ((BlockFactory) bf).newBlock(model, modelPath, arguments);
} else {
String className = (String) bf;
BlockFactory factory =
ClassLoaderUtils.findImplementation(
modelPath, BlockFactory.class, className);
if (factory != null) {
block = factory.newBlock(model, modelPath, arguments);
} else if (className != null) {
throw new IllegalArgumentException("Failed to load BlockFactory: " + className);
}
}
}
if (block != null) {
model.setBlock(block);
}
for (Map.Entry<String, Object> entry : arguments.entrySet()) {
model.setProperty(entry.getKey(), entry.getValue().toString());
}
return model;
}
/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(200);
sb.append(mrl).append(" [\n");
try {
for (Artifact artifact : mrl.listArtifacts()) {
sb.append('\t').append(artifact).append('\n');
}
} catch (IOException e) {
sb.append("\tFailed load metadata.");
}
sb.append(']');
return sb.toString();
}
protected TranslatorFactory getTranslatorFactory(
Criteria<?, ?> criteria, Map<String, Object> arguments) {
TranslatorFactory factory = criteria.getTranslatorFactory();
if (factory != null) {
return factory;
}
String factoryClass = (String) arguments.get("translatorFactory");
if (factoryClass != null) {
ClassLoader cl = ClassLoaderUtils.getContextClassLoader();
factory = ClassLoaderUtils.initClass(cl, TranslatorFactory.class, factoryClass);
if (factory == null) {
logger.warn("Failed to load translatorFactory: {}", factoryClass);
}
}
return factory;
}
private String getFactoryLookupErrorMessage(TranslatorFactory factory) {
StringBuilder sb = new StringBuilder(200);
sb.append(
"No matching default translator found. The valid input and output classes are: \n");
for (Pair<Type, Type> io : factory.getSupportedTypes()) {
sb.append("\t(")
.append(io.getKey().getTypeName())
.append(", ")
.append(io.getValue().getTypeName())
.append(")\n");
}
return sb.toString();
}
private void loadServingProperties(
Path modelDir, Map<String, Object> arguments, Map<String, String> options)
throws IOException {
Path manifestFile = modelDir.resolve("serving.properties");
if (Files.isRegularFile(manifestFile)) {
Properties prop = new Properties();
try (Reader reader = Files.newBufferedReader(manifestFile)) {
prop.load(reader);
}
for (String key : prop.stringPropertyNames()) {
if (key.startsWith("option.")) {
options.putIfAbsent(key.substring(7), prop.getProperty(key));
} else {
arguments.putIfAbsent(key, prop.getProperty(key));
}
}
}
}
}
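
The serving.properties handling above routes keys by prefix: keys starting with "option." become model-loading options (with the prefix stripped) and all other keys become arguments, with putIfAbsent keeping any values already supplied through the Criteria. A self-contained sketch of that routing, using a hypothetical inline payload:

```java
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public final class ServingPropertiesExample {

    public static void main(String[] args) throws IOException {
        // A hypothetical serving.properties payload, inlined for illustration.
        String payload = "engine=PyTorch\noption.mapLocation=true\n";
        Properties prop = new Properties();
        try (Reader reader = new StringReader(payload)) {
            prop.load(reader);
        }
        Map<String, Object> arguments = new HashMap<>();
        Map<String, String> options = new HashMap<>();
        // Same routing as loadServingProperties: "option." keys become options,
        // everything else becomes arguments; pre-existing values are kept.
        for (String key : prop.stringPropertyNames()) {
            if (key.startsWith("option.")) {
                options.putIfAbsent(key.substring(7), prop.getProperty(key));
            } else {
                arguments.putIfAbsent(key, prop.getProperty(key));
            }
        }
        System.out.println(arguments); // {engine=PyTorch}
        System.out.println(options);   // {mapLocation=true}
    }
}
```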

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/Criteria.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.nn.Block;
import ai.djl.translate.DefaultTranslatorFactory;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorFactory;
import ai.djl.util.JsonUtils;
import ai.djl.util.Progress;
import com.google.gson.Gson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.MalformedURLException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* The {@code Criteria} class contains search criteria to look up a {@link ZooModel}.
*
* <p>Criteria follows the Builder pattern. See {@link Builder} for details. In DJL's builder
* convention, methods starting with {@code set} are required fields, and methods starting with
* {@code opt} are optional fields.
*
* <p>Examples
*
* <pre>
* Criteria&lt;Image, Classifications&gt; criteria = Criteria.builder()
* .setTypes(Image.class, Classifications.class) // defines input and output data type
* .optTranslator(ImageClassificationTranslator.builder().setSynsetArtifactName("synset.txt").build())
* .optModelUrls("file:///var/models/my_resnet50") // search models in specified path
* .optModelName("resnet50") // specify model file prefix
* .build();
* </pre>
*
* <p>See <a href="https://docs.djl.ai/master/docs/load_model.html#criteria-class">Model loading</a>
* for more detail.
*
* @param <I> the model input type
* @param <O> the model output type
*/
public class Criteria<I, O> {
private Application application;
private Class<I> inputClass;
private Class<O> outputClass;
private String engine;
private Device device;
private String groupId;
private String artifactId;
private ModelZoo modelZoo;
private Map<String, String> filters;
private Map<String, Object> arguments;
private Map<String, String> options;
private TranslatorFactory factory;
private Block block;
private String modelName;
private Progress progress;
private List<ModelLoader> resolvedLoaders;
Criteria(Builder<I, O> builder) {
this.application = builder.application;
this.inputClass = builder.inputClass;
this.outputClass = builder.outputClass;
this.engine = builder.engine;
this.device = builder.device;
this.groupId = builder.groupId;
this.artifactId = builder.artifactId;
this.modelZoo = builder.modelZoo;
this.filters = builder.filters;
this.arguments = builder.arguments;
this.options = builder.options;
this.factory = builder.factory;
this.block = builder.block;
this.modelName = builder.modelName;
this.progress = builder.progress;
}
/**
* Returns {@code true} if the model artifacts have been downloaded.
*
* @return {@code true} if the model artifacts have been downloaded
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
*/
public boolean isDownloaded() throws IOException, ModelNotFoundException {
if (resolvedLoaders == null) {
resolvedLoaders = resolveModelLoaders();
}
for (ModelLoader loader : resolvedLoaders) {
if (!loader.isDownloaded(this)) {
return false;
}
}
return true;
}
/**
* Downloads the model artifacts that match this criteria.
*
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
*/
public void downloadModel() throws ModelNotFoundException, IOException {
if (!isDownloaded()) {
for (ModelLoader loader : resolvedLoaders) {
loader.downloadModel(this, progress);
}
}
}
/**
* Loads the {@link ZooModel} that matches this criteria.
*
* @return the model that matches the criteria
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
public ZooModel<I, O> loadModel()
throws IOException, ModelNotFoundException, MalformedModelException {
if (resolvedLoaders == null) {
resolvedLoaders = resolveModelLoaders();
}
Logger logger = LoggerFactory.getLogger(ModelZoo.class);
Exception lastException = null;
for (ModelLoader loader : resolvedLoaders) {
try {
return loader.loadModel(this);
} catch (ModelNotFoundException e) {
lastException = e;
logger.trace("", e);
logger.debug(
"{} for ModelLoader: {}:{}",
e.getMessage(),
loader.getGroupId(),
loader.getArtifactId());
}
}
throw new ModelNotFoundException(
"No model with the specified URI or the matching Input/Output type is found.",
lastException);
}
/**
* Returns the application of the model.
*
* @return the application of the model
*/
public Application getApplication() {
return application;
}
/**
* Returns the input data type.
*
* @return the input data type
*/
public Class<I> getInputClass() {
return inputClass;
}
/**
* Returns the output data type.
*
* @return the output data type
*/
public Class<O> getOutputClass() {
return outputClass;
}
/**
* Returns the engine name.
*
* @return the engine name
*/
public String getEngine() {
return engine;
}
/**
* Returns the {@link Device} of the model to be loaded on.
*
* @return the {@link Device} of the model to be loaded on
*/
public Device getDevice() {
return device;
}
/**
* Returns the groupId of the {@link ModelZoo} to be searched.
*
* @return the groupId of the {@link ModelZoo} to be searched
*/
public String getGroupId() {
return groupId;
}
/**
* Returns the artifactId of the {@link ModelLoader} to be searched.
*
* @return the artifactIds of the {@link ModelLoader} to be searched
*/
public String getArtifactId() {
return artifactId;
}
/**
* Returns the {@link ModelZoo} to be searched.
*
* @return the {@link ModelZoo} to be searched
*/
public ModelZoo getModelZoo() {
return modelZoo;
}
/**
* Returns the search filters that must match the properties of the model.
*
* @return the search filters that must match the properties of the model.
*/
public Map<String, String> getFilters() {
return filters;
}
/**
* Returns the override configurations of the model loading arguments.
*
* @return the override configurations of the model loading arguments
*/
public Map<String, Object> getArguments() {
return arguments;
}
/**
* Returns the model loading options.
*
* @return the model loading options
*/
public Map<String, String> getOptions() {
return options;
}
/**
* Returns the optional {@link TranslatorFactory} to be used for {@link ZooModel}.
*
* @return the optional {@link TranslatorFactory} to be used for {@link ZooModel}
*/
public TranslatorFactory getTranslatorFactory() {
return factory;
}
/**
* Returns the optional {@link Block} to be used for {@link ZooModel}.
*
* @return the optional {@link Block} to be used for {@link ZooModel}
*/
public Block getBlock() {
return block;
}
/**
* Returns the optional model name to be used for {@link ZooModel}.
*
* @return the optional model name to be used for {@link ZooModel}
*/
public String getModelName() {
return modelName;
}
/**
* Returns the optional {@link Progress} for the model loading.
*
* @return the optional {@link Progress} for the model loading
*/
public Progress getProgress() {
return progress;
}
/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(128);
sb.append("Criteria:\n");
if (application != null) {
sb.append("\tApplication: ").append(application).append('\n');
}
sb.append("\tInput: ").append(inputClass);
sb.append("\n\tOutput: ").append(outputClass).append('\n');
if (engine != null) {
sb.append("\tEngine: ").append(engine).append('\n');
}
if (modelZoo != null) {
sb.append("\tModelZoo: ").append(modelZoo.getGroupId()).append('\n');
}
if (groupId != null) {
sb.append("\tGroupID: ").append(groupId).append('\n');
}
if (artifactId != null) {
sb.append("\tArtifactId: ").append(artifactId).append('\n');
}
if (filters != null) {
sb.append("\tFilter: ").append(JsonUtils.GSON.toJson(filters)).append('\n');
}
if (arguments != null) {
Gson gson = JsonUtils.builder().excludeFieldsWithoutExposeAnnotation().create();
sb.append("\tArguments: ").append(gson.toJson(arguments)).append('\n');
}
if (options != null) {
sb.append("\tOptions: ").append(JsonUtils.GSON.toJson(options)).append('\n');
}
if (factory == null) {
sb.append("\tNo translator supplied\n");
}
return sb.toString();
}
/**
* Creates a new {@link Builder} which starts with the values of this {@link Criteria}.
*
* @return a new {@link Builder}
*/
public Builder<I, O> toBuilder() {
return Criteria.builder()
.setTypes(inputClass, outputClass)
.optApplication(application)
.optEngine(engine)
.optDevice(device)
.optGroupId(groupId)
.optArtifactId(artifactId)
.optModelZoo(modelZoo)
.optFilters(filters)
.optArguments(arguments)
.optOptions(options)
.optTranslatorFactory(factory)
.optBlock(block)
.optModelName(modelName)
.optProgress(progress);
}
/**
* Creates a builder to build a {@code Criteria}.
*
* <p>Methods starting with {@code set} are required fields, and methods starting with
* {@code opt} are optional fields.
*
* @return a new builder
*/
public static Builder<?, ?> builder() {
return new Builder<>();
}
private List<ModelLoader> resolveModelLoaders() throws ModelNotFoundException {
if (inputClass == null || outputClass == null) {
throw new IllegalArgumentException("inputClass and outputClass are required.");
}
Logger logger = LoggerFactory.getLogger(ModelZoo.class);
logger.debug("Loading model with {}", this);
List<ModelZoo> list = new ArrayList<>();
if (modelZoo != null) {
logger.debug("Searching model in specified model zoo: {}", modelZoo.getGroupId());
if (groupId != null && !modelZoo.getGroupId().equals(groupId)) {
throw new ModelNotFoundException(
"groupId conflict with ModelZoo criteria."
+ modelZoo.getGroupId()
+ " v.s. "
+ groupId);
}
Set<String> supportedEngine = modelZoo.getSupportedEngines();
if (engine != null && !supportedEngine.contains(engine)) {
throw new ModelNotFoundException(
"ModelZoo doesn't support specified engine: " + engine);
}
list.add(modelZoo);
} else {
for (ModelZoo zoo : ModelZoo.listModelZoo()) {
if (groupId != null && !zoo.getGroupId().equals(groupId)) {
// filter out ModelZoo by groupId
logger.debug("Ignore ModelZoo {} by groupId: {}", zoo.getGroupId(), groupId);
continue;
}
Set<String> supportedEngine = zoo.getSupportedEngines();
if (engine != null && !supportedEngine.contains(engine)) {
logger.debug("Ignore ModelZoo {} by engine: {}", zoo.getGroupId(), engine);
continue;
}
list.add(zoo);
}
}
List<ModelLoader> loaders = new ArrayList<>();
for (ModelZoo zoo : list) {
String loaderGroupId = zoo.getGroupId();
for (ModelLoader loader : zoo.getModelLoaders()) {
Application app = loader.getApplication();
String loaderArtifactId = loader.getArtifactId();
logger.debug("Checking ModelLoader: {}", loader);
if (artifactId != null && !artifactId.equals(loaderArtifactId)) {
// filter out by model loader artifactId
logger.debug(
"artifactId mismatch for ModelLoader: {}:{}",
loaderGroupId,
loaderArtifactId);
continue;
}
if (application != Application.UNDEFINED
&& app != Application.UNDEFINED
&& !app.matches(application)) {
// filter out ModelLoader by application
logger.debug(
"application mismatch for ModelLoader: {}:{}",
loaderGroupId,
loaderArtifactId);
continue;
}
loaders.add(loader);
}
}
if (loaders.isEmpty()) {
throw new ModelNotFoundException("No model matching the criteria is found.");
}
return loaders;
}
/** A Builder to construct a {@code Criteria}. */
public static final class Builder<I, O> {
Application application;
Class<I> inputClass;
Class<O> outputClass;
String engine;
Device device;
String groupId;
String artifactId;
ModelZoo modelZoo;
Map<String, String> filters;
Map<String, Object> arguments;
Map<String, String> options;
TranslatorFactory factory;
Block block;
String modelName;
Progress progress;
Translator<I, O> translator;
Builder() {
application = Application.UNDEFINED;
}
@SuppressWarnings("unchecked")
private Builder(Class<I> inputClass, Class<O> outputClass, Builder<?, ?> parent) {
this.inputClass = inputClass;
this.outputClass = outputClass;
application = parent.application;
engine = parent.engine;
device = parent.device;
groupId = parent.groupId;
artifactId = parent.artifactId;
modelZoo = parent.modelZoo;
filters = parent.filters;
arguments = parent.arguments;
options = parent.options;
factory = parent.factory;
block = parent.block;
modelName = parent.modelName;
progress = parent.progress;
translator = (Translator<I, O>) parent.translator;
}
/**
* Creates a new {@code Builder} with the specified input and output data types.
*
* @param <P> the input data type
* @param <Q> the output data type
* @param inputClass the input class
* @param outputClass the output class
* @return a new {@code Builder} with the specified input and output data types
*/
public <P, Q> Builder<P, Q> setTypes(Class<P> inputClass, Class<Q> outputClass) {
return new Builder<>(inputClass, outputClass, this);
}
/**
* Sets the model application for this criteria.
*
* @param application the model application
* @return this {@code Builder}
*/
public Builder<I, O> optApplication(Application application) {
this.application = application;
return this;
}
/**
* Sets the engine name for this criteria.
*
* @param engine the engine name
* @return this {@code Builder}
*/
public Builder<I, O> optEngine(String engine) {
this.engine = engine;
return this;
}
/**
* Sets the {@link Device} for this criteria.
*
* @param device the {@link Device} for the criteria
* @return this {@code Builder}
*/
public Builder<I, O> optDevice(Device device) {
this.device = device;
return this;
}
/**
* Sets optional groupId of the {@link ModelZoo} for this criteria.
*
* @param groupId the groupId of the {@link ModelZoo}
* @return this {@code Builder}
*/
public Builder<I, O> optGroupId(String groupId) {
this.groupId = groupId;
return this;
}
/**
* Sets optional artifactId of the {@link ModelLoader} for this criteria.
*
* @param artifactId the artifactId of the {@link ModelLoader}
* @return this {@code Builder}
*/
public Builder<I, O> optArtifactId(String artifactId) {
if (artifactId != null && artifactId.contains(":")) {
String[] tokens = artifactId.split(":", -1);
groupId = tokens[0].isEmpty() ? null : tokens[0];
this.artifactId = tokens[1].isEmpty() ? null : tokens[1];
} else {
this.artifactId = artifactId;
}
return this;
}
/**
* Sets optional model urls of the {@link ModelLoader} for this criteria.
*
* @param modelUrls the comma delimited url string
* @return this {@code Builder}
*/
public Builder<I, O> optModelUrls(String modelUrls) {
if (modelUrls != null) {
this.modelZoo = new DefaultModelZoo(modelUrls);
}
return this;
}
/**
* Sets the optional model path of the {@link ModelLoader} for this criteria.
*
* @param modelPath the path to the model folder/files
* @return this {@code Builder}
*/
public Builder<I, O> optModelPath(Path modelPath) {
if (modelPath != null) {
try {
this.modelZoo = new DefaultModelZoo(modelPath.toUri().toURL().toString());
} catch (MalformedURLException e) {
throw new AssertionError("Invalid model path: " + modelPath, e);
}
}
return this;
}
/**
* Sets optional {@link ModelZoo} of the {@link ModelLoader} for this criteria.
*
* @param modelZoo the {@link ModelZoo} of the {@link ModelLoader} for this criteria
* @return this {@code Builder}
*/
public Builder<I, O> optModelZoo(ModelZoo modelZoo) {
this.modelZoo = modelZoo;
return this;
}
/**
* Sets the extra search filters for this criteria.
*
* @param filters the extra search filters
* @return this {@code Builder}
*/
public Builder<I, O> optFilters(Map<String, String> filters) {
this.filters = filters;
return this;
}
/**
* Sets an extra search filter for this criteria.
*
* @param key the search key
* @param value the search value
* @return this {@code Builder}
*/
public Builder<I, O> optFilter(String key, String value) {
if (filters == null) {
filters = new HashMap<>();
}
filters.put(key, value);
return this;
}
/**
* Sets an optional model {@link Block} for this criteria.
*
* @param block optional model {@link Block} for this criteria
* @return this {@code Builder}
*/
public Builder<I, O> optBlock(Block block) {
this.block = block;
return this;
}
/**
* Sets an optional model name for this criteria.
*
* @param modelName optional model name for this criteria
* @return this {@code Builder}
*/
public Builder<I, O> optModelName(String modelName) {
this.modelName = modelName;
return this;
}
/**
* Sets an extra model loading argument for this criteria.
*
* @param arguments optional model loading arguments
* @return this {@code Builder}
*/
public Builder<I, O> optArguments(Map<String, Object> arguments) {
this.arguments = arguments;
return this;
}
/**
* Sets the optional model loading argument for this criteria.
*
* @param key the model loading argument key
* @param value the model loading argument value
* @return this {@code Builder}
*/
public Builder<I, O> optArgument(String key, Object value) {
if (arguments == null) {
arguments = new HashMap<>();
}
arguments.put(key, value);
return this;
}
/**
* Sets the model loading options for this criteria.
*
* @param options the model loading options
* @return this {@code Builder}
*/
public Builder<I, O> optOptions(Map<String, String> options) {
this.options = options;
return this;
}
/**
* Sets the optional model loading option for this criteria.
*
* @param key the model loading option key
* @param value the model loading option value
* @return this {@code Builder}
*/
public Builder<I, O> optOption(String key, String value) {
if (options == null) {
options = new HashMap<>();
}
options.put(key, value);
return this;
}
/**
* Sets the optional {@link Translator} to override default {@code Translator}.
*
* @param translator the override {@code Translator}
* @return this {@code Builder}
*/
public Builder<I, O> optTranslator(Translator<I, O> translator) {
this.factory = null;
this.translator = translator;
return this;
}
/**
* Sets the optional {@link TranslatorFactory} to override default {@code Translator}.
*
* @param factory the override {@code TranslatorFactory}
* @return this {@code Builder}
*/
public Builder<I, O> optTranslatorFactory(TranslatorFactory factory) {
this.translator = null;
this.factory = factory;
return this;
}
/**
* Sets the optional {@link Progress}.
*
* @param progress the {@code Progress}
* @return this {@code Builder}
*/
public Builder<I, O> optProgress(Progress progress) {
this.progress = progress;
return this;
}
/**
* Builds a {@link Criteria} instance.
*
* @return the {@link Criteria} instance
*/
public Criteria<I, O> build() {
if (factory == null && translator != null) {
DefaultTranslatorFactory f = new DefaultTranslatorFactory();
f.registerTranslator(inputClass, outputClass, translator);
factory = f;
}
return new Criteria<>(this);
}
}
}
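
A hedged sketch of the download-before-load flow that isDownloaded() and downloadModel() enable. It assumes a model zoo on the classpath that can satisfy the criteria; the filter key/value is hypothetical:

```java
import ai.djl.Application;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;

public final class CriteriaExample {

    public static void main(String[] args) throws Exception {
        Criteria<Image, Classifications> criteria = Criteria.builder()
                .setTypes(Image.class, Classifications.class)
                .optApplication(Application.CV.IMAGE_CLASSIFICATION)
                .optFilter("backbone", "resnet50") // hypothetical filter key/value
                .optProgress(new ProgressBar())
                .build();

        // Fetch the artifacts ahead of time, then load from the local cache.
        if (!criteria.isDownloaded()) {
            criteria.downloadModel();
        }
        try (ZooModel<Image, Classifications> model = criteria.loadModel()) {
            System.out.println("Loaded: " + model.getName());
        }
    }
}
```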

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/DefaultModelZoo.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.engine.Engine;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
/** A {@link ModelZoo} that contains models in specified locations. */
public class DefaultModelZoo extends ModelZoo {
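/** The group ID used for models in the {@code DefaultModelZoo}. */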
public static final String GROUP_ID = "ai.djl.localmodelzoo";
private static final Logger logger = LoggerFactory.getLogger(DefaultModelZoo.class);
/** Constructs a new {@code DefaultModelZoo} instance. */
@SuppressWarnings("this-escape")
public DefaultModelZoo() {
String locations = System.getProperty("ai.djl.repository.zoo.location");
if (locations != null) {
parseLocation(locations);
}
}
/**
* Constructs a new {@code DefaultModelZoo} instance from the given search locations.
*
* @param locations comma-separated URLs from which the models are loaded
*/
@SuppressWarnings("this-escape")
public DefaultModelZoo(String locations) {
parseLocation(locations);
}
/** {@inheritDoc} */
@Override
public String getGroupId() {
return GROUP_ID;
}
/** {@inheritDoc} */
@Override
public Set<String> getSupportedEngines() {
return Engine.getAllEngines();
}
private void parseLocation(String locations) {
String[] urls = locations.split("\\s*,\\s*");
for (String url : urls) {
if (!url.isEmpty()) {
Repository repo = Repository.newInstance(url, url);
logger.debug("Scanning models in repo: {}, {}", repo.getClass(), url);
for (MRL mrl : repo.getResources()) {
addModel(mrl);
}
} else {
logger.warn("Model location is empty.");
}
}
}
}
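
A small sketch of the two configuration paths above: the ai.djl.repository.zoo.location system property for the no-arg constructor, or an explicit comma-separated location list; the paths are hypothetical:

```java
import ai.djl.repository.zoo.DefaultModelZoo;
import ai.djl.repository.zoo.ModelZoo;

public final class DefaultModelZooExample {

    public static void main(String[] args) {
        // Option 1: configure via the system property read by the no-arg constructor.
        System.setProperty("ai.djl.repository.zoo.location",
                "/opt/models/resnet,/opt/models/bert"); // hypothetical paths
        ModelZoo fromProperty = new DefaultModelZoo();
        System.out.println(fromProperty.getModelLoaders().size());

        // Option 2: pass the comma-separated locations directly.
        ModelZoo explicit = new DefaultModelZoo("/opt/models/resnet,/opt/models/bert");
        System.out.println(explicit.getGroupId()); // ai.djl.localmodelzoo
    }
}
```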

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/DefaultZooProvider.java
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
/** A {@link ZooProvider} implementation that loads models from specified locations. */
public class DefaultZooProvider implements ZooProvider {
/** {@inheritDoc} */
@Override
public ModelZoo getModelZoo() {
return new DefaultModelZoo();
}
}

// File: java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/ModelLoader.java
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.MalformedModelException;
import ai.djl.repository.MRL;
import ai.djl.util.Progress;
import java.io.IOException;
/** A ModelLoader loads a particular {@link ZooModel} from a Repository for a model zoo. */
public interface ModelLoader {
/**
* Returns the group ID of the {@code ModelLoader}.
*
* @return the group ID of the {@code ModelLoader}
*/
String getGroupId();
/**
* Returns the artifact ID of the {@code ModelLoader}.
*
* @return the artifact ID of the {@code ModelLoader}
*/
String getArtifactId();
/**
* Returns the application of the {@code ModelLoader}.
*
* @return the application of the {@code ModelLoader}
*/
Application getApplication();
/**
* Returns the {@link MRL} of the {@code ModelLoader}.
*
* @return the {@link MRL} of the {@code ModelLoader}
*/
MRL getMrl();
/**
* Loads the model with the given criteria.
*
* @param <I> the input data type
* @param <O> the output data type
* @param criteria the criteria to match against the loaded model
* @return the loaded model
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
<I, O> ZooModel<I, O> loadModel(Criteria<I, O> criteria)
throws IOException, ModelNotFoundException, MalformedModelException;
/**
     * Returns {@code true} if the model is downloaded to the local directory.
*
* @param <I> the input data type
* @param <O> the output data type
* @param criteria the criteria to match against the loaded model
     * @return {@code true} if the model is downloaded to the local directory
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
*/
<I, O> boolean isDownloaded(Criteria<I, O> criteria) throws IOException, ModelNotFoundException;
/**
     * Downloads the model artifacts to the local directory.
*
* @param <I> the input data type
* @param <O> the output data type
* @param criteria the criteria to match against the loaded model
* @param progress the progress tracker
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
*/
<I, O> void downloadModel(Criteria<I, O> criteria, Progress progress)
throws IOException, ModelNotFoundException;
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/ModelNotFoundException.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.ModelException;
/** Thrown when a model cannot be found in the repository search path. */
public class ModelNotFoundException extends ModelException {
private static final long serialVersionUID = 1L;
/**
* Constructs a new exception with the specified detail message. The cause is not initialized,
* and may subsequently be initialized by a call to {@link #initCause}.
*
* @param message the detail message. The detail message is saved for later retrieval by the
* {@link #getMessage()} method.
*/
public ModelNotFoundException(String message) {
super(message);
}
/**
* Constructs a new exception with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i> automatically
* incorporated in this exception's detail message.
*
* @param message the detail message (which is saved for later retrieval by the {@link
* #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
*/
public ModelNotFoundException(String message, Throwable cause) {
super(message, cause);
}
/**
* Constructs a new exception with the specified cause and a detail message of {@code
* (cause==null ? null : cause.toString())} (which typically contains the class and detail
* message of {@code cause}). This constructor is useful for exceptions that are little more
* than wrappers for other throwables (for example, {@link
* java.security.PrivilegedActionException}).
*
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
*/
public ModelNotFoundException(Throwable cause) {
super(cause);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/ModelZoo.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Application;
import ai.djl.MalformedModelException;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.JsonUtils;
import ai.djl.util.Utils;
import com.google.gson.reflect.TypeToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Writer;
import java.lang.reflect.Type;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.zip.GZIPInputStream;
/** An abstract class that represents a collection of models. */
public abstract class ModelZoo {
public static final URI DJL_REPO_URL =
URI.create(Utils.getEnvOrSystemProperty("DJL_REPO_URL", "https://mlrepo.djl.ai/"));
private static final Logger logger = LoggerFactory.getLogger(ModelZoo.class);
private static final Map<String, ModelZoo> MODEL_ZOO_MAP = new ConcurrentHashMap<>();
private static final long ONE_DAY = Duration.ofDays(1).toMillis();
private static ModelZooResolver resolver;
private Map<String, ModelLoader> modelLoaders = new ConcurrentHashMap<>();
static {
ServiceLoader<ZooProvider> providers = ServiceLoader.load(ZooProvider.class);
for (ZooProvider provider : providers) {
registerModelZoo(provider);
}
}
/**
* Returns the global unique identifier of the {@code ModelZoo}.
*
     * <p>We recommend using a reverse-DNS name as your model zoo group ID to make sure it does
     * not conflict with other ModelZoos.
*
* @return the global unique identifier of the {@code ModelZoo}
*/
public abstract String getGroupId();
/**
* Lists the available model families in the ModelZoo.
*
* @return the list of all available model families
*/
public Collection<ModelLoader> getModelLoaders() {
return modelLoaders.values();
}
/**
* Returns the {@link ModelLoader} based on the model name.
*
* @param name the name of the model
* @return the {@link ModelLoader} of the model
*/
public ModelLoader getModelLoader(String name) {
return modelLoaders.get(name);
}
/**
* Returns all supported engine names.
*
* @return all supported engine names
*/
public abstract Set<String> getSupportedEngines();
    /**
     * Adds a model to this model zoo, keyed by its artifact ID.
     *
     * @param mrl the {@link MRL} of the model to add
     */
    protected final void addModel(MRL mrl) {
        modelLoaders.put(mrl.getArtifactId(), new BaseModelLoader(mrl));
    }
    /**
     * Adds a model loader to this model zoo, keyed by its artifact ID.
     *
     * @param loader the {@link ModelLoader} to add
     */
    protected final void addModel(ModelLoader loader) {
        modelLoaders.put(loader.getArtifactId(), loader);
    }
/**
* Sets the {@code ModelZooResolver}.
*
* @param resolver the {@code ModelZooResolver}
*/
public static void setModelZooResolver(ModelZooResolver resolver) {
ModelZoo.resolver = resolver;
}
/**
     * Registers the {@code ModelZoo} supplied by the given provider.
*
* @param provider the {@code ZooProvider}
*/
public static void registerModelZoo(ZooProvider provider) {
ModelZoo zoo = provider.getModelZoo();
MODEL_ZOO_MAP.put(zoo.getGroupId(), zoo);
}
/**
* Returns available model zoos.
*
     * @return the available model zoos
*/
public static Collection<ModelZoo> listModelZoo() {
return MODEL_ZOO_MAP.values();
}
/**
* Returns the {@code ModelZoo} with the {@code groupId}.
*
* @param groupId the model zoo group id to check for
* @return the {@code ModelZoo} with the {@code groupId}
*/
public static ModelZoo getModelZoo(String groupId) {
ModelZoo zoo = MODEL_ZOO_MAP.get(groupId);
if (zoo == null && resolver != null) {
zoo = resolver.resolve(groupId);
if (zoo != null) {
MODEL_ZOO_MAP.putIfAbsent(groupId, zoo);
}
}
return zoo;
}
/**
* Returns whether a model zoo with the group id is available.
*
* @param groupId the model zoo group id to check for
* @return whether a model zoo with the group id is available
*/
public static boolean hasModelZoo(String groupId) {
return MODEL_ZOO_MAP.containsKey(groupId);
}
/**
     * Loads the {@link ZooModel} that matches the given criteria.
*
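     * <p>A minimal usage sketch (the model URL below is hypothetical):
     *
     * <pre>{@code
     * Criteria<Image, Classifications> criteria =
     *         Criteria.builder()
     *                 .setTypes(Image.class, Classifications.class)
     *                 .optModelUrls("https://example.com/models/resnet.zip")
     *                 .build();
     * try (ZooModel<Image, Classifications> model = ModelZoo.loadModel(criteria)) {
     *     // create a predictor from the model and run inference
     * }
     * }</pre>
     *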
* @param criteria the requirements for the model
* @param <I> the input data type for preprocessing
* @param <O> the output data type after postprocessing
* @return the model that matches the criteria
* @throws IOException for various exceptions loading data from the repository
* @throws ModelNotFoundException if no model with the specified criteria is found
* @throws MalformedModelException if the model data is malformed
*/
public static <I, O> ZooModel<I, O> loadModel(Criteria<I, O> criteria)
throws IOException, ModelNotFoundException, MalformedModelException {
return criteria.loadModel();
}
/**
* Returns the available {@link Application} and their model artifact metadata.
*
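     * <p>For example, to print everything that is available:
     *
     * <pre>{@code
     * Map<Application, List<MRL>> models = ModelZoo.listModels();
     * models.forEach((app, list) -> System.out.println(app + ": " + list));
     * }</pre>
     *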
* @return the available {@link Application} and their model artifact metadata
*/
public static Map<Application, List<MRL>> listModels() {
return listModels(Criteria.builder().build());
}
/**
* Returns the available {@link Application} and their model artifact metadata.
*
* @param criteria the requirements for the model
* @return the available {@link Application} and their model artifact metadata
*/
public static Map<Application, List<MRL>> listModels(Criteria<?, ?> criteria) {
String artifactId = criteria.getArtifactId();
ModelZoo modelZoo = criteria.getModelZoo();
String groupId = criteria.getGroupId();
String engine = criteria.getEngine();
Application application = criteria.getApplication();
@SuppressWarnings("PMD.UseConcurrentHashMap")
Map<Application, List<MRL>> models =
new TreeMap<>(Comparator.comparing(Application::getPath));
for (ModelZoo zoo : listModelZoo()) {
if (modelZoo != null) {
if (groupId != null && !modelZoo.getGroupId().equals(groupId)) {
continue;
}
Set<String> supportedEngine = modelZoo.getSupportedEngines();
if (engine != null && !supportedEngine.contains(engine)) {
continue;
}
}
for (ModelLoader loader : zoo.getModelLoaders()) {
Application app = loader.getApplication();
String loaderArtifactId = loader.getArtifactId();
if (artifactId != null && !artifactId.equals(loaderArtifactId)) {
// filter out by model loader artifactId
continue;
}
if (application != Application.UNDEFINED
&& app != Application.UNDEFINED
&& !app.matches(application)) {
// filter out ModelLoader by application
continue;
}
models.compute(
app,
(key, val) -> {
if (val == null) {
val = new ArrayList<>();
}
val.add(loader.getMrl());
return val;
});
}
}
return models;
}
    /**
     * Loads the model index for the given application from the repository, caching it locally for
     * one day and falling back to the cached or bundled copy when the download fails or offline
     * mode is enabled.
     *
     * @param repo the repository to load the model index from
     * @param app the application to list the models for
     * @return a map from model name to model metadata
     */
    protected Map<String, Map<String, Object>> listModels(Repository repo, Application app) {
try {
String groupId = getGroupId();
String path = "model/" + app.getPath() + '/' + groupId.replace('.', '/') + '/';
Path dir = Utils.getCacheDir().resolve("cache/repo/" + path);
if (Files.notExists(dir)) {
Files.createDirectories(dir);
} else if (!Files.isDirectory(dir)) {
logger.warn("Failed initialize cache directory: {}", dir);
return Collections.emptyMap();
}
Type type = new TypeToken<Map<String, Map<String, Object>>>() {}.getType();
Path file = dir.resolve("models.json");
if (Files.exists(file)) {
long lastModified = Files.getLastModifiedTime(file).toMillis();
if (Utils.isOfflineMode() || System.currentTimeMillis() - lastModified < ONE_DAY) {
try (Reader reader = Files.newBufferedReader(file)) {
return JsonUtils.GSON.fromJson(reader, type);
}
}
}
URI uri = repo.getBaseUri().resolve(path + "models.json.gz");
Path tmp = Files.createTempFile(dir, "models", ".tmp");
try (GZIPInputStream gis = new GZIPInputStream(Utils.openUrl(uri.toURL()))) {
String json = Utils.toString(gis);
try (Writer writer = Files.newBufferedWriter(tmp)) {
writer.write(json);
}
Utils.moveQuietly(tmp, file);
return JsonUtils.GSON.fromJson(json, type);
} catch (IOException e) {
logger.warn("Failed to download Huggingface model zoo index: {}", app);
if (Files.exists(file)) {
try (Reader reader = Files.newBufferedReader(file)) {
return JsonUtils.GSON.fromJson(reader, type);
}
}
String resource = app.getPath() + "/" + groupId + ".json";
try (InputStream is = ClassLoaderUtils.getResourceAsStream(resource)) {
String json = Utils.toString(is);
try (Writer writer = Files.newBufferedWriter(tmp)) {
writer.write(json);
}
Utils.moveQuietly(tmp, file);
return JsonUtils.GSON.fromJson(json, type);
}
} finally {
Utils.deleteQuietly(tmp);
}
} catch (IOException e) {
logger.warn("Failed load index of models: {}", app, e);
}
return Collections.emptyMap();
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/ModelZooResolver.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
/** An interface that resolves external ModelZoo. */
public interface ModelZooResolver {
/**
* Returns {@link ModelZoo} based on model zoo group ID.
*
* @param groupId the model zoo group ID.
* @return the resolved {@code ModelZoo}
*/
ModelZoo resolve(String groupId);
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/ZooModel.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.inference.Predictor;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.translate.Translator;
import ai.djl.util.PairList;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Path;
import java.util.Map;
import java.util.function.Function;
/**
* A {@code ZooModel} is a {@link Model} loaded from a model zoo and includes a default {@link
* Translator}.
*
* @param <I> the model input type
* @param <O> the model output type
*/
public class ZooModel<I, O> implements Model {
private Model model;
private Translator<I, O> translator;
/**
* Constructs a {@code ZooModel} given the model and translator.
*
* @param model the model to wrap
* @param translator the translator
*/
public ZooModel(Model model, Translator<I, O> translator) {
this.model = model;
this.translator = translator;
}
/** {@inheritDoc} */
@Override
public void load(Path modelPath, String prefix, Map<String, ?> options) {
throw new IllegalArgumentException("ZooModel should not be re-loaded.");
}
/** {@inheritDoc} */
@Override
public void load(InputStream modelStream, Map<String, ?> options) throws IOException {
throw new IllegalArgumentException("ZooModel should not be re-loaded.");
}
/**
* Returns the wrapped model.
*
* @return the wrapped model
*/
public Model getWrappedModel() {
return model;
}
/** {@inheritDoc} */
@Override
public void save(Path modelPath, String modelName) throws IOException {
model.save(modelPath, modelName);
}
/** {@inheritDoc} */
@Override
public Path getModelPath() {
return model.getModelPath();
}
/** {@inheritDoc} */
@Override
public Block getBlock() {
return model.getBlock();
}
/** {@inheritDoc} */
@Override
public void setBlock(Block block) {
model.setBlock(block);
}
/** {@inheritDoc} */
@Override
public String getName() {
return model.getName();
}
/** {@inheritDoc} */
@Override
public String getProperty(String key) {
return model.getProperty(key);
}
/** {@inheritDoc} */
@Override
public void setProperty(String key, String value) {
model.setProperty(key, value);
}
/** {@inheritDoc} */
@Override
public Map<String, String> getProperties() {
return model.getProperties();
}
/** {@inheritDoc} */
@Override
public Trainer newTrainer(TrainingConfig trainingConfig) {
return model.newTrainer(trainingConfig);
}
/**
* Creates a new Predictor based on the model with the default translator.
*
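     * <p>Usage sketch ({@code input} stands for a value of the model's input type; prediction may
     * throw a {@code TranslateException}):
     *
     * <pre>{@code
     * try (Predictor<I, O> predictor = model.newPredictor()) {
     *     O output = predictor.predict(input);
     * }
     * }</pre>
     *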
* @return an instance of {@code Predictor}
*/
public Predictor<I, O> newPredictor() {
return newPredictor(translator);
}
/**
* Creates a new Predictor based on the model with the default translator and a specified
* device.
*
* @param device the device to use for prediction
* @return an instance of {@code Predictor}
*/
public Predictor<I, O> newPredictor(Device device) {
return model.newPredictor(translator, device);
}
/** {@inheritDoc} */
@Override
public <P, Q> Predictor<P, Q> newPredictor(Translator<P, Q> translator, Device device) {
return model.newPredictor(translator, device);
}
/**
* Returns the default translator.
*
* @return the default translator
*/
public Translator<I, O> getTranslator() {
return translator;
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeInput() {
return model.describeInput();
}
/** {@inheritDoc} */
@Override
public PairList<String, Shape> describeOutput() {
return model.describeOutput();
}
/** {@inheritDoc} */
@Override
public String[] getArtifactNames() {
return model.getArtifactNames();
}
/** {@inheritDoc} */
@Override
public <T> T getArtifact(String name, Function<InputStream, T> function) throws IOException {
return model.getArtifact(name, function);
}
/** {@inheritDoc} */
@Override
public URL getArtifact(String name) throws IOException {
return model.getArtifact(name);
}
/** {@inheritDoc} */
@Override
public InputStream getArtifactAsStream(String name) throws IOException {
return model.getArtifactAsStream(name);
}
/** {@inheritDoc} */
@Override
public NDManager getNDManager() {
return model.getNDManager();
}
/** {@inheritDoc} */
@Override
public void setDataType(DataType dataType) {
model.setDataType(dataType);
}
/** {@inheritDoc} */
@Override
public DataType getDataType() {
return model.getDataType();
}
/** {@inheritDoc} */
@Override
public void cast(DataType dataType) {
model.cast(dataType);
}
/** {@inheritDoc} */
@Override
public void close() {
model.close();
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/ZooProvider.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
/**
 * The {@code ZooProvider} is a <i>service provider</i> that enables the {@code ServiceLoader} to
 * locate and load {@link ModelZoo} implementations at run time.
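 *
 * <p>Implementations are discovered via {@link java.util.ServiceLoader}: list the implementation
 * class name in a resource file named
 * {@code META-INF/services/ai.djl.repository.zoo.ZooProvider}.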
*/
public interface ZooProvider {
/**
* Gets the name of the {@link ModelZoo}.
*
* @return the name of the {@link ModelZoo}
*/
default String getName() {
return getClass().getName();
}
/**
* Returns the instance of the {@link ModelZoo}.
*
* @return the instance of {@link ModelZoo}
*/
ModelZoo getModelZoo();
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/ZooProviderNotFoundException.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.repository.zoo;
/** Runtime exception thrown when a provider of the required type cannot be found. */
public class ZooProviderNotFoundException extends RuntimeException {
private static final long serialVersionUID = 1L;
/**
* Constructs a new exception with the specified detail message. The cause is not initialized,
* and may subsequently be initialized by a call to {@link #initCause}.
*
* @param message the detail message. The detail message is saved for later retrieval by the
* {@link #getMessage()} method.
*/
public ZooProviderNotFoundException(String message) {
super(message);
}
/**
* Constructs a new exception with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i> automatically
* incorporated in this exception's detail message.
*
* @param message the detail message (which is saved for later retrieval by the {@link
* #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
*/
public ZooProviderNotFoundException(String message, Throwable cause) {
super(message, cause);
}
/**
* Constructs a new exception with the specified cause and a detail message of {@code
* (cause==null ? null : cause.toString())} (which typically contains the class and detail
* message of {@code cause}). This constructor is useful for exceptions that are little more
* than wrappers for other throwables (for example, {@link
* java.security.PrivilegedActionException}).
*
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
*/
public ZooProviderNotFoundException(Throwable cause) {
super(cause);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository
|
java-sources/ai/djl/api/0.34.0/ai/djl/repository/zoo/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for working with model zoo repositories.
*
* @see ai.djl.Model
* @see ai.djl.Device
*/
package ai.djl.repository.zoo;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/DefaultTrainingConfig.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.nn.Parameter;
import ai.djl.training.evaluator.Evaluator;
import ai.djl.training.initializer.Initializer;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.optimizer.Adam;
import ai.djl.training.optimizer.Optimizer;
import ai.djl.util.PairList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Predicate;
/** {@code DefaultTrainingConfig} is an implementation of the {@link TrainingConfig} interface. */
public class DefaultTrainingConfig implements TrainingConfig {
private PairList<Initializer, Predicate<Parameter>> initializers = new PairList<>();
private Optimizer optimizer;
private Device[] devices;
private Loss loss;
private ExecutorService executorService;
private List<Evaluator> evaluators;
private List<TrainingListener> listeners;
/**
     * Creates an instance of {@code DefaultTrainingConfig} with the given {@link Loss}. It uses
     * {@link Adam} as the default optimizer. The evaluators and listeners are left to the user's
     * discretion.
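     * <p>A typical configuration sketch ({@code Accuracy} and the logging listeners are
     * illustrative choices, not defaults):
     *
     * <pre>{@code
     * TrainingConfig config =
     *         new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
     *                 .addEvaluator(new Accuracy())
     *                 .addTrainingListeners(TrainingListener.Defaults.logging());
     * }</pre>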
*
* @param loss the loss to use for training
*/
public DefaultTrainingConfig(Loss loss) {
this.loss = loss;
optimizer = Adam.builder().build();
evaluators = new ArrayList<>();
listeners = new ArrayList<>();
}
/**
* Sets the {@link Initializer} to use for the parameters (default from <a
* href="https://arxiv.org/abs/1502.01852">paper</a>).
*
     * @param initializer the initializer to use for the parameters
* @param type the {@link Parameter.Type} of the parameters
* @return this {@code DefaultTrainingConfig}
*/
public DefaultTrainingConfig optInitializer(Initializer initializer, Parameter.Type type) {
initializers.add(initializer, parameter -> parameter.getType().equals(type));
return this;
}
/**
* Sets the {@link Initializer} to use for the parameters (default from <a
* href="https://arxiv.org/abs/1502.01852">paper</a>).
*
     * @param initializer the initializer to use for the parameters
* @param name the name of the parameter
* @return this {@code DefaultTrainingConfig}
*/
public DefaultTrainingConfig optInitializer(Initializer initializer, String name) {
initializers.add(initializer, parameter -> parameter.getName().equals(name));
return this;
}
/**
* Sets the {@link Initializer} to use for the parameters (default from <a
* href="https://arxiv.org/abs/1502.01852">paper</a>).
*
     * @param initializer the initializer to use for the parameters
* @param predicate the predicate to identify parameter
* @return this {@code DefaultTrainingConfig}
*/
public DefaultTrainingConfig optInitializer(
Initializer initializer, Predicate<Parameter> predicate) {
initializers.add(initializer, predicate);
return this;
}
/**
* Sets the array of {@link Device} available for training.
*
* @param devices an array of devices to be set
* @return this {@code DefaultTrainingConfig}
*/
public DefaultTrainingConfig optDevices(Device[] devices) {
this.devices = devices;
return this;
}
/**
* Sets the {@link Optimizer} used during training (default {@link Adam}).
*
* @param optimizer the optimizer to be set
* @return this {@code DefaultTrainingConfig}
*/
public DefaultTrainingConfig optOptimizer(Optimizer optimizer) {
this.optimizer = optimizer;
return this;
}
/**
     * Sets the {@link ExecutorService} to the global {@link ForkJoinPool#commonPool()}.
*
* @return this {@link DefaultTrainingConfig}
*/
public DefaultTrainingConfig optExecutorService() {
return optExecutorService(ForkJoinPool.commonPool());
}
/**
* Sets the {@link ExecutorService} to train with multiple threads.
*
* @param executorService the executor service
* @return this {@link DefaultTrainingConfig}
*/
public DefaultTrainingConfig optExecutorService(ExecutorService executorService) {
this.executorService = executorService;
return this;
}
/**
     * Adds multiple {@link Evaluator}s that need to be computed during training.
*
* @param evaluators the evaluators to be added
* @param <T> the type of evaluator to be added
* @return this {@code DefaultTrainingConfig}
*/
public <T extends Evaluator> DefaultTrainingConfig addEvaluators(Collection<T> evaluators) {
evaluators.forEach(this::addEvaluator);
return this;
}
/**
* Adds an {@link Evaluator} that needs to be computed during training.
*
* @param evaluator the evaluator to be added
* @return this {@code DefaultTrainingConfig}
*/
public DefaultTrainingConfig addEvaluator(Evaluator evaluator) {
evaluators.add(evaluator);
return this;
}
/**
* Adds {@link TrainingListener}s for training.
*
* @param listeners the {@link TrainingListener}s to add
* @return this {@code DefaultTrainingConfig}
*/
public DefaultTrainingConfig addTrainingListeners(TrainingListener... listeners) {
this.listeners.addAll(Arrays.asList(listeners));
return this;
}
/** {@inheritDoc} */
@Override
public Device[] getDevices() {
if (devices == null) {
return Engine.getInstance().getDevices();
}
return devices;
}
/** {@inheritDoc} */
@Override
public PairList<Initializer, Predicate<Parameter>> getInitializers() {
return initializers;
}
/** {@inheritDoc} */
@Override
public Optimizer getOptimizer() {
return optimizer;
}
/** {@inheritDoc} */
@Override
public Loss getLossFunction() {
return loss;
}
/** {@inheritDoc} */
@Override
public ExecutorService getExecutorService() {
return executorService;
}
/** {@inheritDoc} */
@Override
public List<Evaluator> getEvaluators() {
return evaluators;
}
/** {@inheritDoc} */
@Override
public List<TrainingListener> getTrainingListeners() {
return listeners;
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/EasyTrain.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.training.dataset.Batch;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.listener.TrainingListener.BatchData;
import ai.djl.translate.TranslateException;
import ai.djl.util.Preconditions;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
/** Helper for easy training of a whole model, a training batch, or a validation batch. */
public final class EasyTrain {
private EasyTrain() {}
/**
     * Runs a basic training loop that trains the given trainer for the given number of epochs.
*
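     * <p>A minimal sketch ({@code model}, the datasets, and the input shape are assumed to exist
     * already):
     *
     * <pre>{@code
     * DefaultTrainingConfig config = new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss());
     * try (Trainer trainer = model.newTrainer(config)) {
     *     trainer.initialize(new Shape(32, 784)); // hypothetical batch size x feature size
     *     EasyTrain.fit(trainer, 2, trainingSet, validationSet);
     * }
     * }</pre>
     *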
* @param trainer the trainer to train for
* @param numEpoch the number of epochs to train
* @param trainingDataset the dataset to train on
* @param validateDataset the dataset to validate against. Can be null for no validation
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public static void fit(
Trainer trainer, int numEpoch, Dataset trainingDataset, Dataset validateDataset)
throws IOException, TranslateException {
// Deep learning is typically trained in epochs where each epoch trains the model on each
// item in the dataset once
for (int epoch = 0; epoch < numEpoch; epoch++) {
// We iterate through the dataset once during each epoch
for (Batch batch : trainer.iterateDataset(trainingDataset)) {
// During trainBatch, we update the loss and evaluators with the results for the
// training batch
trainBatch(trainer, batch);
// Now, we update the model parameters based on the results of the latest trainBatch
trainer.step();
// We must make sure to close the batch to ensure all the memory associated with the
// batch is cleared.
// If the memory isn't closed after each batch, you will very quickly run out of
// memory on your GPU
batch.close();
}
// After each epoch, test against the validation dataset if we have one
evaluateDataset(trainer, validateDataset);
// reset training and validation evaluators at end of epoch
trainer.notifyListeners(listener -> listener.onEpoch(trainer));
}
}
/**
* Trains the model with one iteration of the given {@link Batch} of data.
*
     * @param trainer the trainer to train the batch with
* @param batch a {@link Batch} that contains data, and its respective labels
* @throws IllegalArgumentException if the batch engine does not match the trainer engine
*/
public static void trainBatch(Trainer trainer, Batch batch) {
if (trainer.getManager().getEngine() != batch.getManager().getEngine()) {
throw new IllegalArgumentException(
"The data must be on the same engine as the trainer. You may need to change one"
+ " of your NDManagers.");
}
Batch[] splits = batch.split(trainer.getDevices(), false);
BatchData batchData =
new BatchData(batch, new ConcurrentHashMap<>(), new ConcurrentHashMap<>());
try (GradientCollector collector = trainer.newGradientCollector()) {
if (splits.length > 1 && trainer.getExecutorService().isPresent()) {
// multi-threaded
ExecutorService executor = trainer.getExecutorService().get();
List<CompletableFuture<Boolean>> futures = new ArrayList<>(splits.length);
for (Batch split : splits) {
futures.add(
CompletableFuture.supplyAsync(
() -> trainSplit(trainer, collector, batchData, split),
executor));
}
                // wait for all splits to finish before the gradient collector is closed
                CompletableFuture.allOf(futures.stream().toArray(CompletableFuture[]::new))
                        .join();
} else {
// sequence
for (Batch split : splits) {
trainSplit(trainer, collector, batchData, split);
}
}
}
trainer.notifyListeners(listener -> listener.onTrainingBatch(trainer, batchData));
}
private static boolean trainSplit(
Trainer trainer, GradientCollector collector, BatchData batchData, Batch split) {
NDList data = split.getData();
NDList labels = split.getLabels();
NDList preds = trainer.forward(data, labels);
long time = System.nanoTime();
NDArray lossValue = trainer.getLoss().evaluate(labels, preds);
collector.backward(lossValue);
trainer.addMetric("backward", time);
time = System.nanoTime();
batchData.getLabels().put(labels.get(0).getDevice(), labels);
batchData.getPredictions().put(preds.get(0).getDevice(), preds);
trainer.addMetric("training-metrics", time);
return true;
}
/**
* Validates the given batch of data.
*
* <p>During validation, the evaluators and losses are computed, but gradients aren't computed,
* and parameters aren't updated.
*
* @param trainer the trainer to validate the batch with
* @param batch a {@link Batch} of data
* @throws IllegalArgumentException if the batch engine does not match the trainer engine
*/
public static void validateBatch(Trainer trainer, Batch batch) {
Preconditions.checkArgument(
trainer.getManager().getEngine() == batch.getManager().getEngine(),
"The data must be on the same engine as the trainer. You may need to change one of"
+ " your NDManagers.");
Batch[] splits = batch.split(trainer.getDevices(), false);
BatchData batchData =
new BatchData(batch, new ConcurrentHashMap<>(), new ConcurrentHashMap<>());
if (splits.length > 1 && trainer.getExecutorService().isPresent()) {
// multi-threaded
ExecutorService executor = trainer.getExecutorService().get();
List<CompletableFuture<Boolean>> futures = new ArrayList<>(splits.length);
for (Batch split : splits) {
futures.add(
CompletableFuture.supplyAsync(
() -> validateSplit(trainer, batchData, split), executor));
}
            // wait for all validation splits to finish before notifying the listeners
            CompletableFuture.allOf(futures.stream().toArray(CompletableFuture[]::new))
                    .join();
} else {
// sequence
for (Batch split : splits) {
validateSplit(trainer, batchData, split);
}
}
trainer.notifyListeners(listener -> listener.onValidationBatch(trainer, batchData));
}
private static boolean validateSplit(Trainer trainer, BatchData batchData, Batch split) {
NDList data = split.getData();
NDList labels = split.getLabels();
NDList preds = trainer.evaluate(data);
batchData.getLabels().put(labels.get(0).getDevice(), labels);
batchData.getPredictions().put(preds.get(0).getDevice(), preds);
return true;
}
/**
* Evaluates the test dataset.
*
* @param trainer the trainer to evaluate on
* @param testDataset the test dataset to evaluate
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public static void evaluateDataset(Trainer trainer, Dataset testDataset)
throws IOException, TranslateException {
if (testDataset != null) {
for (Batch batch : trainer.iterateDataset(testDataset)) {
validateBatch(trainer, batch);
batch.close();
}
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/GradientCollector.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.ndarray.NDArray;
/**
* An interface that provides a mechanism to collect gradients during training.
*
* <p>The {@code GradientCollector} should be opened with a try-with-resources. All operations
 * performed within the try-with-resources are recorded and the variables marked. When the
 * {@link #backward(NDArray) backward function} is called, gradients are collected w.r.t. the
 * previously marked variables.
*
 * <p>The typical behavior is to open a gradient collector for each batch and close it at the end
 * of the batch. In this way, the gradients are reset between batches. If the gradient collector
 * is left open for multiple calls to backward, the collected gradients are accumulated and added
 * together.
*
* <p>Due to limitations in most engines, the gradient collectors are global. This means that only
* one can be used at a time. If multiple are opened, an error will be thrown.
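 *
 * <p>A typical usage sketch inside a training step (names follow {@code ai.djl.training.Trainer}):
 *
 * <pre>{@code
 * try (GradientCollector collector = trainer.newGradientCollector()) {
 *     NDList preds = trainer.forward(data, labels);
 *     NDArray loss = trainer.getLoss().evaluate(labels, preds);
 *     collector.backward(loss); // compute gradients w.r.t. the marked variables
 * }
 * trainer.step(); // apply the accumulated gradients
 * }</pre>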
*/
public interface GradientCollector extends AutoCloseable {
/**
     * Calculates the gradient w.r.t. the previously marked variables (head).
     *
     * @param target the target NDArray from which the gradient w.r.t. the head is calculated
*/
void backward(NDArray target);
/** Sets all the gradients within the engine to zero. */
void zeroGradients();
/** {@inheritDoc} */
@Override
void close();
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/LocalParameterServer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.Device;
import ai.djl.ndarray.NDArray;
import ai.djl.training.optimizer.Optimizer;
/** {@code LocalParameterServer} is an implementation of the {@code ParameterServer} interface. */
public class LocalParameterServer implements ParameterServer {
private Optimizer optimizer;
/**
     * Creates a new instance of {@code LocalParameterServer} for the given optimizer.
*
* @param optimizer an optimizer
*/
public LocalParameterServer(Optimizer optimizer) {
this.optimizer = optimizer;
}
/** {@inheritDoc} */
@Override
public void init(String parameterId, NDArray[] value) {}
/** {@inheritDoc} */
@Override
public void update(String parameterId, NDArray[] grads, NDArray[] params) {
Device firstDevice = params[0].getDevice();
// reduce gradient from all devices to first device
for (int i = 1; i < grads.length; i++) {
try (NDArray gradCopy = grads[i].toDevice(firstDevice, true)) {
grads[0].addi(gradCopy);
}
}
// update weights on different devices with reduced gradient
// use duplicate because after the first optimizer.update
// PyTorch optimizer will zero grads[0]
// the second copy is to move the grads[0] to the device the weight is on
try (NDArray aggregatedGrad = grads[0].duplicate()) {
for (NDArray param : params) {
if (param.getDevice().equals(firstDevice)) {
optimizer.update(parameterId, param, aggregatedGrad);
} else {
try (NDArray gradSumCopy = aggregatedGrad.toDevice(param.getDevice(), true)) {
optimizer.update(parameterId, param, gradSumCopy);
}
}
}
}
}
/** {@inheritDoc} */
@Override
public void close() {}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/ParameterServer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.ndarray.NDArray;
import java.util.Arrays;
/** An interface for a key-value store that stores parameters and their corresponding gradients. */
public interface ParameterServer extends AutoCloseable {
/**
     * Initializes the {@code ParameterServer} for the given parameter.
*
* @param parameterId the parameter ID
* @param value the values to be set for the given parameter
*/
void init(String parameterId, NDArray[] value);
/**
     * Updates the parameter with the given key on the parameter server.
*
* @param parameterId the key to identify the parameter
* @param params the parameter NDArrays in different devices to be updated.
*/
default void update(String parameterId, NDArray[] params) {
NDArray[] grads = Arrays.stream(params).map(NDArray::getGradient).toArray(NDArray[]::new);
update(parameterId, grads, params);
Arrays.stream(grads).forEach(NDArray::close);
}
/**
     * Updates the parameter with the given key on the parameter server.
*
* @param parameterId the key to identify the parameter
* @param grads the gradient NDArrays in different devices to apply the update.
* @param params the parameter NDArrays in different devices to be updated.
*/
void update(String parameterId, NDArray[] grads, NDArray[] params);
/** {@inheritDoc} */
@Override
void close();
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/ParameterStore.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.Device;
import ai.djl.Device.MultiDevice;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.nn.Parameter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* The {@code ParameterStore} contains a map from a parameter to the mirrors of it on other devices.
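 *
 * <p>Typical flow: a {@link Trainer} attaches a {@link ParameterServer} via {@code
 * setParameterServer}, reads mirrored values with {@code getValue} during the forward pass, and
 * pushes the gradients with {@code updateAllParameters()} after the backward pass.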
*/
public class ParameterStore {
private NDManager manager;
private Map<String, ParameterData> parameterMap;
private Map<Device, Integer> deviceMap;
private boolean copy;
private ParameterServer parameterServer;
/** Constructs a new {@code ParameterStore} instance. */
public ParameterStore() {
this(NDManager.newBaseManager(), false);
}
/**
* Constructs an empty {@code ParameterStore}.
*
* @param manager the manager to attach mirrored parameters to
* @param copy whether to always copy even for the same device as the original parameter
*/
public ParameterStore(NDManager manager, boolean copy) {
this.manager = manager;
this.copy = copy;
parameterMap = new ConcurrentHashMap<>();
deviceMap = new ConcurrentHashMap<>();
deviceMap.put(manager.getDevice(), 0);
}
/**
* Sets the parameterServer used to apply updates to the parameters.
*
* @param parameterServer the parameterServer
* @param devices the devices to create mirrored parameters on
*/
public void setParameterServer(ParameterServer parameterServer, Device[] devices) {
this.parameterServer = parameterServer;
deviceMap.clear();
for (int i = 0; i < devices.length; ++i) {
if (devices[i] instanceof MultiDevice) {
throw new IllegalArgumentException(
"The parameter store does not support MultiDevices");
}
if (deviceMap.put(devices[i], i) != null) {
throw new IllegalArgumentException("Duplicated devices are not allowed.");
}
}
}
/** Updates all the mirrored parameters. */
public void updateAllParameters() {
for (Map.Entry<String, ParameterData> entry : parameterMap.entrySet()) {
String parameterId = entry.getKey();
ParameterData data = entry.getValue();
if (data.requireGradient()) {
NDArray[] params = data.toArray();
parameterServer.update(parameterId, params);
}
}
}
/**
* Returns the value of a mirrored parameter on a device.
*
* @param parameter the parameter to get the value for
* @param device the device to get the mirror from
* @param training true for a training forward pass
* @return the value of the mirrored parameter on the device
*/
public NDArray getValue(Parameter parameter, Device device, boolean training) {
// for those optional parameters, they might not be in the ParameterStore
if (parameter == null) {
return null;
}
String parameterId = parameter.getId();
int index = deviceMap.get(device);
ParameterData data =
parameterMap.computeIfAbsent(parameterId, k -> new ParameterData(parameter));
if (data.isEmpty()) {
NDArray array = parameter.getArray();
if (parameterServer != null) {
// initialize on parameter store for first time
parameterServer.init(parameterId, new NDArray[] {array});
NDArray[] arrays = new NDArray[deviceMap.size()];
for (Map.Entry<Device, Integer> entry : deviceMap.entrySet()) {
Device dev = entry.getKey();
int i = entry.getValue();
if (i == index && array.getDevice().equals(dev)) {
arrays[i] = array;
} else {
arrays[i] = array.toDevice(dev, true);
arrays[i].attach(manager);
// some parameter doesn't require grad
// for example running_mean in BatchNorm
if (parameter.requiresGradient()) {
arrays[i].setRequiresGradient(true);
}
}
data.add(arrays[i]);
}
} else {
if (copy || !array.getDevice().equals(device)) {
array = array.toDevice(device, true);
array.attach(manager);
// some parameter doesn't require grad
// for example running_mean in BatchNorm
if (parameter.requiresGradient() && training) {
array.setRequiresGradient(true);
}
}
data.add(array);
}
}
return data.get(index);
}
/**
* Get the {@link NDManager} associated with {@code ParameterStore}.
*
* @return the {@link NDManager}
*/
public NDManager getManager() {
return manager;
}
/** Synchronizes the values on all mirrors with the main parameter. */
public void sync() {
for (ParameterData data : parameterMap.values()) {
data.sync();
}
}
/** A helper for {@link ParameterStore} that stores data for a single parameter. */
private final class ParameterData {
private Parameter parameter;
private List<NDArray> list;
private ParameterData(Parameter parameter) {
this.parameter = parameter;
list = Collections.synchronizedList(new ArrayList<>());
}
private boolean isEmpty() {
return list.isEmpty();
}
private void add(NDArray array) {
list.add(array);
}
private NDArray get(int index) {
return list.get(index);
}
private NDArray[] toArray() {
return list.toArray(new NDArray[0]);
}
private boolean requireGradient() {
return parameter.requiresGradient();
}
private void sync() {
NDArray array = parameter.getArray();
Device device = array.getDevice();
if (!deviceMap.containsKey(device)) {
                // the model's parameters may be loaded on a different device than any training device
list.get(0).copyTo(array);
}
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/Trainer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Parameter;
import ai.djl.nn.UninitializedParameterException;
import ai.djl.training.dataset.Batch;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.evaluator.Evaluator;
import ai.djl.training.listener.EpochTrainingListener;
import ai.djl.training.listener.EvaluatorTrainingListener;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.translate.TranslateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.function.Consumer;
/**
* The {@code Trainer} interface provides a session for model training.
*
 * <p>{@code Trainer} provides an easy and manageable interface for training. {@code Trainer} is
* not thread-safe.
*
* <p>See the tutorials on:
*
* <ul>
* <li><a
* href="https://docs.djl.ai/master/docs/demos/jupyter/tutorial/02_train_your_first_model.html">Training
* your first model</a>
* <li><a
* href="https://docs.djl.ai/master/docs/demos/jupyter/transfer_learning_on_cifar10.html">Training
* using transfer learning</a>
* <li><a href="https://docs.djl.ai/master/docs/demos/jupyter/load_mxnet_model.html">Inference
* with an MXNet model</a>
* </ul>
*
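 * <p>A minimal training-loop sketch (see {@link EasyTrain} for a ready-made helper; {@code
 * model}, {@code config}, {@code inputShape}, and {@code dataset} are assumed to exist):
 *
 * <pre>{@code
 * try (Trainer trainer = model.newTrainer(config)) {
 *     trainer.initialize(inputShape);
 *     for (Batch batch : trainer.iterateDataset(dataset)) {
 *         EasyTrain.trainBatch(trainer, batch);
 *         trainer.step();
 *         batch.close();
 *     }
 * }
 * }</pre>
 *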
* @see <a href="https://docs.djl.ai/master/docs/development/memory_management.html">The guide on
* memory management</a>
*/
public class Trainer implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(Trainer.class);
private Model model;
private NDManager manager;
private Metrics metrics;
private List<TrainingListener> listeners;
private Device[] devices;
private ParameterStore parameterStore;
private List<Evaluator> evaluators;
private Loss loss;
private ExecutorService executorService;
private boolean gradientsChecked;
/**
* Creates an instance of {@code Trainer} with the given {@link Model} and {@link
* TrainingConfig}.
*
* @param model the model the trainer will train on
* @param trainingConfig the configuration used by the trainer
*/
@SuppressWarnings("this-escape")
public Trainer(Model model, TrainingConfig trainingConfig) {
this.model = model;
manager = model.getNDManager().newSubManager();
manager.setName("trainer");
devices = trainingConfig.getDevices();
loss = trainingConfig.getLossFunction();
Objects.requireNonNull(loss, "You must specify a loss for the trainer");
evaluators = new ArrayList<>(trainingConfig.getEvaluators());
evaluators.add(loss); // track loss as an evaluator by default
executorService = trainingConfig.getExecutorService();
ParameterServer parameterServer =
manager.getEngine().newParameterServer(trainingConfig.getOptimizer());
parameterStore = new ParameterStore(manager, false);
parameterStore.setParameterServer(parameterServer, devices);
listeners = trainingConfig.getTrainingListeners();
notifyListeners(listener -> listener.onTrainingBegin(this));
}
/**
* Initializes the {@link Model} that the {@code Trainer} is going to train.
*
* @param shapes an array of {@code Shape} of the inputs
*/
public void initialize(Shape... shapes) {
model.getBlock().initialize(model.getNDManager(), model.getDataType(), shapes);
// call getValue on all params to initialize on all devices
model.getBlock()
.getParameters()
.forEach(
pair -> {
for (Device device : devices) {
try {
parameterStore.getValue(pair.getValue(), device, true);
} catch (UninitializedParameterException e) {
throw new IllegalStateException(
"Failed to initialize parameter: "
+ pair.getKey()
+ ".\n"
+ "If you are defining a Block extending"
+ " AbstractBlock, check that you are"
+ " initializing all child blocks as part of"
+ " the overload for"
+ " AbstractBlock.initializeChildBlocks().",
e);
}
}
});
}
/**
* Fetches an iterator that can iterate through the given {@link Dataset}.
*
* @param dataset the dataset to iterate through
* @return an {@link Iterable} of {@link Batch} that contains batches of data from the dataset
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public Iterable<Batch> iterateDataset(Dataset dataset) throws IOException, TranslateException {
return dataset.getData(getManager(), executorService);
}
/**
* Returns a new instance of {@link GradientCollector}.
*
* @return a new instance of {@link GradientCollector}
*/
public GradientCollector newGradientCollector() {
return manager.getEngine().newGradientCollector();
}
/**
* Applies the forward function of the model once on the given input {@link NDList}.
*
* @param input the input {@link NDList}
* @return the output of the forward function
*/
public NDList forward(NDList input) {
long begin = System.nanoTime();
try {
return model.getBlock().forward(parameterStore, input, true);
} finally {
addMetric("forward", begin);
}
}
/**
* Applies the forward function of the model once with both data and labels.
*
* @param data the input data {@link NDList}
* @param labels the input labels {@link NDList}
* @return the output of the forward function
*/
public NDList forward(NDList data, NDList labels) {
long begin = System.nanoTime();
try {
return model.getBlock().forward(parameterStore, data, labels, null);
} finally {
addMetric("forward", begin);
}
}
/**
     * Applies the forward function of the model once on the given input {@link NDList} in
     * evaluation (inference) mode.
*
* @param input the input {@link NDList}
* @return the output of the predict function
*/
public NDList evaluate(NDList input) {
return model.getBlock().forward(parameterStore, input, false, null);
}
/** Updates all of the parameters of the model once. */
public void step() {
if (!gradientsChecked) {
checkGradients();
}
long begin = System.nanoTime();
parameterStore.updateAllParameters();
addMetric("step", begin);
}
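    // A minimal sketch of one training iteration with this class (assumes a
    // prepared Batch named "batch"; all names here are illustrative):
    //
    //     try (GradientCollector collector = trainer.newGradientCollector()) {
    //         NDList preds = trainer.forward(batch.getData(), batch.getLabels());
    //         NDArray lossValue = trainer.getLoss().evaluate(batch.getLabels(), preds);
    //         collector.backward(lossValue);
    //     }
    //     trainer.step(); // apply the accumulated gradients to the parameters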
/**
     * Returns the {@link Metrics} used for benchmarking.
     *
     * @return the {@link Metrics} used for benchmarking
*/
public Metrics getMetrics() {
return metrics;
}
/**
     * Attaches a {@link Metrics} instance to use for benchmarking.
     *
     * @param metrics the {@link Metrics} instance
*/
public void setMetrics(Metrics metrics) {
this.metrics = metrics;
}
/**
* Returns the devices used for training.
*
* @return the devices used for training
*/
public Device[] getDevices() {
return devices;
}
/**
* Gets the training {@link Loss} function of the trainer.
*
* @return the {@link Loss} function
*/
public Loss getLoss() {
return loss;
}
/**
* Returns the model used to create this trainer.
*
* @return the model associated with this trainer
*/
public Model getModel() {
return model;
}
/**
* Returns the {@link ExecutorService}.
*
* @return the {@link ExecutorService}
*/
public Optional<ExecutorService> getExecutorService() {
return Optional.ofNullable(executorService);
}
/**
* Gets all {@link Evaluator}s.
*
* @return the evaluators used during training
*/
public List<Evaluator> getEvaluators() {
return evaluators;
}
/**
* Executes a method on each of the {@link TrainingListener}s.
*
* @param listenerConsumer a consumer that executes the method
*/
public final void notifyListeners(Consumer<TrainingListener> listenerConsumer) {
listeners.forEach(listenerConsumer);
}
/**
* Returns the {@link TrainingResult}.
*
* @return the {@code TrainingResult}
*/
public TrainingResult getTrainingResult() {
TrainingResult result = new TrainingResult();
for (TrainingListener listener : listeners) {
if (listener instanceof EpochTrainingListener) {
result.setEpoch(((EpochTrainingListener) listener).getNumEpochs());
} else if (listener instanceof EvaluatorTrainingListener) {
EvaluatorTrainingListener l = (EvaluatorTrainingListener) listener;
result.setEvaluations(l.getLatestEvaluations());
}
}
return result;
}
/**
* Gets the {@link NDManager} from the model.
*
* @return the {@link NDManager}
*/
public NDManager getManager() {
return manager;
}
/** {@inheritDoc} */
@SuppressWarnings("deprecation")
@Override
protected void finalize() throws Throwable {
if (manager.isOpen()) {
if (logger.isDebugEnabled()) {
logger.warn("Trainer for {} was not closed explicitly.", model.getName());
}
close();
}
super.finalize();
}
/** {@inheritDoc} */
@Override
public void close() {
notifyListeners(listener -> listener.onTrainingEnd(this));
parameterStore.sync();
manager.close();
}
/**
     * Checks if all gradients are zeros. This prevents users from calling {@code step()} without
     * running {@code backward} first.
*/
private void checkGradients() {
List<NDArray> grads = new ArrayList<>();
model.getBlock().getParameters().values().stream()
.filter(Parameter::requiresGradient)
.forEach(
param ->
grads.add(
parameterStore
.getValue(param, devices[0], true)
.getGradient()));
try (NDManager scoped = manager.newSubManager()) {
scoped.tempAttachAll(new NDList(grads));
NDList list = new NDList(grads.stream().map(NDArray::sum).toArray(NDArray[]::new));
float gradSum = NDArrays.stack(list).sum().getFloat();
if (gradSum == 0f) {
throw new IllegalStateException(
"Gradient values are all zeros, please call gradientCollector.backward() on"
+ "your target NDArray (usually loss), before calling step() ");
}
gradientsChecked = true;
}
}
/**
* Helper to add a metric for a time difference.
*
* @param metricName the metric name
* @param begin the time difference start (this method is called at the time difference end)
*/
public void addMetric(String metricName, long begin) {
if (metrics != null && begin > 0L) {
metrics.addMetric(metricName, System.nanoTime() - begin);
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/TrainingConfig.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.Device;
import ai.djl.nn.Parameter;
import ai.djl.training.evaluator.Evaluator;
import ai.djl.training.initializer.Initializer;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.optimizer.Optimizer;
import ai.djl.util.PairList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.function.Predicate;
/**
* An interface that is responsible for holding the configuration required by {@link Trainer}.
*
* <p>A trainer requires different information to facilitate the training process. This information
* is passed by using this configuration.
*
* <p>The required options for the configuration are:
*
* <ul>
 *   <li><b>Required</b> {@link Loss} - A loss function is used to measure how well a model
 *       matches the dataset. Because lower values of the function indicate a better match, it is
 *       called the "loss" function. This is the only required configuration.
* <li>{@link Evaluator} - An evaluator is used to measure how well a model matches the dataset.
* Unlike the loss, they are only there for people to look at and are not used for
* optimization. Since many losses are not as intuitive, adding other evaluators can help to
* understand how the model is doing. We recommend adding as many as possible.
 *   <li>{@link Device} - The device is the hardware that should be used to train your model.
 *       Typically, this is either CPU or GPU. The default is to use a single GPU if it is
 *       available or CPU if not.
* <li>{@link Initializer} - The initializer is used to set the initial values of the model's
* parameters before training. This can usually be left as the default initializer.
* <li>{@link Optimizer} - The optimizer is the algorithm that updates the model parameters to
* minimize the loss function. There are a variety of optimizers, most of which are variants
* of stochastic gradient descent. When you are just starting, you can use the default
* optimizer. Later on, customizing the optimizer can result in faster training.
* <li>{@link ExecutorService} - The executorService is used for parallelization when training
* batches on multiple GPUs or loading data from the dataset. If none is provided, all
 *       operations will be sequential.
* <li>{@link TrainingListener} - The training listeners add additional functionality to the
* training process through a listener interface. This can include showing training progress,
* stopping early if the training fails, or recording performance metrics. We offer several
* easy sets of {@link TrainingListener.Defaults}.
* </ul>
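 *
 * <p>As a minimal sketch, a configuration is typically built with the {@code
 * DefaultTrainingConfig} implementation (the loss and evaluator choices below are illustrative,
 * not required):
 *
 * <pre>
 * TrainingConfig config =
 *         new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
 *                 .addEvaluator(new Accuracy())
 *                 .addTrainingListeners(TrainingListener.Defaults.logging());
 * </pre>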
*/
public interface TrainingConfig {
/**
     * Gets the {@link Device}s that are available for computation.
*
* <p>This is necessary for a {@link Trainer} as it needs to know what kind of device it is
* running on, and how many devices it is running on.
*
* @return an array of {@link Device}
*/
Device[] getDevices();
/**
     * Gets a list of {@link Initializer}s and parameter {@link Predicate}s used to initialize the
     * parameters of the model.
     *
     * @return a {@link PairList} of {@link Initializer}s and the {@link Predicate}s that select
     *     which parameters each initializer applies to
*/
PairList<Initializer, Predicate<Parameter>> getInitializers();
/**
* Gets the {@link Optimizer} to use during training.
*
* @return an {@link Optimizer}
*/
Optimizer getOptimizer();
/**
* Gets the {@link Loss} function to compute the loss against.
*
* @return a {@link Loss} function
*/
Loss getLossFunction();
/**
* Gets the {@link ExecutorService} for parallelization.
*
* @return an {@link ExecutorService}
*/
ExecutorService getExecutorService();
/**
* Returns the list of {@link Evaluator}s that should be computed during training.
*
* @return a list of {@link Evaluator}s
*/
List<Evaluator> getEvaluators();
/**
* Returns the list of {@link TrainingListener}s that should be used during training.
*
* @return a list of {@link TrainingListener}s
*/
List<TrainingListener> getTrainingListeners();
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/TrainingResult.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training;
import ai.djl.util.JsonUtils;
import java.util.Collections;
import java.util.Map;
/** A class that is responsible for holding the training result produced by {@link Trainer}. */
public class TrainingResult {
private int epoch;
private Map<String, Float> evaluations = Collections.emptyMap();
/**
* Returns the train loss.
*
* @return the train loss
*/
public Float getTrainLoss() {
return evaluations.get("train_loss");
}
/**
* Returns the validate loss.
*
* @return the validate loss
*/
public Float getValidateLoss() {
return evaluations.get("validate_loss");
}
/**
* Returns the evaluation to which the specified key is mapped.
*
* @param key the key whose associated value is to be returned
* @return the evaluation to which the specified key is mapped
*/
public Float getTrainEvaluation(String key) {
return evaluations.get("train_" + key);
}
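    // For example (the evaluator name is illustrative), if an Accuracy evaluator is
    // registered as "Accuracy", getTrainEvaluation("Accuracy") looks up the
    // "train_Accuracy" entry of the evaluations map.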
/**
* Returns the evaluation to which the specified key is mapped.
*
* @param key the key whose associated value is to be returned
* @return the evaluation to which the specified key is mapped
*/
public Float getValidateEvaluation(String key) {
return evaluations.get("validate_" + key);
}
/**
     * Returns the actual number of epochs.
     *
     * @return the actual number of epochs
*/
public int getEpoch() {
return epoch;
}
/**
     * Sets the actual number of epochs.
     *
     * @param epoch the actual number of epochs
*/
public void setEpoch(int epoch) {
this.epoch = epoch;
}
/**
* Returns the raw evaluation metrics.
*
* @return the raw evaluation metrics
*/
public Map<String, Float> getEvaluations() {
return evaluations;
}
/**
* Sets the raw evaluation metrics.
*
* @param evaluations the raw evaluation metrics
*/
public void setEvaluations(Map<String, Float> evaluations) {
this.evaluations = evaluations;
}
    /** {@inheritDoc} */
@Override
public String toString() {
return JsonUtils.GSON_PRETTY.toJson(this);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes and implementations to train a neural network.
*
* @see ai.djl.training.Trainer
*/
package ai.djl.training;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/ArrayDataset.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.translate.Batchifier;
import ai.djl.translate.TranslateException;
import ai.djl.util.Progress;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.stream.Stream;
/**
 * {@code ArrayDataset} is an implementation of {@link RandomAccessDataset} that consists entirely
 * of large {@link NDArray}s. It is recommended only for datasets small enough to fit in memory
 * that come in array formats. Otherwise, consider using the {@link RandomAccessDataset} directly
 * instead.
*
* <p>There can be multiple data and label {@link NDArray}s within the dataset. Each sample will be
* retrieved by indexing each {@link NDArray} along the first dimension.
*
* <p>The following is an example of how to use ArrayDataset:
*
* <pre>
* ArrayDataset dataset = new ArrayDataset.Builder()
* .setData(data1, data2)
* .optLabels(labels1, labels2, labels3)
* .setSampling(20, false)
* .build();
* </pre>
*
 * <p>Suppose you get a {@link Batch} from {@code trainer.iterateDataset(dataset)} or {@code
 * dataset.getData(manager)}. The data of this batch will be an NDList with one NDArray for each
 * data input; in this case, it would be 2 arrays. Similarly, the labels would have 3 arrays.
*
* @see Dataset
*/
public class ArrayDataset extends RandomAccessDataset {
protected NDArray[] data;
protected NDArray[] labels;
/**
* Creates a new instance of {@code ArrayDataset} with the arguments in {@link Builder}.
*
* @param builder a builder with the required arguments
*/
public ArrayDataset(BaseBuilder<?> builder) {
super(builder);
if (builder instanceof Builder) {
Builder builder2 = (Builder) builder;
data = builder2.data;
labels = builder2.labels;
// check data and labels have the same size
long size = data[0].size(0);
if (Stream.of(data).anyMatch(array -> array.size(0) != size)) {
throw new IllegalArgumentException("All the NDArray must have the same length!");
}
if (labels != null && Stream.of(labels).anyMatch(array -> array.size(0) != size)) {
throw new IllegalArgumentException("All the NDArray must have the same length!");
}
}
}
ArrayDataset() {}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return data[0].size(0);
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) {
NDList datum = new NDList();
NDList label = new NDList();
for (NDArray array : data) {
datum.add(array.get(manager, index));
}
if (labels != null) {
for (NDArray array : labels) {
label.add(array.get(manager, index));
}
}
return new Record(datum, label);
}
/**
* Gets the {@link Batch} for the given indices from the dataset.
*
* @param manager the manager used to create the arrays
* @param indices indices of the requested data items
* @return a {@link Batch} that contains the data and label of the requested data items
*/
public Batch getByIndices(NDManager manager, long... indices) {
try (NDArray ndIndices = manager.create(indices)) {
NDIndex index = new NDIndex("{}", ndIndices);
NDList datum = new NDList();
NDList label = new NDList();
for (NDArray array : data) {
datum.add(array.get(manager, index));
}
if (labels != null) {
for (NDArray array : labels) {
label.add(array.get(manager, index));
}
}
return new Batch(
manager,
datum,
label,
indices.length,
Batchifier.STACK,
Batchifier.STACK,
-1,
-1);
}
}
/**
* Gets the {@link Batch} for the given range from the dataset.
*
* @param manager the manager used to create the arrays
* @param fromIndex low endpoint (inclusive) of the dataset
* @param toIndex high endpoint (exclusive) of the dataset
* @return a {@link Batch} that contains the data and label of the requested data items
*/
public Batch getByRange(NDManager manager, long fromIndex, long toIndex) {
NDIndex index = new NDIndex().addSliceDim(fromIndex, toIndex);
NDList datum = new NDList();
NDList label = new NDList();
for (NDArray array : data) {
datum.add(array.get(manager, index));
}
if (labels != null) {
for (NDArray array : labels) {
label.add(array.get(manager, index));
}
}
int size = Math.toIntExact(toIndex - fromIndex);
return new Batch(manager, datum, label, size, Batchifier.STACK, Batchifier.STACK, -1, -1);
}
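    // A minimal sketch of fetching rows [0, 20) as a single batch without going
    // through a Sampler (the dataset and manager are assumed to already exist):
    //
    //     Batch first = dataset.getByRange(manager.newSubManager(), 0, 20);
    //     NDList data = first.getData();
    //     first.close(); // closes the sub-manager and the batch's arrays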
/** {@inheritDoc} */
@Override
protected RandomAccessDataset newSubDataset(int[] indices, int from, int to) {
return new SubDataset(this, indices, from, to);
}
@Override
protected RandomAccessDataset newSubDataset(List<Long> subIndices) {
return new SubDatasetByIndices(this, subIndices);
}
/** {@inheritDoc} */
@Override
public Iterable<Batch> getData(
NDManager manager, Sampler sampler, ExecutorService executorService)
throws IOException, TranslateException {
prepare();
if (dataBatchifier == Batchifier.STACK && labelBatchifier == Batchifier.STACK) {
return new BulkDataIterable(
this,
manager,
sampler,
dataBatchifier,
labelBatchifier,
pipeline,
targetPipeline,
executorService,
prefetchNumber,
device);
}
return new DataIterable(
this,
manager,
sampler,
dataBatchifier,
labelBatchifier,
pipeline,
targetPipeline,
executorService,
prefetchNumber,
device);
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) throws IOException {}
/** The Builder to construct an {@link ArrayDataset}. */
public static final class Builder extends BaseBuilder<Builder> {
private NDArray[] data;
private NDArray[] labels;
/** {@inheritDoc} */
@Override
protected Builder self() {
return this;
}
/**
* Sets the data as an {@link NDArray} for the {@code ArrayDataset}.
*
* @param data an array of {@link NDArray} that contains the data
* @return this Builder
*/
public Builder setData(NDArray... data) {
this.data = data;
return self();
}
/**
* Sets the labels for the data in the {@code ArrayDataset}.
*
* @param labels an array of {@link NDArray} that contains the labels
* @return this Builder
*/
public Builder optLabels(NDArray... labels) {
this.labels = labels;
return self();
}
/**
* Builds a new instance of {@code ArrayDataset} with the specified data and labels.
*
* @return a new instance of {@code ArrayDataset}
*/
public ArrayDataset build() {
if (data == null || data.length == 0) {
throw new IllegalArgumentException("Please pass in at least one data");
}
return new ArrayDataset(this);
}
}
private static final class SubDataset extends ArrayDataset {
private ArrayDataset dataset;
private int[] indices;
private int from;
private int to;
public SubDataset(ArrayDataset dataset, int[] indices, int from, int to) {
this.dataset = dataset;
this.indices = indices;
this.from = from;
this.to = to;
this.sampler = dataset.sampler;
this.dataBatchifier = dataset.dataBatchifier;
this.labelBatchifier = dataset.labelBatchifier;
this.pipeline = dataset.pipeline;
this.targetPipeline = dataset.targetPipeline;
this.prefetchNumber = dataset.prefetchNumber;
this.device = dataset.device;
limit = Long.MAX_VALUE;
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) {
if (index >= size()) {
                throw new IndexOutOfBoundsException("index(" + index + ") >= size(" + size() + ").");
}
return dataset.get(manager, indices[Math.toIntExact(index) + from]);
}
/** {@inheritDoc} */
@Override
public Batch getByIndices(NDManager manager, long... indices) {
long[] resolvedIndices = new long[indices.length];
int i = 0;
for (long index : indices) {
resolvedIndices[i++] = this.indices[Math.toIntExact(index) + from];
}
return dataset.getByIndices(manager, resolvedIndices);
}
/** {@inheritDoc} */
@Override
public Batch getByRange(NDManager manager, long fromIndex, long toIndex) {
return dataset.getByRange(manager, fromIndex + from, toIndex + from);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return to - from;
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) {}
}
private static final class SubDatasetByIndices extends ArrayDataset {
private ArrayDataset dataset;
private List<Long> subIndices;
public SubDatasetByIndices(ArrayDataset dataset, List<Long> subIndices) {
this.dataset = dataset;
this.subIndices = subIndices;
this.sampler = dataset.sampler;
this.dataBatchifier = dataset.dataBatchifier;
this.labelBatchifier = dataset.labelBatchifier;
this.pipeline = dataset.pipeline;
this.targetPipeline = dataset.targetPipeline;
this.prefetchNumber = dataset.prefetchNumber;
this.device = dataset.device;
limit = Long.MAX_VALUE;
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) {
return dataset.get(manager, subIndices.get(Math.toIntExact(index)));
}
/** {@inheritDoc} */
@Override
public Batch getByIndices(NDManager manager, long... indices) {
long[] resolvedIndices = new long[indices.length];
int i = 0;
for (long index : indices) {
resolvedIndices[i++] = subIndices.get(Math.toIntExact(index));
}
return dataset.getByIndices(manager, resolvedIndices);
}
/** {@inheritDoc} */
@Override
public Batch getByRange(NDManager manager, long fromIndex, long toIndex) {
long[] resolvedIndices = new long[(int) (toIndex - fromIndex)];
int i = 0;
for (long index = fromIndex; index < toIndex; index++) {
resolvedIndices[i++] = subIndices.get(Math.toIntExact(index));
}
return dataset.getByIndices(manager, resolvedIndices);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return subIndices.size();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) {}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/Batch.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.Device;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.Batchifier;
import java.util.List;
/**
* A {@code Batch} is used to hold multiple items (data and label pairs) from a {@link Dataset}.
*
* <p>When training and performing inference, it is often more efficient to run multiple items
* through a network simultaneously rather than one at a time. For this reason, much of the API is
* oriented around the {@code Batch} class.
*
* <p>In a {@code Batch}, data and label are each an {@link NDList}. The data {@link NDList}
* represents the data for each input in the batch. The number of {@link ai.djl.ndarray.NDArray}s in
* the NDList is based on the number of different kinds of inputs, not the batch size. Similarly,
* the label {@link NDList} represents the labels for each kind of output.
*
* <p>For example, an Image Question and Answer dataset has two inputs: an image and a question. In
* this case, the data in the {@code Batch} will be an {@link NDList} containing an NCHW image
* {@link ai.djl.ndarray.NDArray} and an NTC question {@link ai.djl.ndarray.NDArray}. The label will
* be an {@link NDList} containing only an NTC answer {@link ai.djl.ndarray.NDArray}.
*
* <p>In order to differentiate a batch vs a single record (despite them both consisting of two
* {@link NDList}s), we have the {@link Batch} and the {@link Record} respectively.
*/
public class Batch implements AutoCloseable {
private NDManager manager;
private NDList data;
private NDList labels;
private Batchifier dataBatchifier;
private Batchifier labelBatchifier;
private int size;
private long progress;
private long progressTotal;
private List<?> indices;
/**
* Creates a new instance of {@code Batch} with the given manager, data and labels.
*
* @param manager the manager for the {@code Batch}
* @param data the {@link NDList} containing the data
* @param labels the {@link NDList} containing the labels
* @param size (batchSize) the number of {@link Record}s in the batch
* @param dataBatchifier the {@link Batchifier} that is used to split data
* @param labelBatchifier the {@link Batchifier} that is used for split labels
* @param progress the progress of the batch if it is part of some kind of iteration like a
* dataset iteration. Returns 0 if there is no iteration.
     * @param progressTotal the total or end value for the progress of the batch if it is part of
     *     some kind of iteration like a dataset iteration, or 0 if there is no iteration
*/
public Batch(
NDManager manager,
NDList data,
NDList labels,
int size,
Batchifier dataBatchifier,
Batchifier labelBatchifier,
long progress,
long progressTotal) {
this.manager = manager;
this.manager.setName("batch");
data.attach(manager);
labels.attach(manager);
this.data = data;
this.labels = labels;
this.size = size;
this.dataBatchifier = dataBatchifier;
this.labelBatchifier = labelBatchifier;
this.progress = progress;
this.progressTotal = progressTotal;
}
/**
* Creates a new instance of {@code Batch} with the given manager, data and labels.
*
* @param manager the manager for the {@code Batch}
* @param data the {@link NDList} containing the data
* @param labels the {@link NDList} containing the labels
* @param size (batchSize) the number of {@link Record}s in the batch
* @param dataBatchifier the {@link Batchifier} that is used to split data
* @param labelBatchifier the {@link Batchifier} that is used for split labels
* @param progress the progress of the batch if it is part of some kind of iteration like a
* dataset iteration. Returns 0 if there is no iteration.
     * @param progressTotal the total or end value for the progress of the batch if it is part of
     *     some kind of iteration like a dataset iteration, or 0 if there is no iteration
* @param indices the indices used to extract the data and labels
*/
public Batch(
NDManager manager,
NDList data,
NDList labels,
int size,
Batchifier dataBatchifier,
Batchifier labelBatchifier,
long progress,
long progressTotal,
List<?> indices) {
this.manager = manager;
this.manager.setName("batch");
data.attach(manager);
labels.attach(manager);
this.data = data;
this.labels = labels;
this.size = size;
this.dataBatchifier = dataBatchifier;
this.labelBatchifier = labelBatchifier;
this.progress = progress;
this.progressTotal = progressTotal;
this.indices = indices;
}
/**
* Gets the {@link NDManager} that is attached to this {@code Batch}.
*
* @return the {@link NDManager} attached to this {@code Batch}
*/
public NDManager getManager() {
return manager;
}
/**
* Gets the data of this {@code Batch}.
*
* @return an {@link NDList} that contains the data
*/
public NDList getData() {
return data;
}
/**
* Gets the labels corresponding to the data of this {@code Batch}.
*
* @return an {@link NDList} that contains the labels
*/
public NDList getLabels() {
return labels;
}
/**
* Returns the batchSize.
*
* @return the batchSize or number of {@link Record}s in the batch
*/
public int getSize() {
return size;
}
/**
* Returns the progress of the batch if it is part of some kind of iteration like a dataset
* iteration.
*
* @return the progress of the batch if it is part of some kind of iteration like a dataset
* iteration. Returns 0 if there is no iteration
*/
public long getProgress() {
return progress;
}
/**
* Returns the total or end value for the progress of the batch if it is part of some kind of
* iteration like a dataset iteration.
*
* @return the total or end value for the progress of the batch if it is part of some kind of
* iteration like a dataset iteration. Returns 0 if there is no iteration
*/
public long getProgressTotal() {
return progressTotal;
}
/** {@inheritDoc} */
@Override
public void close() {
manager.close();
manager = null;
}
/**
* Splits the data and labels in the {@code Batch} across the given devices.
*
     * <p>If {@code evenSplit} is {@code false}, the last device may have a smaller batch than the
     * rest.
*
* @param devices an array of {@link Device} across which the data must be split
* @param evenSplit whether each slice must have the same shape
* @return an array of {@code Batch}, each of which corresponds to a {@link Device}
*/
public Batch[] split(Device[] devices, boolean evenSplit) {
int deviceCount = devices.length;
if (deviceCount == 1) {
// TODO: we should change to following once we support slice:
// NDList d = data.asInDevice(devices[0], false);
// avoid copy if data already in device
if (data.head().getDevice().equals(devices[0])) {
return new Batch[] {
new Batch(
manager.newSubManager(),
data,
labels,
size,
dataBatchifier,
labelBatchifier,
progress,
progressTotal,
indices)
};
} else {
NDList d = data.toDevice(devices[0], true);
NDList l = labels.toDevice(devices[0], true);
return new Batch[] {
new Batch(
manager.newSubManager(devices[0]),
d,
l,
size,
dataBatchifier,
labelBatchifier,
progress,
progressTotal,
indices)
};
}
}
NDList[] splittedData = split(data, dataBatchifier, deviceCount, evenSplit);
NDList[] splittedLabels = split(labels, labelBatchifier, deviceCount, evenSplit);
Batch[] splitted = new Batch[splittedData.length];
int baseSplitSize = size / deviceCount;
for (int i = 0; i < splittedData.length; ++i) {
NDList d = splittedData[i].toDevice(devices[i], true);
NDList l = splittedLabels[i].toDevice(devices[i], true);
int subSize =
(i == splittedData.length - 1) ? (size - i * baseSplitSize) : baseSplitSize;
splitted[i] =
new Batch(
manager.newSubManager(devices[i]),
d,
l,
subSize,
dataBatchifier,
labelBatchifier,
progress,
progressTotal,
indices);
}
return splitted;
}
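    // A minimal sketch of splitting a batch across two GPUs (the device choice is
    // illustrative):
    //
    //     Device[] gpus = {Device.gpu(0), Device.gpu(1)};
    //     Batch[] parts = batch.split(gpus, true); // even halves, one per device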
private NDList[] split(NDList list, Batchifier batchifier, int numOfSlices, boolean evenSplit) {
if (batchifier == null) {
throw new IllegalStateException(
"Split can only be called on a batch containing a batchifier");
}
return batchifier.split(list, numOfSlices, evenSplit);
}
/**
* Returns the indices used to extract the data and labels from the {@link Dataset}.
*
* @return a list of {@link Long} if the {@link Dataset} is a {@link RandomAccessDataset},
* otherwise may return <code>null</code>.
*/
public List<?> getIndices() {
return indices;
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/BatchSampler.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* {@code BatchSampler} is a {@link Sampler} that returns a single epoch over the data.
*
* <p>{@code BatchSampler} wraps another {@link ai.djl.training.dataset.Sampler.SubSampler} to yield
* a mini-batch of indices.
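 *
 * <p>A minimal sketch (the sub-sampler and batch size here are illustrative): wrapping a {@code
 * RandomSampler} with a batch size of 32 yields lists of 32 shuffled indices per iteration:
 *
 * <pre>
 * Sampler sampler = new BatchSampler(new RandomSampler(), 32, false);
 * </pre>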
*/
public class BatchSampler implements Sampler {
private Sampler.SubSampler subSampler;
private int batchSize;
private boolean dropLast;
/**
* Creates a new instance of {@code BatchSampler} that samples from the given {@link
* ai.djl.training.dataset.Sampler.SubSampler}, and yields a mini-batch of indices.
*
     * <p>The last batch will not be dropped. The size of the last batch may be smaller than the
     * batch size if the size of the dataset is not a multiple of the batch size.
*
* @param subSampler the {@link ai.djl.training.dataset.Sampler.SubSampler} to sample from
* @param batchSize the required batch size
*/
public BatchSampler(Sampler.SubSampler subSampler, int batchSize) {
this(subSampler, batchSize, false);
}
/**
* Creates a new instance of {@code BatchSampler} that samples from the given {@link
* ai.djl.training.dataset.Sampler.SubSampler}, and yields a mini-batch of indices.
*
* @param subSampler the {@link ai.djl.training.dataset.Sampler.SubSampler} to sample from
* @param batchSize the required batch size
* @param dropLast whether the {@code BatchSampler} should drop the last few samples in case the
* size of the dataset is not a multiple of batch size
*/
public BatchSampler(Sampler.SubSampler subSampler, int batchSize, boolean dropLast) {
this.subSampler = subSampler;
this.batchSize = batchSize;
this.dropLast = dropLast;
}
/** {@inheritDoc} */
@Override
public Iterator<List<Long>> sample(RandomAccessDataset dataset) {
return new Iterate(dataset);
}
/** {@inheritDoc} */
@Override
public int getBatchSize() {
return batchSize;
}
class Iterate implements Iterator<List<Long>> {
private long size;
private long current;
private Iterator<Long> subSample;
Iterate(RandomAccessDataset dataset) {
current = 0;
if (dropLast) {
this.size = dataset.size() / batchSize;
} else {
this.size = (dataset.size() + batchSize - 1) / batchSize;
}
subSample = subSampler.sample(dataset);
}
/** {@inheritDoc} */
@Override
public boolean hasNext() {
return current < size;
}
/** {@inheritDoc} */
@Override
public List<Long> next() {
List<Long> batchIndices = new ArrayList<>();
while (subSample.hasNext()) {
batchIndices.add(subSample.next());
if (batchIndices.size() == batchSize) {
break;
}
}
current++;
return batchIndices;
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/BulkDataIterable.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.Device;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Pipeline;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutorService;
/**
* BulkDataIterable specializes DataIterable in using {@link ArrayDataset#getByRange(NDManager,
* long, long)} or {@link ArrayDataset#getByIndices(NDManager, long...)} to create {@link Batch}
* instances more efficiently.
*/
public class BulkDataIterable extends DataIterable {
/**
* Creates a new instance of {@code BulkDataIterable} with the given parameters.
*
* @param dataset the dataset to iterate on
* @param manager the manager to create the arrays
* @param sampler a sampler to sample data with
* @param dataBatchifier a batchifier for data
* @param labelBatchifier a batchifier for labels
* @param pipeline the pipeline of transforms to apply on the data
* @param targetPipeline the pipeline of transforms to apply on the labels
* @param executor an {@link ExecutorService}
* @param preFetchNumber the number of samples to prefetch
* @param device the {@link Device}
*/
public BulkDataIterable(
ArrayDataset dataset,
NDManager manager,
Sampler sampler,
Batchifier dataBatchifier,
Batchifier labelBatchifier,
Pipeline pipeline,
Pipeline targetPipeline,
ExecutorService executor,
int preFetchNumber,
Device device) {
super(
dataset,
manager,
sampler,
dataBatchifier,
labelBatchifier,
pipeline,
targetPipeline,
executor,
preFetchNumber,
device);
}
@Override
protected Batch fetch(List<Long> indices, int progress) throws IOException {
NDManager subManager = manager.newSubManager();
subManager.setName("dataIter fetch");
int batchSize = indices.size();
Batch raw;
if (isRange(indices)) {
long fromIndex = indices.get(0);
long toIndex = fromIndex + indices.size();
raw = ((ArrayDataset) dataset).getByRange(subManager, fromIndex, toIndex);
} else {
long[] indicesArr = indices.stream().mapToLong(Long::longValue).toArray();
raw = ((ArrayDataset) dataset).getByIndices(subManager, indicesArr);
}
NDList batchData = raw.getData();
// apply transform
if (pipeline != null) {
batchData = pipeline.transform(batchData);
}
NDList batchLabels = raw.getLabels();
// apply label transform
if (targetPipeline != null) {
batchLabels = targetPipeline.transform(batchLabels);
}
// pin to a specific device
if (device != null) {
batchData = batchData.toDevice(device, false);
batchLabels = batchLabels.toDevice(device, false);
}
return new Batch(
subManager,
batchData,
batchLabels,
batchSize,
dataBatchifier,
labelBatchifier,
progress,
dataset.size(),
indices);
}
/**
     * Checks whether the given indices actually represent a range.
     *
     * @param indices the indices to examine
     * @return whether the given indices are sorted in ascending order with no gaps and have at
     *     least one element
*/
public static boolean isRange(List<Long> indices) {
if (indices.isEmpty()) {
return false;
}
long from = indices.get(0);
for (long index : indices) {
if (index != from++) {
return false;
}
}
return true;
}
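    // For example, [3, 4, 5, 6] is a range, while [3, 5, 6] (a gap) and [] (empty)
    // are not.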
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/DataIterable.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.Device;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Pipeline;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
/**
* DataIterable is a data loader that combines {@link Dataset}, {@link Batchifier}, {@link
* Pipeline}, and {@link Sampler} to provide an iterable over the given {@link RandomAccessDataset}.
*
 * <p>We don't recommend using DataIterable directly. Instead, use {@link RandomAccessDataset}
 * combined with {@link ai.djl.training.Trainer} to iterate over the {@link RandomAccessDataset}.
*/
public class DataIterable implements Iterable<Batch>, Iterator<Batch> {
private static final Logger logger = LoggerFactory.getLogger(DataIterable.class);
protected RandomAccessDataset dataset;
protected NDManager manager;
protected Batchifier dataBatchifier;
protected Batchifier labelBatchifier;
protected Pipeline pipeline;
protected Pipeline targetPipeline;
private ExecutorService executor;
protected Device device;
private Iterator<List<Long>> sample;
// for multithreading
private Queue<Future<Batch>> queue;
private AtomicInteger progressCounter;
private boolean autoClose;
/**
* Creates a new instance of {@code DataIterable} with the given parameters.
*
* @param dataset the dataset to iterate on
* @param manager the manager to create the arrays
* @param sampler a sampler to sample data with
* @param dataBatchifier a batchifier for data
* @param labelBatchifier a batchifier for labels
* @param pipeline the pipeline of transforms to apply on the data
* @param targetPipeline the pipeline of transforms to apply on the labels
* @param executor an {@link ExecutorService}
* @param preFetchNumber the number of samples to prefetch
* @param device the {@link Device}
*/
public DataIterable(
RandomAccessDataset dataset,
NDManager manager,
Sampler sampler,
Batchifier dataBatchifier,
Batchifier labelBatchifier,
Pipeline pipeline,
Pipeline targetPipeline,
ExecutorService executor,
int preFetchNumber,
Device device) {
this.dataset = dataset;
this.manager = manager.newSubManager();
this.manager.setName("dataIter");
this.dataBatchifier = dataBatchifier;
this.labelBatchifier = labelBatchifier;
this.pipeline = pipeline;
this.targetPipeline = targetPipeline;
this.executor = executor;
this.device = device;
progressCounter = new AtomicInteger(0);
String close = System.getProperty("ai.djl.dataiterator.autoclose", "true");
autoClose = Boolean.parseBoolean(close);
sample = sampler.sample(dataset);
if (executor != null) {
queue = new LinkedList<>();
// prefetch
for (int i = 0; i < preFetchNumber; i++) {
preFetch();
}
}
}
/** {@inheritDoc} */
@Override
public Iterator<Batch> iterator() {
return this;
}
/** {@inheritDoc} */
@Override
public boolean hasNext() {
if (executor != null) {
if (queue.isEmpty()) {
if (autoClose) {
manager.close();
}
return false;
}
return true;
}
if (!sample.hasNext()) {
if (autoClose) {
manager.close();
}
return false;
}
return true;
}
/** {@inheritDoc} */
@Override
public Batch next() {
if (executor == null) {
// single thread data loading with blocking fetch
List<Long> indices = sample.next();
try {
int progress = progressCounter.addAndGet(indices.size());
return fetch(indices, progress);
} catch (IOException e) {
logger.error(e.getMessage());
throw new IllegalStateException("Data loading failed", e);
}
} else {
// multithreading data loading with async fetch
preFetch();
Future<Batch> future = queue.poll();
try {
return future.get();
} catch (InterruptedException | ExecutionException e) {
logger.error(e.getMessage());
throw new IllegalStateException("Data loading failed", e);
}
}
}
protected Batch fetch(List<Long> indices, int progress) throws IOException {
NDManager subManager = manager.newSubManager();
subManager.setName("dataIter fetch");
int batchSize = indices.size();
NDList[] data = new NDList[batchSize];
NDList[] labels = new NDList[batchSize];
for (int i = 0; i < batchSize; i++) {
Record record = dataset.get(subManager, indices.get(i));
data[i] = record.getData();
// apply transform
if (pipeline != null) {
data[i] = pipeline.transform(data[i]);
}
labels[i] = record.getLabels();
}
NDList batchData = dataBatchifier.batchify(data);
NDList batchLabels = labelBatchifier.batchify(labels);
Arrays.stream(data).forEach(NDList::close);
Arrays.stream(labels).forEach(NDList::close);
// apply label transform
if (targetPipeline != null) {
batchLabels = targetPipeline.transform(batchLabels);
}
// pin to a specific device
if (device != null) {
batchData = batchData.toDevice(device, false);
batchLabels = batchLabels.toDevice(device, false);
}
return new Batch(
subManager,
batchData,
batchLabels,
batchSize,
dataBatchifier,
labelBatchifier,
progress,
dataset.size(),
indices);
}
private void preFetch() {
if (!sample.hasNext()) {
return;
}
List<Long> indices = sample.next();
Callable<Batch> task = new PreFetchCallable(indices);
Future<Batch> result = executor.submit(task);
queue.offer(result);
}
class PreFetchCallable implements Callable<Batch> {
private List<Long> indices;
private int progress;
public PreFetchCallable(List<Long> indices) {
this.indices = indices;
progress = progressCounter.getAndAdd(indices.size());
}
/** {@inheritDoc} */
@Override
public Batch call() throws IOException {
return fetch(indices, progress);
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/Dataset.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.TranslateException;
import ai.djl.translate.TranslatorOptions;
import ai.djl.util.Progress;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
/**
* An interface to represent a set of sample data/label pairs to train a model.
*
* @see <a href="https://docs.djl.ai/master/docs/dataset.html">The guide to datasets</a>
* @see <a href="https://docs.djl.ai/master/docs/development/how_to_use_dataset.html">The guide to
* implementing a custom dataset</a>
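 *
 * <p>A minimal usage sketch (the dataset construction itself is illustrative and omitted here):
 *
 * <pre>
 * dataset.prepare();
 * try (NDManager manager = NDManager.newBaseManager()) {
 *     for (Batch batch : dataset.getData(manager)) {
 *         // ... use batch.getData() and batch.getLabels() ...
 *         batch.close();
 *     }
 * }
 * </pre>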
*/
public interface Dataset {
/**
* Fetches an iterator that can iterate through the {@link Dataset}.
*
     * @param manager the manager used to create the arrays
* @return an {@link Iterable} of {@link Batch} that contains batches of data from the dataset
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
Iterable<Batch> getData(NDManager manager) throws IOException, TranslateException;
/**
* Fetches an iterator that can iterate through the {@link Dataset} with multiple threads.
*
     * @param manager the manager used to create the arrays
* @param executorService the executorService to use for multi-threading
* @return an {@link Iterable} of {@link Batch} that contains batches of data from the dataset
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
default Iterable<Batch> getData(NDManager manager, ExecutorService executorService)
throws IOException, TranslateException {
return getData(manager);
}
/**
* Prepares the dataset for use.
*
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
default void prepare() throws IOException, TranslateException {
prepare(null);
}
/**
* Prepares the dataset for use with tracked progress.
*
* @param progress the progress tracker
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
void prepare(Progress progress) throws IOException, TranslateException;
/**
* Returns {@link TranslatorOptions} that match the pre-processing and post-processing of this
* dataset.
*
* @return matching translators or null if none defined
*/
default TranslatorOptions matchingTranslatorOptions() {
return null;
}
/** An enum that indicates the mode - training, test or validation. */
enum Usage {
TRAIN,
TEST,
VALIDATION
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/RandomAccessDataset.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.Device;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Pipeline;
import ai.djl.translate.Transform;
import ai.djl.translate.TranslateException;
import ai.djl.util.Pair;
import ai.djl.util.Progress;
import ai.djl.util.RandomUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * RandomAccessDataset represents a dataset that supports random access reads, i.e. it can access
 * a specific data item given its index.
*
* <p>Almost all datasets in DJL extend, either directly or indirectly, {@link RandomAccessDataset}.
*
* @see <a href="https://docs.djl.ai/master/docs/development/how_to_use_dataset.html">The guide to
* implementing a custom dataset</a>
*/
public abstract class RandomAccessDataset implements Dataset {
protected Sampler sampler;
protected Batchifier dataBatchifier;
protected Batchifier labelBatchifier;
protected Pipeline pipeline;
protected Pipeline targetPipeline;
protected int prefetchNumber;
protected long limit;
protected Device device;
RandomAccessDataset() {}
/**
* Creates a new instance of {@link RandomAccessDataset} with the given necessary
* configurations.
*
* @param builder a builder with the necessary configurations
*/
public RandomAccessDataset(BaseBuilder<?> builder) {
this.sampler = builder.getSampler();
this.dataBatchifier = builder.dataBatchifier;
this.labelBatchifier = builder.labelBatchifier;
this.pipeline = builder.pipeline;
this.targetPipeline = builder.targetPipeline;
this.prefetchNumber = builder.prefetchNumber;
this.limit = builder.limit;
this.device = builder.device;
}
/**
* Gets the {@link Record} for the given index from the dataset.
*
* @param manager the manager used to create the arrays
* @param index the index of the requested data item
* @return a {@link Record} that contains the data and label of the requested data item
* @throws IOException if an I/O error occurs
*/
public abstract Record get(NDManager manager, long index) throws IOException;
/** {@inheritDoc} */
@Override
public Iterable<Batch> getData(NDManager manager) throws IOException, TranslateException {
return getData(manager, sampler, null);
}
/** {@inheritDoc} */
@Override
public Iterable<Batch> getData(NDManager manager, ExecutorService executorService)
throws IOException, TranslateException {
return getData(manager, sampler, executorService);
}
/**
* Fetches an iterator that can iterate through the {@link Dataset} with a custom sampler.
*
* @param manager the manager to create the arrays
* @param sampler the sampler to use to iterate through the dataset
* @return an {@link Iterable} of {@link Batch} that contains batches of data from the dataset
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public Iterable<Batch> getData(NDManager manager, Sampler sampler)
throws IOException, TranslateException {
return getData(manager, sampler, null);
}
/**
* Fetches an iterator that can iterate through the {@link Dataset} with a custom sampler
* multi-threaded.
*
* @param manager the manager to create the arrays
* @param sampler the sampler to use to iterate through the dataset
* @param executorService the executorService to multi-thread with
* @return an {@link Iterable} of {@link Batch} that contains batches of data from the dataset
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public Iterable<Batch> getData(
NDManager manager, Sampler sampler, ExecutorService executorService)
throws IOException, TranslateException {
prepare();
return new DataIterable(
this,
manager,
sampler,
dataBatchifier,
labelBatchifier,
pipeline,
targetPipeline,
executorService,
prefetchNumber,
device);
}
/**
* Returns the size of this {@code Dataset}.
*
* @return the size of this {@code Dataset}
*/
public long size() {
return Math.min(limit, availableSize());
}
/**
* Returns the number of records available to be read in this {@code Dataset}.
*
* @return the number of records available to be read in this {@code Dataset}
*/
protected abstract long availableSize();
/**
     * Splits the dataset into multiple portions.
     *
     * @param ratio the ratio of each sub dataset
     * @return an array of the sub datasets
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public RandomAccessDataset[] randomSplit(int... ratio) throws IOException, TranslateException {
prepare();
if (ratio.length < 2) {
throw new IllegalArgumentException("Requires at least two split portion.");
}
int size = Math.toIntExact(size());
int[] indices = IntStream.range(0, size).toArray();
for (int i = 0; i < size; ++i) {
swap(indices, i, RandomUtils.nextInt(size));
}
RandomAccessDataset[] ret = new RandomAccessDataset[ratio.length];
double sum = Arrays.stream(ratio).sum();
int from = 0;
for (int i = 0; i < ratio.length - 1; ++i) {
int to = from + (int) (ratio[i] / sum * size);
ret[i] = newSubDataset(indices, from, to);
from = to;
}
ret[ratio.length - 1] = newSubDataset(indices, from, size);
return ret;
}
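    // A minimal sketch: an 8:1:1 split into train/validation/test views (the
    // proportions are illustrative):
    //
    //     RandomAccessDataset[] parts = dataset.randomSplit(8, 1, 1);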
/**
* Returns a view of the portion of this data between the specified {@code fromIndex},
* inclusive, and {@code toIndex}, exclusive.
*
* @param fromIndex low endpoint (inclusive) of the subDataset
     * @param toIndex high endpoint (exclusive) of the subDataset
* @return a view of the specified range within this dataset
*/
public RandomAccessDataset subDataset(int fromIndex, int toIndex) {
int size = Math.toIntExact(size());
int[] indices = IntStream.range(0, size).toArray();
return newSubDataset(indices, fromIndex, toIndex);
}
/**
* Returns a view of the portion of this data for the specified {@code subIndices}.
*
* @param subIndices sub-set of indices of this dataset
* @return a view of the specified indices within this dataset
*/
public RandomAccessDataset subDataset(List<Long> subIndices) {
if (BulkDataIterable.isRange(subIndices)) {
int size = Math.toIntExact(size());
int[] indices = IntStream.range(0, size).toArray();
long fromIndex = subIndices.get(0);
long toIndex = subIndices.get(0) + subIndices.size();
return newSubDataset(indices, Math.toIntExact(fromIndex), Math.toIntExact(toIndex));
}
return newSubDataset(subIndices);
}
/**
     * Returns a view of the portion of this data for the specified record keys. Assuming that the
     * records of this dataset are represented by the keys in <code>recordKeys</code>, then <code>
     * subRecordKeys</code> defines the view on the corresponding records of the dataset.
*
* @param recordKeys unique keys for all records of this dataset.
* @param subRecordKeys keys to define the view on the dataset. All keys in <code>subRecordKeys
* </code> must be contained in <code>recordKeys</code> but may occur more than once.
* @param <K> the record key type.
* @return a view of the specified records within this dataset
*/
public <K> RandomAccessDataset subDataset(List<K> recordKeys, List<K> subRecordKeys) {
if (this.size() != recordKeys.size()) {
throw new IllegalArgumentException(
"Requires as many record keys as there are records in the dataset.");
}
Map<K, Long> indicesOfRecordKeys = new ConcurrentHashMap<>(recordKeys.size());
for (int index = 0; index < recordKeys.size(); index++) {
Long prevIndex = indicesOfRecordKeys.put(recordKeys.get(index), (long) index);
if (prevIndex != null) {
throw new IllegalArgumentException(
"At least two keys at position "
+ prevIndex
+ " and "
+ index
+ " are equal!");
}
}
return subDataset(indicesOfRecordKeys, subRecordKeys);
}
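    // For example (the keys are illustrative), with recordKeys = ["a", "b", "c"], calling
    // subDataset(recordKeys, Arrays.asList("b", "b")) yields a two-record view that
    // repeats the record keyed "b".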
/**
     * Returns a view of the portion of this data for the specified record keys. Assuming that the
     * records of this dataset are represented by the keys in <code>indicesOfRecordKeys</code>,
     * then <code>subRecordKeys</code> defines the view on the corresponding records of the
     * dataset.
*
* @param indicesOfRecordKeys Map for keys of the records in this dataset to their index
* position within this dataset. While this map typically maps all records, technically it
* just needs to map the ones occurring in <code>subRecordKeys</code>.
* @param subRecordKeys Keys to define the view on the dataset. All keys in <code>subRecordKeys
* </code> must be contained in <code>indicesOfRecordKeys</code> but may occur more than
* once.
* @param <K> the record key type.
* @return a view of the records identified by the specified keys of this dataset
*/
public <K> RandomAccessDataset subDataset(
Map<K, Long> indicesOfRecordKeys, List<K> subRecordKeys) {
List<Long> subIndices = new ArrayList<>(subRecordKeys.size());
for (K recordKey : subRecordKeys) {
Long index = indicesOfRecordKeys.get(recordKey);
if (index == null) {
throw new IllegalArgumentException(
"The key of subRecordKeys at position "
+ subRecordKeys.indexOf(recordKey)
+ " is not contained in recordKeys!");
}
subIndices.add(index);
}
return subDataset(subIndices);
}
protected RandomAccessDataset newSubDataset(int[] indices, int from, int to) {
return new SubDataset(this, indices, from, to);
}
protected RandomAccessDataset newSubDataset(List<Long> subIndices) {
return new SubDatasetByIndices(this, subIndices);
}
/**
* Returns the dataset contents as a Java array.
*
* <p>Each Number[] is a flattened dataset record and the Number[][] is the array of all
* records.
*
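     * <p>A minimal sketch (assuming {@code dataset} and {@code manager} already exist):
     *
     * <pre>{@code
     * Pair<Number[][], Number[][]> arrays = dataset.toArray(manager);
     * Number[][] data = arrays.getKey();
     * Number[][] labels = arrays.getValue();
     * }</pre>
     *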
* @param manager the manager to create the arrays
* @return the dataset contents as a Java array
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public Pair<Number[][], Number[][]> toArray(NDManager manager)
throws IOException, TranslateException {
        Sampler batchSampler = new BatchSampler(new SequenceSampler(), 1, false);
int size = Math.toIntExact(size());
Number[][] data = new Number[size][];
Number[][] labels = new Number[size][];
int index = 0;
        for (Batch batch : getData(manager, batchSampler)) {
data[index] = flattenRecord(batch.getData());
labels[index] = flattenRecord(batch.getLabels());
batch.close();
index++;
}
return new Pair<>(data, labels);
}
private Number[] flattenRecord(NDList data) {
NDList flattened =
new NDList(data.stream().map(NDArray::flatten).collect(Collectors.toList()));
if (flattened.size() == 0) {
return null; // NOPMD
}
if (flattened.size() == 1) {
return flattened.get(0).toArray();
}
return NDArrays.concat(flattened).toArray();
}
private static void swap(int[] arr, int i, int j) {
int tmp = arr[i];
arr[i] = arr[j];
arr[j] = tmp;
}
/** The Builder to construct a {@link RandomAccessDataset}. */
public abstract static class BaseBuilder<T extends BaseBuilder<T>> {
protected Sampler sampler;
protected Batchifier dataBatchifier = Batchifier.STACK;
protected Batchifier labelBatchifier = Batchifier.STACK;
protected Pipeline pipeline;
protected Pipeline targetPipeline;
protected int prefetchNumber = 2;
protected long limit = Long.MAX_VALUE;
protected Device device;
/**
* Gets the {@link Sampler} for the dataset.
*
* @return the {@code Sampler}
*/
public Sampler getSampler() {
Objects.requireNonNull(sampler, "The sampler must be set");
return sampler;
}
/**
* Sets the {@link Sampler} with the given batch size.
*
* @param batchSize the batch size
* @param random whether the sampling has to be random
* @return this {@code BaseBuilder}
*/
public T setSampling(int batchSize, boolean random) {
return setSampling(batchSize, random, false);
}
/**
* Sets the {@link Sampler} with the given batch size.
*
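         * <p>A minimal sketch (assuming {@code builder} is a concrete {@code BaseBuilder}):
         *
         * <pre>{@code
         * builder.setSampling(32, true, false); // random batches of 32, keep the last partial batch
         * }</pre>
         *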
* @param batchSize the batch size
* @param random whether the sampling has to be random
* @param dropLast whether to drop the last incomplete batch
* @return this {@code BaseBuilder}
*/
public T setSampling(int batchSize, boolean random, boolean dropLast) {
if (random) {
sampler = new BatchSampler(new RandomSampler(), batchSize, dropLast);
} else {
sampler = new BatchSampler(new SequenceSampler(), batchSize, dropLast);
}
return self();
}
/**
* Sets the {@link Sampler} for the dataset.
*
* @param sampler the {@link Sampler} to be set
* @return this {@code BaseBuilder}
*/
public T setSampling(Sampler sampler) {
this.sampler = sampler;
return self();
}
/**
* Sets the {@link Batchifier} for the data.
*
* @param dataBatchifier the {@link Batchifier} to be set
* @return this {@code BaseBuilder}
*/
public T optDataBatchifier(Batchifier dataBatchifier) {
this.dataBatchifier = dataBatchifier;
return self();
}
/**
* Sets the {@link Batchifier} for the labels.
*
* @param labelBatchifier the {@link Batchifier} to be set
* @return this {@code BaseBuilder}
*/
public T optLabelBatchifier(Batchifier labelBatchifier) {
this.labelBatchifier = labelBatchifier;
return self();
}
/**
* Sets the {@link Pipeline} of {@link ai.djl.translate.Transform} to be applied on the
* data.
*
* @param pipeline the {@link Pipeline} of {@link ai.djl.translate.Transform} to be applied
* on the data
* @return this {@code BaseBuilder}
*/
public T optPipeline(Pipeline pipeline) {
this.pipeline = pipeline;
return self();
}
/**
* Adds the {@link Transform} to the {@link Pipeline} to be applied on the data.
*
* @param transform the {@link Transform} to be added
* @return this builder
*/
public T addTransform(Transform transform) {
if (pipeline == null) {
pipeline = new Pipeline();
}
pipeline.add(transform);
return self();
}
/**
* Sets the {@link Pipeline} of {@link ai.djl.translate.Transform} to be applied on the
* labels.
*
* @param targetPipeline the {@link Pipeline} of {@link ai.djl.translate.Transform} to be
* applied on the labels
* @return this {@code BaseBuilder}
*/
public T optTargetPipeline(Pipeline targetPipeline) {
this.targetPipeline = targetPipeline;
return self();
}
/**
* Adds the {@link Transform} to the target {@link Pipeline} to be applied on the labels.
*
* @param transform the {@link Transform} to be added
* @return this builder
*/
public T addTargetTransform(Transform transform) {
if (targetPipeline == null) {
targetPipeline = new Pipeline();
}
targetPipeline.add(transform);
return self();
}
/**
* Sets the number of batches to prefetch at once.
*
* @param prefetchNumber the number of batches to prefetch at once
* @return this {@code BaseBuilder}
*/
public T optPrefetchNumber(int prefetchNumber) {
this.prefetchNumber = prefetchNumber;
return self();
}
/**
* Sets the {@link Device}.
*
* @param device the device
* @return this {@code BaseBuilder}
*/
public T optDevice(Device device) {
this.device = device;
return self();
}
/**
* Sets this dataset's limit.
*
* <p>The limit is usually used for testing purposes to test only with a subset of the
* dataset.
*
* @param limit the limit of this dataset's records
* @return this {@code BaseBuilder}
*/
public T optLimit(long limit) {
this.limit = limit;
return self();
}
/**
         * Returns this {@code BaseBuilder} object.
*
* @return this {@code BaseBuilder}
*/
protected abstract T self();
}
private static final class SubDataset extends RandomAccessDataset {
private RandomAccessDataset dataset;
private int[] indices;
private int from;
private int to;
public SubDataset(RandomAccessDataset dataset, int[] indices, int from, int to) {
this.dataset = dataset;
this.indices = indices;
this.from = from;
this.to = to;
this.sampler = dataset.sampler;
this.dataBatchifier = dataset.dataBatchifier;
this.labelBatchifier = dataset.labelBatchifier;
this.pipeline = dataset.pipeline;
this.targetPipeline = dataset.targetPipeline;
this.prefetchNumber = dataset.prefetchNumber;
this.device = dataset.device;
limit = Long.MAX_VALUE;
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) throws IOException {
if (index >= size()) {
                throw new IndexOutOfBoundsException("index(" + index + ") >= size(" + size() + ").");
}
return dataset.get(manager, indices[Math.toIntExact(index) + from]);
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return to - from;
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) {}
}
private static final class SubDatasetByIndices extends RandomAccessDataset {
private RandomAccessDataset dataset;
private List<Long> subIndices;
public SubDatasetByIndices(RandomAccessDataset dataset, List<Long> subIndices) {
this.dataset = dataset;
this.subIndices = subIndices;
this.sampler = dataset.sampler;
this.dataBatchifier = dataset.dataBatchifier;
this.labelBatchifier = dataset.labelBatchifier;
this.pipeline = dataset.pipeline;
this.targetPipeline = dataset.targetPipeline;
this.prefetchNumber = dataset.prefetchNumber;
this.device = dataset.device;
limit = Long.MAX_VALUE;
}
/** {@inheritDoc} */
@Override
public Record get(NDManager manager, long index) throws IOException {
return dataset.get(manager, subIndices.get(Math.toIntExact(index)));
}
/** {@inheritDoc} */
@Override
protected long availableSize() {
return subIndices.size();
}
/** {@inheritDoc} */
@Override
public void prepare(Progress progress) {}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/RandomSampler.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.util.RandomUtils;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.stream.LongStream;
/**
* {@code RandomSampler} is an implementation of the {@link
* ai.djl.training.dataset.Sampler.SubSampler} interface.
*
* <p>{@code RandomSampler} samples the data from [0, dataset.size) randomly.
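 *
 * <p>A typical use wraps it in a {@code BatchSampler} (a sketch; the batch size is arbitrary):
 *
 * <pre>{@code
 * Sampler sampler = new BatchSampler(new RandomSampler(), 32, false);
 * }</pre>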
*/
public class RandomSampler implements Sampler.SubSampler {
private static void swap(long[] arr, int i, int j) {
long tmp = arr[i];
arr[i] = arr[j];
arr[j] = tmp;
}
/** {@inheritDoc} */
@Override
public Iterator<Long> sample(RandomAccessDataset dataset) {
return new Iterate(dataset);
}
static class Iterate implements Iterator<Long> {
private long[] indices;
private long current;
Iterate(RandomAccessDataset dataset) {
long size = dataset.size();
current = 0;
indices = LongStream.range(0, size).toArray();
            // Java arrays do not support indices greater than Integer.MAX_VALUE,
            // so cast to int for now
            for (int i = Math.toIntExact(size) - 1; i > 0; --i) {
                // Fisher-Yates shuffle: draw the swap index uniformly from [0, i], not [0, i)
                swap(indices, i, RandomUtils.nextInt(i + 1));
}
}
/** {@inheritDoc} */
@Override
public boolean hasNext() {
return current < indices.length;
}
/** {@inheritDoc} */
@Override
public Long next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
            // Java arrays do not support indices greater than Integer.MAX_VALUE,
            // so cast to int for now
return indices[Math.toIntExact(current++)];
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/RawDataset.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import java.io.IOException;
/**
 * An interface for a dataset that can read a plain Java object.
*
* @param <T> the raw data type of the dataset
*/
public interface RawDataset<T> extends Dataset {
/**
* Returns a plain java object.
*
* @return a plain java object
* @throws IOException when IO operation fails in loading a resource
*/
T getData() throws IOException;
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/Record.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import ai.djl.ndarray.NDList;
/**
* {@code Record} represents a single element of data and labels from {@link Dataset}.
*
* <p>The data and labels in record are in the form of an {@link NDList}. This allows it to hold
* multiple types of data and labels. However, note that the {@link NDList} does not include a
* dimension for batch.
*
* <p>In a {@code Record}, data and label are each an {@link NDList}. The data {@link NDList}
* represents the data for each input in the batch. Similarly, the label {@link NDList} represents
* the labels for each output.
*
* <p>For example, an Image Question and Answer dataset has two inputs: an image and a question. In
* this case, the data in the {@code Record} will be an {@link NDList} containing a CHW image {@link
* ai.djl.ndarray.NDArray} and a TC question {@link ai.djl.ndarray.NDArray}. The label will be an
* {@link NDList} containing only a TC answer {@link ai.djl.ndarray.NDArray}.
*
* <p>In order to differentiate a batch vs a single record (despite them both consisting of two
* {@link NDList}s), we have the {@link Batch} and the {@link Record} classes respectively.
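 *
 * <p>A minimal construction sketch (the {@code NDArray} variables are hypothetical):
 *
 * <pre>{@code
 * NDList data = new NDList(image, question);
 * NDList labels = new NDList(answer);
 * Record record = new Record(data, labels);
 * }</pre>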
*/
public class Record {
private NDList data;
private NDList labels;
/**
* Creates a new instance of {@code Record} with a single element of data and its corresponding
* labels.
*
* @param data an {@link NDList} that contains a single element of data
* @param labels an {@link NDList} that contains the corresponding label
*/
public Record(NDList data, NDList labels) {
this.data = data;
this.labels = labels;
}
/**
* Gets the data of this {@code Record}.
*
* @return an {@link NDList} that contains the data of this {@code Record}
*/
public NDList getData() {
return data;
}
/**
* Gets the labels that correspond to the data of this {@code Record}.
*
* @return an {@link NDList} that contains label that correspond to the data of this {@code
* Record}
*/
public NDList getLabels() {
return labels;
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/Sampler.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import java.util.Iterator;
import java.util.List;
/**
* An interface for sampling data items from a {@link RandomAccessDataset}.
*
* <p>A {@code Sampler} implementation returns an iterator of batches for the {@link
* RandomAccessDataset}. Instead of returning the actual items, it returns the item indices.
* Different samplers can have different ways of sampling such as sampling with or without
* replacement.
*
 * <p>Many of the samplers may also make use of {@link SubSampler}s, which sample individual data
 * item indices rather than batches.
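 *
 * <p>A typical construction (a sketch, assuming {@code dataset} is a {@code RandomAccessDataset};
 * the batch size is arbitrary):
 *
 * <pre>{@code
 * Sampler sampler = new BatchSampler(new SequenceSampler(), 16, true);
 * Iterator<List<Long>> batches = sampler.sample(dataset);
 * }</pre>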
*/
public interface Sampler {
/**
* Fetches an iterator that iterates through the given {@link RandomAccessDataset} in
* mini-batches of indices.
*
* @param dataset the {@link RandomAccessDataset} to sample from
* @return an iterator that iterates through the given {@link RandomAccessDataset} in
* mini-batches of indices
*/
Iterator<List<Long>> sample(RandomAccessDataset dataset);
/**
* Returns the batch size of the {@code Sampler}.
*
* @return the batch size of the {@code Sampler}, -1 if batch size is not fixed
*/
int getBatchSize();
/** An interface that samples a single data item at a time. */
interface SubSampler {
/**
* Fetches an iterator that iterates through the indices of the given {@link
* RandomAccessDataset}.
*
* @param dataset the {@link RandomAccessDataset} to sample from
* @return an iterator that iterates through the indices of the given {@link
* RandomAccessDataset}
*/
Iterator<Long> sample(RandomAccessDataset dataset);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/SequenceSampler.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.dataset;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* {@code SequenceSampler} is an implementation of the {@link
* ai.djl.training.dataset.Sampler.SubSampler} interface.
*
* <p>{@code SequenceSampler} samples the data from [0, dataset.size) sequentially.
*/
public class SequenceSampler implements Sampler.SubSampler {
/** {@inheritDoc} */
@Override
public Iterator<Long> sample(RandomAccessDataset dataset) {
return new Iterate(dataset);
}
static class Iterate implements Iterator<Long> {
private long size;
private long current;
Iterate(RandomAccessDataset dataset) {
size = dataset.size();
current = 0;
}
/** {@inheritDoc} */
@Override
public boolean hasNext() {
return current < size;
}
/** {@inheritDoc} */
@Override
public Long next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
return current++;
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/dataset/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes to download and prepare training and testing data.
*
* <p>The central class to work with in this package is the {@link ai.djl.training.dataset.Dataset}.
* In practice, most of the implementations of {@link ai.djl.training.dataset.Dataset} will actually
* extend {@link ai.djl.training.dataset.RandomAccessDataset} instead.
*/
package ai.djl.training.dataset;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/AbstractAccuracy.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.util.Pair;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* {@code Accuracy} is an {@link Evaluator} that computes the accuracy score.
*
* <p>The accuracy score is defined as \(accuracy(y, \hat{y}) =
* \frac{1}{n}\sum_{i=0}^{n-1}1(\hat{y_i} == y_i)\)
*/
public abstract class AbstractAccuracy extends Evaluator {
protected Map<String, Long> correctInstances;
protected int axis;
/**
* Creates an accuracy evaluator that computes accuracy across axis 1.
*
* @param name the name of the evaluator, default is "Accuracy"
*/
public AbstractAccuracy(String name) {
this(name, 1);
}
/**
* Creates an accuracy evaluator.
*
* @param name the name of the evaluator, default is "Accuracy"
* @param axis the axis that represent classes in prediction, default 1
*/
public AbstractAccuracy(String name, int axis) {
super(name);
correctInstances = new ConcurrentHashMap<>();
this.axis = axis;
}
/**
* A helper for classes extending {@link AbstractAccuracy}.
*
* @param labels the labels to get accuracy for
* @param predictions the predictions to get accuracy for
     * @return a pair of (the total number of values, an {@code NDArray} of correct values)
*/
protected abstract Pair<Long, NDArray> accuracyHelper(NDList labels, NDList predictions);
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
return accuracyHelper(labels, predictions).getValue();
}
/** {@inheritDoc} */
@Override
public void addAccumulator(String key) {
totalInstances.put(key, 0L);
correctInstances.put(key, 0L);
}
/** {@inheritDoc} */
@Override
public void updateAccumulator(String key, NDList labels, NDList predictions) {
updateAccumulators(new String[] {key}, labels, predictions);
}
/** {@inheritDoc} */
@Override
public void updateAccumulators(String[] keys, NDList labels, NDList predictions) {
Pair<Long, NDArray> update = accuracyHelper(labels, predictions);
NDArray value = update.getValue();
NDArray sum = value.sum();
long correct = sum.getLong();
for (String key : keys) {
totalInstances.compute(key, (k, v) -> v + update.getKey());
correctInstances.compute(key, (k, v) -> v + correct);
}
value.close();
sum.close();
}
/** {@inheritDoc} */
@Override
public void resetAccumulator(String key) {
totalInstances.compute(key, (k, v) -> 0L);
correctInstances.compute(key, (k, v) -> 0L);
}
/** {@inheritDoc} */
@Override
public float getAccumulator(String key) {
Long total = totalInstances.get(key);
if (total == null || total == 0) {
return Float.NaN;
}
return (float) correctInstances.get(key) / totalInstances.get(key);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/Accuracy.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.util.Pair;
/** {@link Accuracy} is the {@link AbstractAccuracy} with multiple classes. */
public class Accuracy extends AbstractAccuracy {
/**
* Creates a multiclass accuracy evaluator that computes accuracy across axis 1 along the 0th
* index.
*/
public Accuracy() {
this("Accuracy", 1);
}
/**
* Creates a multiclass accuracy evaluator that computes accuracy across axis 1 along the 0th
* index.
*
* @param name the name of the evaluator, default is "Accuracy"
*/
public Accuracy(String name) {
this(name, 1);
}
/**
* Creates a multiclass accuracy evaluator.
*
* @param name the name of the evaluator, default is "Accuracy"
* @param axis the axis that represent classes in prediction, default 1
*/
public Accuracy(String name, int axis) {
super(name, axis);
}
/** {@inheritDoc} */
@Override
protected Pair<Long, NDArray> accuracyHelper(NDList labels, NDList predictions) {
NDArray label = labels.head();
NDArray prediction = predictions.head();
checkLabelShapes(label, prediction);
NDArray predictionReduced;
if (!label.getShape().equals(prediction.getShape())) {
// Multi-class, sparse label
predictionReduced = prediction.argMax(axis);
predictionReduced = predictionReduced.reshape(label.getShape());
} else {
// Multi-class, one-hot label
predictionReduced = prediction.argMax(axis);
label = label.argMax(axis);
}
// result of sum is int64 now
long total = label.size();
try (NDArray nd = label.toType(DataType.INT64, true)) {
NDArray correct = predictionReduced.toType(DataType.INT64, false).eq(nd).countNonzero();
return new Pair<>(total, correct);
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/BinaryAccuracy.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.util.Pair;
import ai.djl.util.Preconditions;
/**
* {@link BinaryAccuracy} is the {@link AbstractAccuracy} with two classes.
*
* <p>It is assumed that the classes are identified with a labels array of 0s and 1s and a
* prediction array where values above the threshold are the positive (1) examples and values below
* the threshold are the negative (0) examples. If you have a different encoding, you may want to
* look at the {@link Accuracy}.
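 *
 * <p>A minimal sketch (a 0.5 threshold suits probability outputs, while 0 suits raw logits):
 *
 * <pre>{@code
 * Evaluator evaluator = new BinaryAccuracy(0.5f);
 * }</pre>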
*/
public class BinaryAccuracy extends AbstractAccuracy {
float threshold;
/**
* Creates a binary (two class) accuracy evaluator.
*
* @param name the name of the evaluator, default is "Accuracy"
     * @param threshold the value differentiating the positive and negative classes (usually 0 or .5)
* @param axis the axis that represent classes in prediction, default 1
*/
public BinaryAccuracy(String name, float threshold, int axis) {
super(name, axis);
this.threshold = threshold;
}
/**
* Creates a binary (two class) accuracy evaluator that computes accuracy across axis 1 along
* given index.
*
* @param name the name of the evaluator, default is "Accuracy"
     * @param threshold the value differentiating the positive and negative classes (usually 0 or .5)
*/
public BinaryAccuracy(String name, float threshold) {
this(name, threshold, 1);
}
/**
* Creates a binary (two class) accuracy evaluator that computes accuracy across axis 1 along
* the 0th index.
*
     * @param threshold the value differentiating the positive and negative classes (usually 0 or .5)
*/
public BinaryAccuracy(float threshold) {
this("BinaryAccuracy", threshold, 1);
}
/** Creates a binary (two class) accuracy evaluator with 0 threshold. */
public BinaryAccuracy() {
this(0);
}
/** {@inheritDoc} */
@Override
protected Pair<Long, NDArray> accuracyHelper(NDList labels, NDList predictions) {
Preconditions.checkArgument(
labels.size() == predictions.size(),
"labels and prediction length does not match.");
NDArray label = labels.head();
NDArray prediction = predictions.head();
checkLabelShapes(label, prediction, false);
NDArray predictionReduced = prediction.gte(threshold);
// result of sum is int64 now
long total = label.size();
NDArray correct =
label.toType(DataType.INT64, false)
.eq(predictionReduced.toType(DataType.INT64, false))
.countNonzero();
return new Pair<>(total, correct);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/BoundingBoxError.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.modality.cv.MultiBoxTarget;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
/**
* {@code BoundingBoxError} is an {@link Evaluator} that computes the error in the prediction of
* bounding boxes in SingleShotDetection model.
*/
public class BoundingBoxError extends Evaluator {
private Map<String, Float> ssdBoxPredictionError;
private MultiBoxTarget multiBoxTarget = MultiBoxTarget.builder().build();
/**
     * Creates a {@code BoundingBoxError} evaluator.
*
* @param name the name of the evaluator
*/
public BoundingBoxError(String name) {
super(name);
ssdBoxPredictionError = new ConcurrentHashMap<>();
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
NDArray anchors = predictions.get(0);
NDArray classPredictions = predictions.get(1);
NDArray boundingBoxPredictions = predictions.get(2);
NDList targets =
multiBoxTarget.target(
new NDList(anchors, labels.head(), classPredictions.transpose(0, 2, 1)));
NDArray boundingBoxLabels = targets.get(0);
NDArray boundingBoxMasks = targets.get(1);
return boundingBoxLabels.sub(boundingBoxPredictions).mul(boundingBoxMasks).abs();
}
/** {@inheritDoc} */
@Override
public void addAccumulator(String key) {
totalInstances.put(key, 0L);
ssdBoxPredictionError.put(key, 0f);
}
/** {@inheritDoc} */
@Override
public void updateAccumulator(String key, NDList labels, NDList predictions) {
updateAccumulators(new String[] {key}, labels, predictions);
}
/** {@inheritDoc} */
@Override
public void updateAccumulators(String[] keys, NDList labels, NDList predictions) {
NDArray boundingBoxError = evaluate(labels, predictions);
float update = boundingBoxError.sum().getFloat();
for (String key : keys) {
totalInstances.compute(key, (k, v) -> v + boundingBoxError.size());
ssdBoxPredictionError.compute(key, (k, v) -> v + update);
}
}
/** {@inheritDoc} */
@Override
public void resetAccumulator(String key) {
totalInstances.compute(key, (k, v) -> 0L);
ssdBoxPredictionError.compute(key, (k, v) -> 0f);
}
/** {@inheritDoc} */
@Override
public float getAccumulator(String key) {
Long total = totalInstances.get(key);
Objects.requireNonNull(total, "No evaluator found at that path");
if (total == 0) {
return Float.NaN;
}
return ssdBoxPredictionError.get(key) / totalInstances.get(key);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/Coverage.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.util.Pair;
/**
 * Coverage for a regression problem: it measures the fraction of predictions greater than the
 * actual target, which indicates whether the predictor is over-forecasting or under-forecasting.
 * For example, the coverage is 0.50 if we predict near the median of the distribution.
*
* <pre>
* def coverage(target, forecast):
* return (np.mean((target < forecast)))
* </pre>
*
 * <a href="https://bibinmjose.github.io/2021/03/08/errorblog.html">https://bibinmjose.github.io/2021/03/08/errorblog.html</a>
*/
public class Coverage extends AbstractAccuracy {
/**
* Creates an evaluator that measures the percent of predictions greater than the actual target.
*/
public Coverage() {
this("Coverage", 1);
}
/**
* Creates an evaluator that measures the percent of predictions greater than the actual target.
*
* @param name the name of the evaluator, default is "Coverage"
* @param axis the axis along which to count the correct prediction, default is 1
*/
public Coverage(String name, int axis) {
super(name, axis);
}
/** {@inheritDoc} */
@Override
protected Pair<Long, NDArray> accuracyHelper(NDList labels, NDList predictions) {
NDArray labl = labels.head();
NDArray pred = predictions.head();
return new Pair<>(labl.size(), labl.lt(pred));
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/Evaluator.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Base class for all {@code Evaluator}s that can be used to evaluate the performance of a model.
*
* <p>The {@code Evaluator}s can all be monitored to make an assessment about the performance of the
* model. However, only ones that further extend {@link ai.djl.training.loss.Loss} are suited to
* being used to directly optimize a model.
*
* <p>In addition to computing the evaluation, an evaluator can accumulate values to compute a total
* evaluation. For different purposes, it is possible to have multiple accumulators on a single
* evaluator. Each accumulator must be added with a String key to identify the accumulator. Before
* using an accumulator, you must {@link Evaluator#addAccumulator(String)}. Then, call {@link
* Evaluator#updateAccumulator(String, NDList, NDList)} to add more data to the accumulator. You can
* use {@link Evaluator#getAccumulator(String)} to retrieve the accumulated value and {@link
* Evaluator#resetAccumulator(String)} to reset the accumulator to the same value as when just
* added.
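 *
 * <p>A minimal accumulator lifecycle (a sketch using {@link Accuracy}; {@code labels} and {@code
 * predictions} are hypothetical {@code NDList}s):
 *
 * <pre>{@code
 * Evaluator evaluator = new Accuracy();
 * evaluator.addAccumulator("validate");
 * evaluator.updateAccumulator("validate", labels, predictions);
 * float accuracy = evaluator.getAccumulator("validate");
 * }</pre>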
*/
public abstract class Evaluator {
private String name;
protected Map<String, Long> totalInstances;
/**
* Creates an evaluator with abstract update methods.
*
* @param name the name of the evaluator
*/
public Evaluator(String name) {
this.name = name;
totalInstances = new ConcurrentHashMap<>();
}
/**
* Returns the name of this {@code Evaluator}.
*
* @return the name of this {@code Evaluator}
*/
public String getName() {
return name;
}
/**
* Calculates the evaluation between the labels and the predictions.
*
* @param labels the correct values
* @param predictions the predicted values
* @return the evaluation result
*/
public abstract NDArray evaluate(NDList labels, NDList predictions);
/**
* Adds an accumulator for the results of the evaluation with the given key.
*
* @param key the key for the new accumulator
*/
public abstract void addAccumulator(String key);
/**
* Updates the evaluator with the given keys based on a {@link NDList} of labels and
* predictions.
*
* <p>This is a synchronized operation. You should only call it at the end of a batch or epoch.
*
     * <p>This is an alternative to {@link #updateAccumulator(String, NDList, NDList)} that may be
     * more efficient when updating multiple accumulators at once.
*
* @param keys the keys of all the accumulators to update
* @param labels a {@code NDList} of labels
* @param predictions a {@code NDList} of predictions
*/
public void updateAccumulators(String[] keys, NDList labels, NDList predictions) {
for (String key : keys) {
updateAccumulator(key, labels, predictions);
}
}
/**
* Updates the evaluator with the given key based on a {@link NDList} of labels and predictions.
*
* <p>This is a synchronized operation. You should only call it at the end of a batch or epoch.
*
* @param key the key of the accumulator to update
* @param labels a {@code NDList} of labels
* @param predictions a {@code NDList} of predictions
*/
public abstract void updateAccumulator(String key, NDList labels, NDList predictions);
/**
* Resets the evaluator value with the given key.
*
* @param key the key of the accumulator to reset
*/
public abstract void resetAccumulator(String key);
/**
* Returns the accumulated evaluator value.
*
* @param key the key of the accumulator to get
* @return the accumulated value
* @throws IllegalArgumentException if no accumulator was added with the given key
*/
public abstract float getAccumulator(String key);
/**
* Checks if the two input {@code NDArray} have the same length or shape.
*
* @param labels a {@code NDArray} of labels
* @param predictions a {@code NDArray} of predictions
* @param checkDimOnly whether to check for first dimension only
*/
protected void checkLabelShapes(NDArray labels, NDArray predictions, boolean checkDimOnly) {
if (labels.getShape().get(0) != predictions.getShape().get(0)) {
throw new IllegalArgumentException(
"The size of labels("
+ labels.size()
+ ") does not match that of predictions("
+ predictions.size()
+ ")");
}
if (!checkDimOnly) {
if (!labels.getShape().equals(predictions.getShape())) {
throw new IllegalArgumentException(
"The shape of labels("
+ labels.getShape()
+ ") does not match that of predictions("
+ predictions.getShape()
+ ")");
}
}
}
/**
* Checks the length of NDArrays.
*
* @param labels a {@code NDArray} of labels
* @param predictions a {@code NDArray} of predictions
*/
protected void checkLabelShapes(NDArray labels, NDArray predictions) {
checkLabelShapes(labels, predictions, true);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/IndexEvaluator.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
/**
* A wrapper for an {@link Evaluator} that evaluates on only a particular {@link NDArray} in the
* predictions and/or labels {@link NDList}s.
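 *
 * <p>For example, to evaluate an {@link Accuracy} against only the first {@code NDArray} of the
 * predictions and labels (a sketch):
 *
 * <pre>{@code
 * Evaluator evaluator = new IndexEvaluator(new Accuracy(), 0);
 * }</pre>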
*/
public class IndexEvaluator extends Evaluator {
private Evaluator evaluator;
private Integer predictionsIndex;
private Integer labelsIndex;
/**
* Constructs an {@link IndexEvaluator} with the same index for both predictions and labels.
*
* @param evaluator the base evaluator
* @param index the index for both predictions and labels
*/
public IndexEvaluator(Evaluator evaluator, int index) {
this(evaluator, index, index);
}
/**
* Constructs an {@link IndexEvaluator}.
*
* @param evaluator the base evaluator
* @param predictionsIndex the predictions index
* @param labelsIndex the labels index
*/
public IndexEvaluator(Evaluator evaluator, Integer predictionsIndex, Integer labelsIndex) {
super(evaluator.getName());
this.evaluator = evaluator;
this.predictionsIndex = predictionsIndex;
this.labelsIndex = labelsIndex;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
return evaluator.evaluate(getLabels(labels), getPredictions(predictions));
}
/** {@inheritDoc} */
@Override
public void addAccumulator(String key) {
evaluator.addAccumulator(key);
}
/** {@inheritDoc} */
@Override
public void updateAccumulator(String key, NDList labels, NDList predictions) {
evaluator.updateAccumulator(key, getLabels(labels), getPredictions(predictions));
}
/** {@inheritDoc} */
@Override
public void updateAccumulators(String[] keys, NDList labels, NDList predictions) {
evaluator.updateAccumulators(keys, getLabels(labels), getPredictions(predictions));
}
/** {@inheritDoc} */
@Override
public void resetAccumulator(String key) {
evaluator.resetAccumulator(key);
}
/** {@inheritDoc} */
@Override
public float getAccumulator(String key) {
return evaluator.getAccumulator(key);
}
private NDList getPredictions(NDList predictions) {
if (predictionsIndex == null) {
return predictions;
}
return new NDList(predictions.get(predictionsIndex));
}
private NDList getLabels(NDList labels) {
if (labelsIndex == null) {
return labels;
}
return new NDList(labels.get(labelsIndex));
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/SingleShotDetectionAccuracy.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.modality.cv.MultiBoxTarget;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.util.Pair;
/**
* {@code SingleShotDetectionAccuracy} is an implementation of {@link AbstractAccuracy}. It is used
* while training a Single Shot Detection (SSD) model for object detection. It uses the targets
* computed by {@link MultiBoxTarget}, and computes the class prediction accuracy against the
* computed targets.
*/
public class SingleShotDetectionAccuracy extends AbstractAccuracy {
private MultiBoxTarget multiBoxTarget = MultiBoxTarget.builder().build();
/**
* Creates a new instance of {@link SingleShotDetectionAccuracy} with the given name.
*
* @param name the name given to the accuracy
*/
public SingleShotDetectionAccuracy(String name) {
super(name, 0);
}
/** {@inheritDoc} */
@Override
protected Pair<Long, NDArray> accuracyHelper(NDList labels, NDList predictions) {
NDArray anchors = predictions.get(0);
NDArray classPredictions = predictions.get(1);
NDList targets =
multiBoxTarget.target(
new NDList(anchors, labels.head(), classPredictions.transpose(0, 2, 1)));
NDArray classLabels = targets.get(2);
checkLabelShapes(classLabels, classPredictions);
NDArray predictionReduced = classPredictions.argMax(-1);
long total = classLabels.size();
NDArray numCorrect =
classLabels.toType(DataType.INT64, false).eq(predictionReduced).countNonzero();
return new Pair<>(total, numCorrect);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/TopKAccuracy.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.evaluator;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.util.Pair;
import java.util.stream.IntStream;
/**
* {@code TopKAccuracy} is an {@link Evaluator} that computes the accuracy of the top k predictions.
*
 * <p>{@code TopKAccuracy} differs from {@link AbstractAccuracy} in that it considers the
 * prediction to be correct as long as the ground truth label is in the top K predicted labels. If
 * {@code topK = 1}, then {@code TopKAccuracy} is identical to {@code Accuracy}.
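 *
 * <p>A minimal sketch (counts a prediction as correct if the label is among the top 3 classes):
 *
 * <pre>{@code
 * Evaluator evaluator = new TopKAccuracy(3);
 * }</pre>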
*/
public class TopKAccuracy extends AbstractAccuracy {
private int topK;
/**
* Creates a {@code TopKAccuracy} instance.
*
* @param name the accuracy name, default "Top_K_Accuracy"
* @param topK the value of K
*/
public TopKAccuracy(String name, int topK) {
super(name);
if (topK > 1) {
this.topK = topK;
} else {
throw new IllegalArgumentException("Please use TopKAccuracy with topK more than 1");
}
}
/**
* Creates an instance of {@code TopKAccuracy} evaluator that computes topK accuracy across axis
* 1 along the 0th index.
*
* @param topK the value of K
*/
public TopKAccuracy(int topK) {
this("Top_" + topK + "_Accuracy", topK);
}
/** {@inheritDoc} */
@Override
protected Pair<Long, NDArray> accuracyHelper(NDList labels, NDList predictions) {
NDArray label = labels.head();
NDArray prediction = predictions.head();
// number of labels and predictions should be the same
checkLabelShapes(label, prediction);
// ascending by default
NDArray topKPrediction = prediction.argSort(axis).toType(DataType.INT64, false);
int numDims = topKPrediction.getShape().dimension();
NDArray numCorrect;
if (numDims == 1) {
numCorrect = topKPrediction.flatten().eq(label.flatten()).countNonzero();
} else if (numDims == 2) {
int numClasses = (int) topKPrediction.getShape().get(1);
topK = Math.min(topK, numClasses);
numCorrect =
NDArrays.add(
IntStream.range(0, topK)
.mapToObj(
j -> {
// get from last index as argSort is ascending
NDArray jPrediction =
topKPrediction.get(
":, {}", numClasses - j - 1);
return jPrediction
.flatten()
.eq(
label.flatten()
.toType(
DataType.INT64,
false))
.countNonzero();
})
.toArray(NDArray[]::new));
} else {
throw new IllegalArgumentException("Prediction should be less than 2 dimensions");
}
long total = label.getShape().get(0);
return new Pair<>(total, numCorrect);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/evaluator/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for evaluating the effectiveness of models.
*
* <p>It contains a main interface {@link ai.djl.training.evaluator.Evaluator} and various
* evaluators that extend it. More evaluators are located within {@link ai.djl.training.loss} which
* have the additional property that those evaluators are suited for training.
*/
package ai.djl.training.evaluator;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/EasyHpo.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter;
import ai.djl.Model;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.types.Shape;
import ai.djl.training.EasyTrain;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingConfig;
import ai.djl.training.TrainingResult;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.dataset.RandomAccessDataset;
import ai.djl.training.hyperparameter.optimizer.HpORandom;
import ai.djl.training.hyperparameter.optimizer.HpOptimizer;
import ai.djl.training.hyperparameter.param.HpSet;
import ai.djl.translate.TranslateException;
import ai.djl.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/** Helper for easy training with hyperparameters. */
public abstract class EasyHpo {
private static final Logger logger = LoggerFactory.getLogger(EasyHpo.class);
/**
* Fits the model given the implemented abstract methods.
*
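     * <p>A minimal sketch ({@code MyEasyHpo} is a hypothetical subclass implementing the abstract
     * methods of this class):
     *
     * <pre>{@code
     * Pair<Model, TrainingResult> best = new MyEasyHpo().fit();
     * Model bestModel = best.getKey();
     * }</pre>
     *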
* @return the best model and training results
* @throws IOException for various exceptions depending on the dataset
* @throws TranslateException if there is an error while processing input
*/
public Pair<Model, TrainingResult> fit() throws IOException, TranslateException {
// get training and validation dataset
RandomAccessDataset trainingSet = getDataset(Dataset.Usage.TRAIN);
RandomAccessDataset validateSet = getDataset(Dataset.Usage.TEST);
HpSet hyperParams = setupHyperParams();
HpOptimizer hpOptimizer = new HpORandom(hyperParams);
final int hyperparameterTests = numHyperParameterTests();
for (int i = 0; i < hyperparameterTests; i++) {
HpSet hpVals = hpOptimizer.nextConfig();
Pair<Model, TrainingResult> trained = train(hpVals, trainingSet, validateSet);
trained.getKey().close();
float loss = trained.getValue().getValidateLoss();
hpOptimizer.update(hpVals, loss);
            logger.info(
                    "--------- hp test {}/{} - Loss {} - {}",
                    i + 1,
                    hyperparameterTests,
                    loss,
                    hpVals);
}
HpSet bestHpVals = hpOptimizer.getBest().getKey();
Pair<Model, TrainingResult> trained = train(bestHpVals, trainingSet, validateSet);
TrainingResult result = trained.getValue();
Model model = trained.getKey();
saveModel(model, result);
return trained;
}
private Pair<Model, TrainingResult> train(
HpSet hpVals, RandomAccessDataset trainingSet, RandomAccessDataset validateSet)
throws IOException, TranslateException {
// Construct neural network
Model model = buildModel(hpVals);
// setup training configuration
TrainingConfig config = setupTrainingConfig(hpVals);
try (Trainer trainer = model.newTrainer(config)) {
trainer.setMetrics(new Metrics());
// initialize trainer with proper input shape
trainer.initialize(inputShape(hpVals));
EasyTrain.fit(trainer, numEpochs(hpVals), trainingSet, validateSet);
TrainingResult result = trainer.getTrainingResult();
return new Pair<>(model, result);
}
}
/**
* Returns the initial hyperparameters.
*
* @return the initial hyperparameters
*/
protected abstract HpSet setupHyperParams();
/**
* Returns the dataset to train with.
*
* @param usage the usage of the dataset
* @return the dataset to train with
* @throws IOException if the dataset could not be loaded
*/
protected abstract RandomAccessDataset getDataset(Dataset.Usage usage) throws IOException;
/**
* Returns the {@link ai.djl.training.TrainingConfig} to use to train each hyperparameter set.
*
* @param hpVals the hyperparameters to train with
* @return the {@link ai.djl.training.TrainingConfig} to use to train each hyperparameter set
*/
protected abstract TrainingConfig setupTrainingConfig(HpSet hpVals);
/**
* Builds the {@link Model} and {@link ai.djl.nn.Block} to train.
*
* @param hpVals the hyperparameter values to use for the model
* @return the model to train
*/
protected abstract Model buildModel(HpSet hpVals);
/**
* Returns the input shape for the model.
*
* @param hpVals the hyperparameter values for the model
* @return returns the model input shape
*/
protected abstract Shape inputShape(HpSet hpVals);
/**
* Returns the number of epochs to train for the current hyperparameter set.
*
* @param hpVals the current hyperparameter set
* @return the number of epochs
*/
protected abstract int numEpochs(HpSet hpVals);
/**
* Returns the number of hyperparameter sets to train with.
*
* @return the number of hyperparameter sets to train with
*/
protected abstract int numHyperParameterTests();
/**
* Saves the best hyperparameter set.
*
* @param model the model to save
* @param result the training result for training with this model's hyperparameters
* @throws IOException if the model could not be saved
*/
protected void saveModel(Model model, TrainingResult result) throws IOException {}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains utilities to train, describe, and manipulate {@link
* ai.djl.training.hyperparameter.param.Hyperparameter}s.
*/
package ai.djl.training.hyperparameter;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/optimizer/BaseHpOptimizer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.optimizer;
import ai.djl.training.hyperparameter.param.HpSet;
import ai.djl.util.Pair;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
/**
* A base containing shared implementations for {@link HpOptimizer}s.
*
* @see HpOptimizer
*/
public abstract class BaseHpOptimizer implements HpOptimizer {
protected HpSet hyperParams;
protected Map<HpSet, Float> results;
/**
* Constructs a {@link BaseHpOptimizer}.
*
* @param hyperParams the set of hyperparameters
*/
public BaseHpOptimizer(HpSet hyperParams) {
this.hyperParams = hyperParams;
results = new LinkedHashMap<>();
}
/** {@inheritDoc} */
@Override
public void update(HpSet config, float loss) {
results.compute(config, (k, oldLoss) -> oldLoss != null ? Math.max(oldLoss, loss) : loss);
}
/** {@inheritDoc} */
@Override
public float getLoss(HpSet config) {
return results.get(config);
}
/** {@inheritDoc} */
@Override
public Pair<HpSet, Float> getBest() {
Entry<HpSet, Float> entry =
Collections.min(
results.entrySet(),
(e1, e2) -> Float.compare(e1.getValue(), e2.getValue()));
return new Pair<>(entry.getKey(), entry.getValue());
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/optimizer/HpORandom.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.optimizer;
import ai.djl.training.hyperparameter.param.HpSet;
/** A simple {@link HpOptimizer} that tries random hyperparameter choices within the range. */
public class HpORandom extends BaseHpOptimizer {
/**
* Constructs a new {@link HpORandom}.
*
* @param hyperParams the hyperparameters to test with
*/
public HpORandom(HpSet hyperParams) {
super(hyperParams);
}
/** {@inheritDoc} */
@Override
public HpSet nextConfig() {
return hyperParams.random();
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/optimizer/HpOptimizer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.optimizer;
import ai.djl.training.hyperparameter.param.HpSet;
import ai.djl.training.hyperparameter.param.Hyperparameter;
import ai.djl.util.Pair;
/**
* An optimizer for {@link Hyperparameter}s.
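 *
 * <p>A typical random-search loop is sketched below; the {@code train} method is an assumed
 * user-supplied routine that trains a model with the given configuration and returns its
 * validation loss:
 *
 * <pre>{@code
 * HpSet space = new HpSet("hp", Arrays.asList(
 *         new HpFloat("lr", 1e-5f, 1e-2f, true),
 *         new HpInt("hiddenSize", 32, 512)));
 * HpOptimizer optimizer = new HpORandom(space);
 * for (int i = 0; i < 20; i++) {
 *     HpSet config = optimizer.nextConfig();
 *     float loss = train(config); // assumed user-supplied training run
 *     optimizer.update(config, loss);
 * }
 * Pair<HpSet, Float> best = optimizer.getBest();
 * }</pre>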
*
* @see Hyperparameter
*/
public interface HpOptimizer {
/**
* Returns the next hyperparameters to test.
*
* @return the hyperparameters to test
*/
HpSet nextConfig();
/**
* Updates the optimizer with the results of a hyperparameter test.
*
* @param config the tested hyperparameters
* @param loss the <b>validation</b> loss from training with those hyperparameters
*/
void update(HpSet config, float loss);
/**
* Returns the recorded loss.
*
* @param config the hyperparameters that were trained with
* @return the loss
* @throws java.util.NoSuchElementException if the hyperparameters were not trained with before
*/
float getLoss(HpSet config);
/**
* Returns the best hyperparameters and loss.
*
* @return the best hyperparameters and loss
*/
Pair<HpSet, Float> getBest();
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/optimizer/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes to optimize {@link ai.djl.training.hyperparameter.param.Hyperparameter}s.
*
* <p>Contains an interface {@link ai.djl.training.hyperparameter.optimizer.HpOptimizer}, a base
* implementation {@link ai.djl.training.hyperparameter.optimizer.BaseHpOptimizer}, and various
* implementations extending them.
*/
package ai.djl.training.hyperparameter.optimizer;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/HpBool.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.param;
import java.util.Arrays;
/** A {@link Hyperparameter} for a boolean option. */
public class HpBool extends HpCategorical<Boolean> {
/**
* Constructs a {@link HpBool}.
*
* @param name the name of the hyperparameter
*/
public HpBool(String name) {
super(name, Arrays.asList(false, true));
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/HpCategorical.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.param;
import ai.djl.util.RandomUtils;
import java.util.List;
/**
* A {@link Hyperparameter} which is one of a fixed number of options (similar to an enum).
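 *
 * <p>For example (the names and values are illustrative):
 *
 * <pre>{@code
 * HpCategorical<String> activation =
 *         new HpCategorical<>("activation", Arrays.asList("relu", "tanh", "sigmoid"));
 * String choice = activation.random();
 * }</pre>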
*
* @param <T> the type of the options
*/
public class HpCategorical<T> extends Hyperparameter<T> {
private List<T> categories;
/**
* Constructs a {@link HpCategorical}.
*
     * @param name the name of the hyperparameter
* @param categories the valid values for the hyperparameter
*/
public HpCategorical(String name, List<T> categories) {
super(name);
this.categories = categories;
}
/** {@inheritDoc} */
@Override
public T random() {
int index = RandomUtils.nextInt(categories.size());
return categories.get(index);
}
/** {@inheritDoc} */
@Override
public String toString() {
return "HPCategorical{" + "categories=" + categories + ", name='" + name + '\'' + '}';
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/HpFloat.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.param;
import ai.djl.util.RandomUtils;
/** A {@link Hyperparameter} for a float. */
public class HpFloat extends Hyperparameter<Float> {
private float lower;
private float upper;
private boolean log;
/**
* Constructs a {@link HpFloat}.
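     *
     * <p>For example, a learning-rate search in log space (the name and bounds are
     * illustrative):
     *
     * <pre>{@code
     * HpFloat lr = new HpFloat("lr", 1e-5f, 1e-2f, true);
     * float sample = lr.random(); // sampled uniformly in log space
     * }</pre>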
*
* @param name the name of the hyperparameter
* @param lower the lower bound (inclusive)
* @param upper the upper bound (exclusive)
* @param log whether to use log space. This is useful if your bounds cover different orders of
     *     magnitude (e.g. 1E-5 to 1E-2) rather than the same magnitude (e.g. 2 to 5).
*/
public HpFloat(String name, float lower, float upper, boolean log) {
super(name);
this.log = log;
this.lower = lower;
this.upper = upper;
}
/** {@inheritDoc} */
@Override
public Float random() {
if (log) {
float logLower = (float) Math.log(lower);
float logUpper = (float) Math.log(upper);
return (float) Math.exp(RandomUtils.nextFloat(logLower, logUpper));
} else {
return RandomUtils.nextFloat(lower, upper);
}
}
/** {@inheritDoc} */
@Override
public String toString() {
return "HPReal{"
+ "lower="
+ lower
+ ", upper="
+ upper
+ ", log="
+ log
+ ", name='"
+ name
+ '\''
+ '}';
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/HpInt.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.param;
import ai.djl.util.RandomUtils;
/** A {@link Hyperparameter} for an integer. */
public class HpInt extends Hyperparameter<Integer> {
int lower;
int upper;
/**
* Constructs a {@link HpInt}.
*
* @param name the name of the hyperparameter
* @param lower the lower bound (inclusive)
* @param upper the upper bound (exclusive)
*/
public HpInt(String name, int lower, int upper) {
super(name);
this.lower = lower;
this.upper = upper;
}
/** {@inheritDoc} */
@Override
public Integer random() {
int range = upper - lower;
return RandomUtils.nextInt(range) + lower;
}
/** {@inheritDoc} */
@Override
public String toString() {
return "HPInt{" + "lower=" + lower + ", upper=" + upper + ", name='" + name + '\'' + '}';
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/HpSet.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.param;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * A nestable set of {@link Hyperparameter}s.
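 *
 * <p>Sets can nest, because an {@code HpSet} is itself a {@link Hyperparameter}. A minimal
 * sketch (the names and ranges are illustrative):
 *
 * <pre>{@code
 * HpSet optimizerSpace = new HpSet("optimizer");
 * optimizerSpace.add(new HpFloat("lr", 1e-4f, 1e-1f, true));
 *
 * HpSet space = new HpSet("model");
 * space.add(new HpBool("useDropout"));
 * space.add(optimizerSpace); // nested search space
 *
 * HpSet sampled = space.random(); // an HpSet of fixed HpVal entries
 * }</pre>
 */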
public final class HpSet extends Hyperparameter<HpSet> {
private Map<String, Hyperparameter<?>> hyperParams;
/**
     * Constructs a new {@link HpSet}.
*
* @param name the name of the hyperparameter set
* @param hyperParams the included hyperparameters in the set
*/
public HpSet(String name, List<Hyperparameter<?>> hyperParams) {
super(name);
this.hyperParams = new ConcurrentHashMap<>();
for (Hyperparameter<?> hparam : hyperParams) {
add(hparam);
}
}
/**
     * Constructs a new empty {@link HpSet}.
*
* @param name the name of the hyperparameter set
*/
public HpSet(String name) {
super(name);
hyperParams = new ConcurrentHashMap<>();
}
/**
* Adds a hyperparameter to the set.
*
* @param hparam the hyperparameter to add
*/
public void add(Hyperparameter<?> hparam) {
hyperParams.put(hparam.getName(), hparam);
}
/**
* Returns the hyperparameter in the set with the given name.
*
* @param name the name of the hyperparameter to return
* @return the hyperparameter
*/
public Hyperparameter<?> getHParam(String name) {
return hyperParams.get(name);
}
/** {@inheritDoc} */
@Override
public HpSet random() {
HpSet rand = new HpSet(name);
for (Hyperparameter<?> hparam : hyperParams.values()) {
rand.add(new HpVal<>(hparam.getName(), hparam.random()));
}
return rand;
}
/** {@inheritDoc} */
@Override
public String toString() {
return "HPSet{" + "hyperParams=" + hyperParams + ", name='" + name + '\'' + '}';
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/HpVal.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.param;
/**
* A {@link Hyperparameter} with a known value instead of a range of possible values.
*
* <p>{@link HpVal}s and {@link HpSet}s of {@link HpVal}s are used to represent sampled
* hyperparameters.
*
* @param <T> the type of the value
*/
public class HpVal<T> extends Hyperparameter<T> {
T value;
/**
     * Constructs a new {@link HpVal}.
*
* @param name the name of the hyperparameter
* @param value the fixed value of the hyperparameter
*/
public HpVal(String name, T value) {
super(name);
this.value = value;
}
/** {@inheritDoc} */
@Override
public T random() {
return value;
}
/** {@inheritDoc} */
@Override
public String toString() {
return "HPVal{" + "value=" + value + ", name='" + name + '\'' + '}';
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/Hyperparameter.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.hyperparameter.param;
import ai.djl.training.hyperparameter.optimizer.HpOptimizer;
/**
 * A class representing an input to the network that can't be optimized through differentiation.
*
* <p>Some hyperparameters include learning rates, network sizes and shapes, activation choices, and
* model selection. In order to evaluate a set of hyperparameters, the only way is to fully train
* your model using those choices of hyperparameters. So, the full training loop involves training
* the model a number of times using different choices of hyperparameters. This can be mostly
* automated by using a {@link HpOptimizer}.
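 *
 * <p>For example, a search space mixing several hyperparameter types can be declared as in the
 * following sketch (the names and ranges are illustrative):
 *
 * <pre>{@code
 * HpSet space = new HpSet("net", Arrays.asList(
 *         new HpFloat("learningRate", 1e-4f, 1e-1f, true),
 *         new HpInt("hiddenSize", 64, 1024),
 *         new HpBool("useDropout")));
 * HpSet sampled = space.random(); // every entry becomes a fixed-value HpVal
 * }</pre>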
*
* @param <T> the type of the hyperparameter
*/
public abstract class Hyperparameter<T> {
protected String name;
/**
* Constructs a hyperparameter with the given name.
*
* @param name the name of the hyperparameter
*/
public Hyperparameter(String name) {
this.name = name;
}
/**
* Returns the name of the hyperparameter.
*
* @return the name of the hyperparameter
*/
public String getName() {
return name;
}
    /**
     * Returns a random value for the hyperparameter from within its range, or the fixed value
     * if this is a {@link HpVal}.
     *
     * @return a random value for the hyperparameter, or the fixed value if this is a {@link
     *     HpVal}
     */
public abstract T random();
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/hyperparameter/param/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains different types of {@link ai.djl.training.hyperparameter.param.Hyperparameter}s. */
package ai.djl.training.hyperparameter.param;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/initializer/ConstantInitializer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.initializer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
/** Initializer that generates tensors with constant values. */
public class ConstantInitializer implements Initializer {
private float value;
/**
* Creates a Constant Initializer.
*
* @param value the value to fill
*/
public ConstantInitializer(float value) {
this.value = value;
}
    /** {@inheritDoc} */
@Override
public NDArray initialize(NDManager manager, Shape shape, DataType dataType) {
return manager.full(shape, value, dataType);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/initializer/Initializer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.initializer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
/**
* An interface representing an initialization method.
*
* <p>Used to initialize the {@link NDArray} parameters stored within a {@link Block}.
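 *
 * <p>For example, the built-in constants can be used directly, and because this is a
 * functional interface a custom initializer can be written as a lambda (a minimal sketch):
 *
 * <pre>{@code
 * try (NDManager manager = NDManager.newBaseManager()) {
 *     NDArray ones = Initializer.ONES.initialize(manager, new Shape(2, 3), DataType.FLOAT32);
 *     Initializer twos = (m, shape, dataType) -> m.full(shape, 2f, dataType);
 * }
 * }</pre>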
*
* @see <a
* href="https://d2l.djl.ai/chapter_multilayer-perceptrons/numerical-stability-and-init.html">The
* D2L chapter on numerical stability and initialization</a>
*/
public interface Initializer {
    /** An initializer that sets every element to zero. */
    Initializer ZEROS = (m, s, t) -> m.zeros(s, t, m.getDevice());

    /** An initializer that sets every element to one. */
    Initializer ONES = (m, s, t) -> m.ones(s, t, m.getDevice());
/**
* Initializes a single {@link NDArray}.
*
* @param manager the {@link NDManager} to create the new {@link NDArray} in
* @param shape the {@link Shape} for the new NDArray
* @param dataType the {@link DataType} for the new NDArray
* @return the {@link NDArray} initialized with the manager and shape
*/
NDArray initialize(NDManager manager, Shape shape, DataType dataType);
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/initializer/NormalInitializer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.initializer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
/**
* {@code NormalInitializer} initializes weights with random values sampled from a normal
* distribution with a mean of zero and standard deviation of {@code sigma}. Default standard
* deviation is 0.01.
*/
public class NormalInitializer implements Initializer {
private float sigma;
/** Creates an instance of {@code NormalInitializer} with a default sigma of 0.01. */
public NormalInitializer() {
this.sigma = 0.01f;
}
/**
* Creates a Normal initializer.
*
* @param sigma the standard deviation of the normal distribution
*/
public NormalInitializer(float sigma) {
this.sigma = sigma;
}
/** {@inheritDoc} */
@Override
public NDArray initialize(NDManager manager, Shape shape, DataType dataType) {
return manager.randomNormal(0.0f, sigma, shape, dataType, manager.getDevice());
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/initializer/TruncatedNormalInitializer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.initializer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
/**
* Naive implementation of a truncated normal initializer. Simply samples from a normal distribution
* and throws away anything outside two standard deviations.
*
* @see <a
* href="https://en.wikipedia.org/wiki/Truncated_normal_distribution">https://en.wikipedia.org/wiki/Truncated_normal_distribution</a>
*/
@SuppressWarnings("unused")
public class TruncatedNormalInitializer implements Initializer {
private final float sigma;
/** Creates an instance of {@code TruncatedNormalInitializer} with a default sigma of 0.01. */
public TruncatedNormalInitializer() {
this(0.01f);
}
/**
* Creates a TruncatedNormalInitializer initializer.
*
* @param sigma the standard deviation of the truncated normal distribution. Values outside
* (-2σ, 2σ) will be rejected.
*/
public TruncatedNormalInitializer(final float sigma) {
this.sigma = sigma;
}
/** {@inheritDoc} */
@Override
public NDArray initialize(
final NDManager baseManager, final Shape shape, final DataType dataType) {
long size = shape.size();
if (size < 0) {
throw new IllegalArgumentException("Shape is not determined.");
}
// We need to clean up intermediary arrays, so we perform all initialization in our own
// memory scope.
NDManager manager = baseManager.newSubManager();
// We start with an empty array to which we will concat non-rejected samples
NDArray result = manager.create(new float[] {}, new Shape(0));
// We keep count of the steps - this should normally take only up to three steps
// (almost always only one), we need to stop if we have too many steps as something
// would be seriously wrong then
int steps = 0;
NDArray lowerBound = manager.create(-2f * sigma);
NDArray upperBound = manager.create(2f * sigma);
// Repeat until enough samples are within the truncated normal distribution
while (result.size() < size) {
// We create more samples than we need, as we have to discard some.
            // 95.45% of samples are expected to fit, so we create 10% more - that will most
            // likely be enough to get our result in one go.
long samplesToCreate = (long) ((size - result.size()) * 1.1);
// Create normal distribution
final NDArray normalDistribution =
manager.randomNormal(
0.0f, sigma, new Shape(samplesToCreate), dataType, manager.getDevice());
// Create bitmask for all elements that are inside 2σ
final NDArray larger2Sigma = normalDistribution.gt(lowerBound);
final NDArray smaller2Sigma = normalDistribution.lt(upperBound);
final NDArray withinBounds = larger2Sigma.logicalAnd(smaller2Sigma);
// Select elements that fit criteria
final NDArray truncatedNormalDistribution = normalDistribution.get(withinBounds);
// Concat to result
final NDArray newResult = result.concat(truncatedNormalDistribution);
result = newResult;
steps++;
if (steps > 10) {
throw new IllegalStateException(
"Initialization of truncated normal takes too long - This is incredibly "
+ "unlikely, something must be seriously wrong.");
}
}
// truncate superfluous values
result = result.get(new NDIndex().addSliceDim(0, size));
// reshape to target size
result = result.reshape(shape);
result.attach(baseManager);
manager.close();
// done!
return result;
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/initializer/UniformInitializer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.initializer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
/**
* {@code UniformInitializer} initializes weights with random values uniformly sampled from a given
* range.
*/
public class UniformInitializer implements Initializer {
private float scale;
/** Creates an instance of {@code UniformInitializer} with a default {@code scale} of 0.07. */
public UniformInitializer() {
this.scale = 0.07f;
}
/**
     * Constructs a uniform initializer.
     *
     * @param scale the bound on the range of the generated random values. Values are generated
     *     from the range [-{@code scale}, {@code scale}]. The default scale is 0.07.
*/
public UniformInitializer(float scale) {
this.scale = scale;
}
/** {@inheritDoc} */
@Override
public NDArray initialize(NDManager manager, Shape shape, DataType dataType) {
return manager.randomUniform(-scale, scale, shape, dataType, manager.getDevice());
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/initializer/XavierInitializer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.initializer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
/**
* {@code XavierInitializer} is an {@link Initializer} that performs "Xavier" initialization for
* parameters. This initializer is designed to keep the scale of gradients roughly the same in all
* layers. It was originally defined in the paper <a
* href="http://proceedings.mlr.press/v9/glorot10a/glorot10a.pdf"><i>Understanding the difficulty of
* training deep feedforward neural networks</i></a>.
*
* <p>{@code XavierInitializer} is specified by the type of random distribution({@link RandomType}),
* the factor type({@link FactorType}), and the magnitude of the scale. By default, {@link
* RandomType} is {@code UNIFORM} and {@link FactorType} is {@code AVG}. The initializer fills the
* weights with random numbers in the range of \([-c, c]\), where \(c = \sqrt{\frac{3.}{0.5 *
* (n_{in} + n_{out})}}\) where \(n_{in}\) is the number of neurons feeding into weights, and
* \(n_{out}\) is the number of neurons the result is fed to.
*
* <p>If {@link RandomType} is {@code UNIFORM} and {@link FactorType} is {@code IN}, then \(c =
* \sqrt{\frac{3.}{n_{in}}}\). Similarly when {@link FactorType} is {@code OUT}, then \(c =
* \sqrt{\frac{3.}{n_{out}}}\).
*
* <p>If {@link RandomType} is {@code GAUSSIAN} and {@link FactorType} is {@code AVG}, the
* initializer fills the weights with numbers from normal distribution with a standard deviation of
* \(\sqrt{\frac{3.}{0.5 * (n_{in} + n_{out})}}\).
*
* <p>Another common setting of the {@code XavierInitializer} is defined in the paper <a
* href="https://arxiv.org/abs/1502.01852"><i>Delving Deep into Rectifiers: Surpassing Human-Level
 * Performance on ImageNet Classification</i></a>. These settings better handle non-linearity
 * while preserving the variance across layers in a neural network. It can be initialized with
 * {@code new XavierInitializer(RandomType.GAUSSIAN, FactorType.IN, 2)}.
*/
public class XavierInitializer implements Initializer {
/** Enum for different types of random distributions. */
public enum RandomType {
UNIFORM,
GAUSSIAN
}
/** Enum for different types of factor type. */
public enum FactorType {
AVG,
IN,
OUT
}
private RandomType randomType;
private FactorType factorType;
private float magnitude;
/**
     * Constructs a Xavier initializer.
*
* @param randomType the random generator type, can be GAUSSIAN or UNIFORM
* @param factorType the factor type, can be one of AVG, IN, or OUT
* @param magnitude the scale of the random number
*/
public XavierInitializer(RandomType randomType, FactorType factorType, float magnitude) {
this.randomType = randomType;
this.factorType = factorType;
this.magnitude = magnitude;
}
/** Creates a new instance of {@code XavierInitializer}. */
public XavierInitializer() {
this(RandomType.UNIFORM, FactorType.AVG, 6f);
}
/** {@inheritDoc} */
@Override
public NDArray initialize(NDManager manager, Shape shape, DataType dataType) {
float hwScale;
long dimension = shape.dimension();
if (dimension < 2) {
throw new IllegalArgumentException(
"XavierInitializer cannot be applied to Shape with dimension: "
+ dimension
+ ", it requires shape to be at least 2D.");
} else if (dimension == 2) {
hwScale = 1.0f;
} else {
Shape shapeSliced = shape.slice(2);
hwScale = shapeSliced.size();
}
float fanIn = shape.get(1) * hwScale;
float fanOut = shape.head() * hwScale;
float factor;
switch (factorType) {
case AVG:
factor = (fanIn + fanOut) / 2.0f;
break;
case IN:
factor = fanIn;
break;
case OUT:
factor = fanOut;
break;
default:
throw new IllegalArgumentException(
"Invalid factor type, valid types are: avg, in, out");
}
if (factor == 0f) {
throw new IllegalStateException(
"Xavier initializer factor is 0, please check your input shape.");
}
float scale = (float) StrictMath.sqrt(magnitude / factor);
switch (randomType) {
case UNIFORM:
return manager.randomUniform(-scale, scale, shape, dataType, manager.getDevice());
case GAUSSIAN:
return manager.randomNormal(0f, scale, shape, dataType, manager.getDevice());
default:
throw new IllegalArgumentException("Invalid randomType");
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/initializer/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes for initializing a {@link ai.djl.nn.Block}'s {@link ai.djl.nn.Parameter}s.
*
* <p>It contains a main interface {@link ai.djl.training.initializer.Initializer} and various
* initializers that extend it.
*/
package ai.djl.training.initializer;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/DivergenceCheckTrainingListener.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.TrainingDivergedException;
import ai.djl.training.Trainer;
import ai.djl.training.loss.Loss;
/** {@link TrainingListener} that gives early warning if your training has failed by divergence. */
public class DivergenceCheckTrainingListener extends TrainingListenerAdapter {
/** {@inheritDoc} */
@Override
public void onTrainingBatch(Trainer trainer, BatchData batchData) {
Loss trainingLoss = trainer.getLoss();
if (Float.isNaN(trainingLoss.getAccumulator(EvaluatorTrainingListener.TRAIN_ALL))) {
throw new TrainingDivergedException(
"The Loss became NaN, try reduce learning rate,add clipGradient option to your"
+ " optimizer, check input data and loss calculation.");
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/EarlyStoppingListener.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.training.Trainer;
import ai.djl.training.TrainingResult;
import java.time.Duration;
/**
 * Listener that allows training to be stopped early if the monitored metric (the validation
 * loss by default) is not improving, or if the time limit has expired.
*
* <p>Usage: Add this listener to the training config, and add it as the last one.
*
* <pre>
* new DefaultTrainingConfig(...)
* .addTrainingListeners(EarlyStoppingListener.builder()
 *                 .optEpochPatience(1)
 *                 .optEarlyStopPctImprovement(1)
 *                 .optMaxDuration(Duration.ofMinutes(42))
 *                 .optMinEpochs(1)
* .build()
* );
* </pre>
*
* <p>Then surround the fit with a try catch that catches the {@link
* EarlyStoppingListener.EarlyStoppedException}. <br>
* Example:
*
* <pre>
* try {
* EasyTrain.fit(trainer, 5, trainDataset, testDataset);
* } catch (EarlyStoppingListener.EarlyStoppedException e) {
* // handle early stopping
* log.info("Stopped early at epoch {} because: {}", e.getEpoch(), e.getMessage());
* }
* </pre>
*
* <br>
* Note: Ensure that Metrics are set on the trainer.
*/
public final class EarlyStoppingListener implements TrainingListener {
private final double objectiveSuccess;
private final int minEpochs;
private final long maxMillis;
private final double earlyStopPctImprovement;
private final int epochPatience;
    private long startTimeMillis;
private double prevMetricValue;
private int numberOfEpochsWithoutImprovements;
private final String monitoredMetric;
private EarlyStoppingListener(
double objectiveSuccess,
int minEpochs,
long maxMillis,
double earlyStopPctImprovement,
int earlyStopPatience,
String monitoredMetric) {
this.objectiveSuccess = objectiveSuccess;
this.minEpochs = minEpochs;
this.maxMillis = maxMillis;
this.earlyStopPctImprovement = earlyStopPctImprovement;
this.epochPatience = earlyStopPatience;
this.monitoredMetric = monitoredMetric;
}
/** {@inheritDoc} */
@Override
public void onEpoch(Trainer trainer) {
int currentEpoch = trainer.getTrainingResult().getEpoch();
// stopping criteria
final double metricValue = getMetric(trainer.getTrainingResult());
if (currentEpoch >= minEpochs) {
if (metricValue < objectiveSuccess) {
throw new EarlyStoppedException(
currentEpoch,
String.format(
"validation loss %s < objectiveSuccess %s",
metricValue, objectiveSuccess));
}
            long elapsedMillis = System.currentTimeMillis() - startTimeMillis;
if (elapsedMillis >= maxMillis) {
throw new EarlyStoppedException(
currentEpoch,
String.format("%s ms elapsed >= %s maxMillis", elapsedMillis, maxMillis));
}
// consider early stopping?
if (Double.isFinite(prevMetricValue)) {
double goalImprovement = prevMetricValue * (100 - earlyStopPctImprovement) / 100.0;
boolean improved = metricValue <= goalImprovement; // false if any NANs
if (improved) {
numberOfEpochsWithoutImprovements = 0;
} else {
numberOfEpochsWithoutImprovements++;
if (numberOfEpochsWithoutImprovements >= epochPatience) {
throw new EarlyStoppedException(
currentEpoch,
String.format(
"failed to achieve %s%% improvement %s times in a row",
earlyStopPctImprovement, epochPatience));
}
}
}
}
if (Double.isFinite(metricValue)) {
prevMetricValue = metricValue;
}
}
private double getMetric(TrainingResult trainingResult) {
if ("validateLoss".equals(monitoredMetric)) {
Float vLoss = trainingResult.getValidateLoss();
return vLoss != null ? vLoss.doubleValue() : Double.NaN;
} else if ("trainLoss".equals(monitoredMetric)) {
Float tLoss = trainingResult.getTrainLoss();
return tLoss != null ? tLoss.doubleValue() : Double.NaN;
} else {
Float val = trainingResult.getEvaluations().get(monitoredMetric);
return val != null ? val.doubleValue() : Double.NaN;
}
}
/** {@inheritDoc} */
@Override
public void onTrainingBatch(Trainer trainer, BatchData batchData) {
// do nothing
}
/** {@inheritDoc} */
@Override
public void onValidationBatch(Trainer trainer, BatchData batchData) {
// do nothing
}
/** {@inheritDoc} */
@Override
public void onTrainingBegin(Trainer trainer) {
        this.startTimeMillis = System.currentTimeMillis();
this.prevMetricValue = Double.NaN;
this.numberOfEpochsWithoutImprovements = 0;
}
/** {@inheritDoc} */
@Override
public void onTrainingEnd(Trainer trainer) {
// do nothing
}
/**
* Creates a builder to build a {@link EarlyStoppingListener}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** A builder for a {@link EarlyStoppingListener}. */
public static final class Builder {
private final double objectiveSuccess;
private int minEpochs;
private long maxMillis;
private double earlyStopPctImprovement;
private int epochPatience;
private String monitoredMetric;
/** Constructs a {@link Builder} with default values. */
public Builder() {
this.objectiveSuccess = 0;
this.minEpochs = 0;
this.maxMillis = Long.MAX_VALUE;
this.earlyStopPctImprovement = 0;
this.epochPatience = 0;
this.monitoredMetric = "validateLoss";
}
/**
* Set the minimum # epochs, defaults to 0.
*
* @param minEpochs the minimum # epochs
* @return this builder
*/
public Builder optMinEpochs(int minEpochs) {
this.minEpochs = minEpochs;
return this;
}
/**
* Set the maximum duration a training run should take, defaults to Long.MAX_VALUE in ms.
*
* @param duration the maximum duration a training run should take
* @return this builder
*/
public Builder optMaxDuration(Duration duration) {
this.maxMillis = duration.toMillis();
return this;
}
/**
* Set the maximum # milliseconds a training run should take, defaults to Long.MAX_VALUE.
*
* @param maxMillis the maximum # milliseconds a training run should take
* @return this builder
*/
public Builder optMaxMillis(int maxMillis) {
this.maxMillis = maxMillis;
return this;
}
/**
* Consider early stopping if not x% improvement, defaults to 0.
*
* @param earlyStopPctImprovement the percentage improvement to consider early stopping,
* must be between 0 and 100.
* @return this builder
*/
public Builder optEarlyStopPctImprovement(double earlyStopPctImprovement) {
this.earlyStopPctImprovement = earlyStopPctImprovement;
return this;
}
/**
* Stop if insufficient improvement for x epochs in a row, defaults to 0.
*
* @param epochPatience the number of epochs without improvement to consider stopping, must
* be greater than 0.
* @return this builder
*/
public Builder optEpochPatience(int epochPatience) {
this.epochPatience = epochPatience;
return this;
}
/**
* Sets the name of the metric to monitor for early stopping.
*
* @param metricName the name of the metric (e.g., "validateLoss", "trainAccuracy", etc.)
* @return this builder instance
*/
public Builder optMonitoredMetric(String metricName) {
this.monitoredMetric = metricName;
return this;
}
/**
* Builds a {@link EarlyStoppingListener} with the specified values.
*
* @return a new {@link EarlyStoppingListener}
*/
public EarlyStoppingListener build() {
return new EarlyStoppingListener(
objectiveSuccess,
minEpochs,
maxMillis,
earlyStopPctImprovement,
epochPatience,
monitoredMetric);
}
}
/**
* Thrown when training is stopped early, the message will contain the reason why it is stopped
* early.
*/
public static class EarlyStoppedException extends RuntimeException {
private static final long serialVersionUID = 1L;
private final int stopEpoch;
/**
* Constructs an {@link EarlyStoppedException} with the specified message and epoch.
*
* @param stopEpoch the epoch at which training was stopped early
* @param message the message/reason why training was stopped early
*/
public EarlyStoppedException(int stopEpoch, String message) {
super(message);
this.stopEpoch = stopEpoch;
}
/**
* Gets the epoch at which training was stopped early.
*
* @return the epoch at which training was stopped early.
*/
public int getStopEpoch() {
return stopEpoch;
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/EpochTrainingListener.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.metric.Metrics;
import ai.djl.training.Trainer;
/**
 * {@link TrainingListener} that tracks epochs.
 *
 * <p>Adds an "epoch" metric with epoch times and saves an "Epoch" model property with the
 * number of epochs.
*/
public class EpochTrainingListener extends TrainingListenerAdapter {
private long epochTime;
private int numEpochs;
/** {@inheritDoc} */
@Override
public void onEpoch(Trainer trainer) {
Metrics metrics = trainer.getMetrics();
if (metrics != null) {
metrics.addMetric("epoch", System.nanoTime() - epochTime);
}
epochTime = System.nanoTime();
numEpochs++;
}
/** {@inheritDoc} */
@Override
public void onTrainingBegin(Trainer trainer) {
epochTime = System.nanoTime();
}
/** {@inheritDoc} */
@Override
public void onTrainingEnd(Trainer trainer) {
trainer.getModel().setProperty("Epoch", Integer.toString(numEpochs));
}
/**
* Returns the number of epochs.
*
* @return the number of epochs
*/
public int getNumEpochs() {
return numEpochs;
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/EvaluatorTrainingListener.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.Device;
import ai.djl.metric.Metrics;
import ai.djl.ndarray.NDList;
import ai.djl.training.Trainer;
import ai.djl.training.evaluator.Evaluator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* {@link TrainingListener} that records evaluator results.
*
* <p>Results are recorded for the following stages:
*
* <ul>
* <li>{@link #TRAIN_EPOCH} - This accumulates for the whole epoch and is recorded to a metric at
* the end of the epoch
* <li>{@link #TRAIN_PROGRESS} - This accumulates for {@link #progressUpdateFrequency} batches and
* is recorded to a metric at the end
 *   <li>{@link #TRAIN_ALL} - This does not accumulate across batches; it records every
 *       training batch to a metric
* <li>{@link #VALIDATE_EPOCH} - This accumulates for the whole validation epoch and is recorded
* to a metric at the end of the epoch
* </ul>
*
* <p>The training and validation evaluators are saved as metrics with names that can be found using
* {@link EvaluatorTrainingListener#metricName(Evaluator, String)}. The validation evaluators are
* also saved as model properties with the evaluator name.
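 *
 * <p>For example, the loss recorded for the last training epoch can be read back from the
 * metrics (a sketch; it assumes {@code Metrics} were set on the trainer and an epoch has
 * completed):
 *
 * <pre>{@code
 * String key = EvaluatorTrainingListener.metricName(
 *         trainer.getLoss(), EvaluatorTrainingListener.TRAIN_EPOCH);
 * float epochLoss = trainer.getMetrics().latestMetric(key).getValue().floatValue();
 * }</pre>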
*/
public class EvaluatorTrainingListener extends TrainingListenerAdapter {
    /** Accumulator stage for results over a whole training epoch. */
    public static final String TRAIN_EPOCH = "train/epoch";
    /** Accumulator stage for results over recent training batches. */
    public static final String TRAIN_PROGRESS = "train/progress";
    /** Accumulator stage for per-batch (non-accumulating) training results. */
    public static final String TRAIN_ALL = "train/all";
    /** Accumulator stage for results over a whole validation epoch. */
    public static final String VALIDATE_EPOCH = "validate/epoch";
private int progressUpdateFrequency;
private int progressCounter;
private Map<String, Float> latestEvaluations;
/**
     * Constructs an {@link EvaluatorTrainingListener} that updates the training progress at the
     * default frequency.
*
* <p>Current default frequency is every 5 batches.
*/
public EvaluatorTrainingListener() {
this(5);
}
/**
     * Constructs an {@link EvaluatorTrainingListener} that updates the training progress at the
     * given frequency.
*
* @param progressUpdateFrequency the number of batches to accumulate an evaluator before it is
* stable enough to output
*/
public EvaluatorTrainingListener(int progressUpdateFrequency) {
this.progressUpdateFrequency = progressUpdateFrequency;
progressCounter = 0;
latestEvaluations = new ConcurrentHashMap<>();
}
/** {@inheritDoc} */
@Override
public void onEpoch(Trainer trainer) {
Metrics metrics = trainer.getMetrics();
for (Evaluator evaluator : trainer.getEvaluators()) {
float trainValue = evaluator.getAccumulator(TRAIN_EPOCH);
float validateValue = evaluator.getAccumulator(VALIDATE_EPOCH);
if (metrics != null) {
String key = metricName(evaluator, TRAIN_EPOCH);
metrics.addMetric(key, trainValue);
String validateKey = metricName(evaluator, VALIDATE_EPOCH);
metrics.addMetric(validateKey, validateValue);
}
latestEvaluations.put("train_" + evaluator.getName(), trainValue);
latestEvaluations.put("validate_" + evaluator.getName(), validateValue);
if (evaluator == trainer.getLoss()) {
latestEvaluations.put("train_loss", trainValue);
latestEvaluations.put("validate_loss", validateValue);
}
}
for (Evaluator evaluator : trainer.getEvaluators()) {
evaluator.resetAccumulator(TRAIN_EPOCH);
evaluator.resetAccumulator(TRAIN_PROGRESS);
evaluator.resetAccumulator(TRAIN_ALL);
evaluator.resetAccumulator(VALIDATE_EPOCH);
}
progressCounter = 0;
}
/** {@inheritDoc} */
@Override
public void onTrainingBatch(Trainer trainer, BatchData batchData) {
for (Evaluator evaluator : trainer.getEvaluators()) {
evaluator.resetAccumulator(TRAIN_ALL);
}
updateEvaluators(trainer, batchData, new String[] {TRAIN_EPOCH, TRAIN_PROGRESS, TRAIN_ALL});
Metrics metrics = trainer.getMetrics();
if (metrics != null) {
for (Evaluator evaluator : trainer.getEvaluators()) {
String key = metricName(evaluator, TRAIN_ALL);
float value = evaluator.getAccumulator(TRAIN_ALL);
metrics.addMetric(key, value);
}
progressCounter++;
if (progressCounter == progressUpdateFrequency) {
for (Evaluator evaluator : trainer.getEvaluators()) {
String key = metricName(evaluator, TRAIN_PROGRESS);
float value = evaluator.getAccumulator(TRAIN_PROGRESS);
metrics.addMetric(key, value);
}
progressCounter = 0;
}
}
}
/** {@inheritDoc} */
@Override
public void onValidationBatch(Trainer trainer, BatchData batchData) {
updateEvaluators(trainer, batchData, new String[] {VALIDATE_EPOCH});
}
private void updateEvaluators(Trainer trainer, BatchData batchData, String[] accumulators) {
for (Evaluator evaluator : trainer.getEvaluators()) {
for (Device device : batchData.getLabels().keySet()) {
NDList labels = batchData.getLabels().get(device);
NDList predictions = batchData.getPredictions().get(device);
evaluator.updateAccumulators(accumulators, labels, predictions);
}
}
}
/** {@inheritDoc} */
@Override
public void onTrainingBegin(Trainer trainer) {
for (Evaluator evaluator : trainer.getEvaluators()) {
evaluator.addAccumulator(TRAIN_EPOCH);
evaluator.addAccumulator(TRAIN_PROGRESS);
evaluator.addAccumulator(TRAIN_ALL);
evaluator.addAccumulator(VALIDATE_EPOCH);
}
}
/**
* Returns the metric created with the evaluator for the given stage.
*
* @param evaluator the evaluator to read the metric from
* @param stage one of {@link #TRAIN_EPOCH}, {@link #TRAIN_PROGRESS}, or {@link #VALIDATE_EPOCH}
* @return the metric name to use
*/
public static String metricName(Evaluator evaluator, String stage) {
switch (stage) {
case TRAIN_EPOCH:
return "train_epoch_" + evaluator.getName();
case TRAIN_PROGRESS:
return "train_progress_" + evaluator.getName();
case TRAIN_ALL:
return "train_all_" + evaluator.getName();
case VALIDATE_EPOCH:
return "validate_epoch_" + evaluator.getName();
default:
throw new IllegalArgumentException("Invalid metric stage");
}
}
/**
* Returns the latest evaluations.
*
* <p>The latest evaluations are updated on each epoch.
*
* @return the latest evaluations
*/
public Map<String, Float> getLatestEvaluations() {
return latestEvaluations;
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/LoggingTrainingListener.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.metric.Metrics;
import ai.djl.training.Trainer;
import ai.djl.training.evaluator.Evaluator;
import ai.djl.training.loss.Loss;
import ai.djl.training.util.ProgressBar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
* {@link TrainingListener} that outputs the progress of training each batch and epoch into logs.
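 *
 * <p>For example, to log only every fifth epoch (a sketch; {@code loss} is an assumed {@code
 * Loss} instance):
 *
 * <pre>{@code
 * new DefaultTrainingConfig(loss)
 *         .addTrainingListeners(new LoggingTrainingListener(5));
 * }</pre>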
*
* @see <a href="https://docs.djl.ai/master/docs/development/configure_logging.html">The guide on
* DJL logging</a>
*/
public class LoggingTrainingListener implements TrainingListener {
private static final Logger logger = LoggerFactory.getLogger(LoggingTrainingListener.class);
private int frequency;
private int numEpochs;
private ProgressBar trainingProgressBar;
private ProgressBar validateProgressBar;
/** Constructs a {@code LoggingTrainingListener} instance. */
public LoggingTrainingListener() {}
/**
     * Constructs a {@code LoggingTrainingListener} instance with a specified logging frequency.
     *
     * <p>Prints out logs every {@code frequency} epochs.
     *
     * @param frequency the epoch frequency at which to print logs
*/
public LoggingTrainingListener(int frequency) {
this.frequency = frequency;
}
/** {@inheritDoc} */
@Override
public void onEpoch(Trainer trainer) {
numEpochs++;
if (frequency > 1 && numEpochs % frequency != 1) {
return;
}
logger.info("Epoch {} finished.", numEpochs);
Metrics metrics = trainer.getMetrics();
if (metrics != null) {
Loss loss = trainer.getLoss();
String status =
getEvaluatorsStatus(
metrics,
trainer.getEvaluators(),
EvaluatorTrainingListener.TRAIN_EPOCH,
Short.MAX_VALUE);
logger.info("Train: {}", status);
String metricName =
EvaluatorTrainingListener.metricName(
loss, EvaluatorTrainingListener.VALIDATE_EPOCH);
if (metrics.hasMetric(metricName)) {
status =
getEvaluatorsStatus(
metrics,
trainer.getEvaluators(),
EvaluatorTrainingListener.VALIDATE_EPOCH,
Short.MAX_VALUE);
if (!status.isEmpty()) {
logger.info("Validate: {}", status);
}
} else {
logger.info("validation has not been run.");
}
}
}
/** {@inheritDoc} */
@Override
public void onTrainingBatch(Trainer trainer, BatchData batchData) {
if (frequency > 1 && numEpochs % frequency != 1) {
return;
}
if (trainingProgressBar == null) {
trainingProgressBar =
new ProgressBar("Training", batchData.getBatch().getProgressTotal());
}
trainingProgressBar.update(
batchData.getBatch().getProgress(),
getTrainingStatus(trainer, batchData.getBatch().getSize()));
}
private String getTrainingStatus(Trainer trainer, int batchSize) {
Metrics metrics = trainer.getMetrics();
if (metrics == null) {
return "";
}
StringBuilder sb = new StringBuilder();
sb.append(
getEvaluatorsStatus(
metrics,
trainer.getEvaluators(),
EvaluatorTrainingListener.TRAIN_PROGRESS,
2));
if (metrics.hasMetric("train")) {
float batchTime = metrics.latestMetric("train").getValue().longValue() / 1_000_000_000f;
sb.append(String.format(", speed: %.2f items/sec", batchSize / batchTime));
}
return sb.toString();
}
/** {@inheritDoc} */
@Override
public void onValidationBatch(Trainer trainer, BatchData batchData) {
if (frequency > 1 && numEpochs % frequency != 1) {
return;
}
if (validateProgressBar == null) {
validateProgressBar =
new ProgressBar("Validating", batchData.getBatch().getProgressTotal());
}
validateProgressBar.update(batchData.getBatch().getProgress());
}
/** {@inheritDoc} */
@Override
public void onTrainingBegin(Trainer trainer) {
String devicesMsg;
Device[] devices = trainer.getDevices();
if (devices.length == 1 && Device.Type.CPU.equals(devices[0].getDeviceType())) {
devicesMsg = Device.cpu().toString();
} else {
devicesMsg = devices.length + " GPUs";
}
logger.info("Training on: {}.", devicesMsg);
long init = System.nanoTime();
Engine engine = trainer.getManager().getEngine();
String engineName = engine.getEngineName();
String version = engine.getVersion();
long loaded = System.nanoTime();
logger.info(
String.format(
"Load %s Engine Version %s in %.3f ms.",
engineName, version, (loaded - init) / 1_000_000f));
}
/** {@inheritDoc} */
@Override
public void onTrainingEnd(Trainer trainer) {
Metrics metrics = trainer.getMetrics();
if (metrics == null) {
return;
}
float p50;
float p90;
if (metrics.hasMetric("train")) {
// possible no train metrics if only one iteration is executed
p50 = metrics.percentile("train", 50).getValue().longValue() / 1_000_000f;
p90 = metrics.percentile("train", 90).getValue().longValue() / 1_000_000f;
logger.info(String.format("train P50: %.3f ms, P90: %.3f ms", p50, p90));
}
if (metrics.hasMetric("forward")) {
p50 = metrics.percentile("forward", 50).getValue().longValue() / 1_000_000f;
p90 = metrics.percentile("forward", 90).getValue().longValue() / 1_000_000f;
logger.info(String.format("forward P50: %.3f ms, P90: %.3f ms", p50, p90));
}
if (metrics.hasMetric("training-metrics")) {
p50 = metrics.percentile("training-metrics", 50).getValue().longValue() / 1_000_000f;
p90 = metrics.percentile("training-metrics", 90).getValue().longValue() / 1_000_000f;
logger.info(String.format("training-metrics P50: %.3f ms, P90: %.3f ms", p50, p90));
}
if (metrics.hasMetric("backward")) {
p50 = metrics.percentile("backward", 50).getValue().longValue() / 1_000_000f;
p90 = metrics.percentile("backward", 90).getValue().longValue() / 1_000_000f;
logger.info(String.format("backward P50: %.3f ms, P90: %.3f ms", p50, p90));
}
if (metrics.hasMetric("step")) {
p50 = metrics.percentile("step", 50).getValue().longValue() / 1_000_000f;
p90 = metrics.percentile("step", 90).getValue().longValue() / 1_000_000f;
logger.info(String.format("step P50: %.3f ms, P90: %.3f ms", p50, p90));
}
if (metrics.hasMetric("epoch")) {
p50 = metrics.percentile("epoch", 50).getValue().longValue() / 1_000_000_000f;
p90 = metrics.percentile("epoch", 90).getValue().longValue() / 1_000_000_000f;
logger.info(String.format("epoch P50: %.3f s, P90: %.3f s", p50, p90));
}
}
private String getEvaluatorsStatus(
Metrics metrics, List<Evaluator> toOutput, String stage, int limit) {
List<String> metricOutputs = new ArrayList<>(limit + 1);
int count = 0;
for (Evaluator evaluator : toOutput) {
if (++count > limit) {
metricOutputs.add("...");
break;
}
String metricName = EvaluatorTrainingListener.metricName(evaluator, stage);
if (metrics.hasMetric(metricName)) {
float value = metrics.latestMetric(metricName).getValue().floatValue();
// use .2 precision to avoid new line in progress bar
String output;
if (Math.abs(value) < .01 || Math.abs(value) > 9999) {
output = String.format("%s: %.2E", evaluator.getName(), value);
} else if (metricName.startsWith("validate_") && Float.isNaN(value)) {
continue;
} else {
output = String.format("%s: %.2f", evaluator.getName(), value);
}
metricOutputs.add(output);
} else {
metricOutputs.add(String.format("%s: _", evaluator.getName()));
}
}
return String.join(", ", metricOutputs);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/MemoryTrainingListener.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.Device;
import ai.djl.metric.Metric;
import ai.djl.metric.Metrics;
import ai.djl.metric.Unit;
import ai.djl.training.Trainer;
import ai.djl.util.cuda.CudaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.lang.management.RuntimeMXBean;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
/**
* {@link TrainingListener} that collects the memory usage information.
*
* <p>If an outputDir is provided, the file "$outputDir/memory.log" will be created after training
* with the memory usage results.
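 *
 * <p>A minimal usage sketch (assuming an existing {@code Model}; the directory name is
 * hypothetical, and memory metrics are only collected when the JVM is started with {@code
 * -Dcollect-memory=true}):
 *
 * <pre>{@code
 * DefaultTrainingConfig config =
 *         new DefaultTrainingConfig(Loss.softmaxCrossEntropyLoss())
 *                 .addTrainingListeners(new MemoryTrainingListener("build/logs"));
 * try (Trainer trainer = model.newTrainer(config)) {
 *     trainer.setMetrics(new Metrics()); // required, otherwise nothing is collected
 *     // ... training loop ...
 * }
 * }</pre>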
*/
public class MemoryTrainingListener extends TrainingListenerAdapter {
private static final Logger logger = LoggerFactory.getLogger(MemoryTrainingListener.class);
private String outputDir;
/** Constructs a {@link MemoryTrainingListener} that does not output data to a file. */
public MemoryTrainingListener() {}
/**
* Constructs a {@link MemoryTrainingListener} that outputs data in the given directory.
*
* <p>If an output directory is provided, the file "$outputDir/memory.log" will be created after
 * training with the memory usage results. The log file contains heap bytes, non-heap bytes,
 * CPU percentage, and RSS bytes, along with timestamps.
*
* @param outputDir the directory to output the tracked memory data in
*/
public MemoryTrainingListener(String outputDir) {
this.outputDir = outputDir;
}
/** {@inheritDoc} */
@Override
public void onTrainingBatch(Trainer trainer, BatchData batchData) {
Metrics metrics = trainer.getMetrics();
collectMemoryInfo(metrics);
}
/** {@inheritDoc} */
@Override
public void onValidationBatch(Trainer trainer, BatchData batchData) {
Metrics metrics = trainer.getMetrics();
collectMemoryInfo(metrics);
}
/** {@inheritDoc} */
@Override
public void onTrainingEnd(Trainer trainer) {
Metrics metrics = trainer.getMetrics();
dumpMemoryInfo(metrics, outputDir);
}
/**
     * Collects memory information. In order to collect metrics, the {@link Trainer} must have
     * metrics set. Collection is enabled by starting the JVM with the system property {@code
     * -Dcollect-memory=true}.
*
* @param metrics {@link Metrics} to store memory information
*/
public static void collectMemoryInfo(Metrics metrics) {
if (metrics != null && Boolean.getBoolean("collect-memory")) {
MemoryMXBean memBean = ManagementFactory.getMemoryMXBean();
MemoryUsage heap = memBean.getHeapMemoryUsage();
MemoryUsage nonHeap = memBean.getNonHeapMemoryUsage();
long heapUsed = heap.getUsed();
long nonHeapUsed = nonHeap.getUsed();
getProcessInfo(metrics);
metrics.addMetric("Heap", heapUsed, Unit.BYTES);
metrics.addMetric("NonHeap", nonHeapUsed, Unit.BYTES);
int gpuCount = CudaUtils.getGpuCount();
            // CudaUtils.getGpuMemory() will allocate memory on GPUs if the CUDA runtime is not
            // initialized.
for (int i = 0; i < gpuCount; ++i) {
Device device = Device.gpu(i);
MemoryUsage mem = CudaUtils.getGpuMemory(device);
metrics.addMetric("GPU-" + i, mem.getCommitted(), Unit.BYTES);
}
}
}
/**
* Dump memory metrics into log directory.
*
* @param metrics metrics contains memory information
* @param logDir output log directory
*/
public static void dumpMemoryInfo(Metrics metrics, String logDir) {
if (metrics == null || logDir == null) {
return;
}
try {
Path dir = Paths.get(logDir);
Files.createDirectories(dir);
Path file = dir.resolve("memory.log");
try (BufferedWriter writer =
Files.newBufferedWriter(
file, StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
List<Metric> list = new ArrayList<>();
list.addAll(metrics.getMetric("Heap"));
list.addAll(metrics.getMetric("NonHeap"));
list.addAll(metrics.getMetric("cpu"));
list.addAll(metrics.getMetric("rss"));
int gpuCount = CudaUtils.getGpuCount();
for (int i = 0; i < gpuCount; ++i) {
list.addAll(metrics.getMetric("GPU-" + i));
}
for (Metric metric : list) {
writer.append(metric.toString());
writer.newLine();
}
}
} catch (IOException e) {
logger.error("Failed dump memory log", e);
}
}
private static void getProcessInfo(Metrics metrics) {
if (System.getProperty("os.name").startsWith("Linux")
|| System.getProperty("os.name").startsWith("Mac")) {
            // This solution only works on Linux-like systems.
RuntimeMXBean mxBean = ManagementFactory.getRuntimeMXBean();
String pid = mxBean.getName().split("@")[0];
String cmd = "ps -o %cpu= -o rss= -p " + pid;
try {
Process process = Runtime.getRuntime().exec(cmd);
try (InputStream is = process.getInputStream()) {
String line = new String(readAll(is), StandardCharsets.UTF_8).trim();
String[] tokens = line.split("\\s+");
if (tokens.length != 2) {
logger.error("Invalid ps output: {}", line);
return;
}
float cpu = Float.parseFloat(tokens[0]);
long rss = Long.parseLong(tokens[1]) * 1024;
metrics.addMetric("cpu", cpu, Unit.PERCENT);
metrics.addMetric("rss", rss, Unit.BYTES);
}
} catch (IOException e) {
logger.error("Failed execute cmd: {}", cmd, e);
}
}
}
private static byte[] readAll(InputStream is) throws IOException {
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
int read;
byte[] buf = new byte[8192];
while ((read = is.read(buf)) != -1) {
bos.write(buf, 0, read);
}
return bos.toByteArray();
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/SaveModelTrainingListener.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.Model;
import ai.djl.training.Trainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.function.Consumer;
/** A {@link TrainingListener} that saves a model and can save checkpoints. */
public class SaveModelTrainingListener extends TrainingListenerAdapter {
private static final Logger logger = LoggerFactory.getLogger(SaveModelTrainingListener.class);
private String outputDir;
private String overrideModelName;
private Consumer<Trainer> onSaveModel;
private int checkpoint;
private int epoch;
/**
* Constructs a {@link SaveModelTrainingListener} using the model's name.
*
* @param outputDir the directory to output the checkpointed models in
*/
public SaveModelTrainingListener(String outputDir) {
this(outputDir, null, -1);
}
/**
* Constructs a {@link SaveModelTrainingListener}.
*
     * @param outputDir the directory to output the checkpointed models in
     * @param overrideModelName an override model name to save checkpoints with
*/
public SaveModelTrainingListener(String outputDir, String overrideModelName) {
this(outputDir, overrideModelName, -1);
}
/**
* Constructs a {@link SaveModelTrainingListener}.
*
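     * <p>A construction sketch (directory, model name, and property values are hypothetical):
     *
     * <pre>{@code
     * SaveModelTrainingListener listener = new SaveModelTrainingListener("build/model", "mlp", 2);
     * listener.setSaveModelCallback(
     *         trainer -> trainer.getModel().setProperty("Accuracy", "0.93"));
     * }</pre>
     *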
     * @param outputDir the directory to output the checkpointed models in
     * @param overrideModelName an override model name to save checkpoints with
* @param checkpoint adds a checkpoint every n epochs
*/
public SaveModelTrainingListener(String outputDir, String overrideModelName, int checkpoint) {
this.outputDir = outputDir;
this.checkpoint = checkpoint;
if (outputDir == null) {
throw new IllegalArgumentException(
"Can not save checkpoint without specifying an output directory");
}
this.overrideModelName = overrideModelName;
}
/** {@inheritDoc} */
@Override
public void onEpoch(Trainer trainer) {
epoch++;
if (outputDir == null) {
return;
}
if (checkpoint > 0 && epoch % checkpoint == 0) {
// save model at end of each epoch
saveModel(trainer);
}
}
/** {@inheritDoc} */
@Override
public void onTrainingEnd(Trainer trainer) {
if (checkpoint == -1 || epoch % checkpoint != 0) {
saveModel(trainer);
}
}
/**
* Returns the override model name to save checkpoints with.
*
* @return the override model name to save checkpoints with
*/
public String getOverrideModelName() {
return overrideModelName;
}
/**
* Sets the override model name to save checkpoints with.
*
* @param overrideModelName the override model name to save checkpoints with
*/
public void setOverrideModelName(String overrideModelName) {
this.overrideModelName = overrideModelName;
}
/**
* Returns the checkpoint frequency (or -1 for no checkpointing) in {@link
* SaveModelTrainingListener}.
*
* @return the checkpoint frequency (or -1 for no checkpointing)
*/
public int getCheckpoint() {
return checkpoint;
}
/**
* Sets the checkpoint frequency in {@link SaveModelTrainingListener}.
*
* @param checkpoint how many epochs between checkpoints (or -1 for no checkpoints)
*/
public void setCheckpoint(int checkpoint) {
this.checkpoint = checkpoint;
}
/**
* Sets the callback function on model saving.
*
* <p>This allows user to set custom properties to model metadata.
*
* @param onSaveModel the callback function on model saving
*/
public void setSaveModelCallback(Consumer<Trainer> onSaveModel) {
this.onSaveModel = onSaveModel;
}
protected void saveModel(Trainer trainer) {
Model model = trainer.getModel();
String modelName = model.getName();
if (overrideModelName != null) {
modelName = overrideModelName;
}
try {
model.setProperty("Epoch", String.valueOf(epoch));
if (onSaveModel != null) {
onSaveModel.accept(trainer);
}
model.save(Paths.get(outputDir), modelName);
} catch (IOException e) {
logger.error("Failed to save checkpoint", e);
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/TimeMeasureTrainingListener.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.metric.Metric;
import ai.djl.metric.Metrics;
import ai.djl.training.Trainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.List;
/**
* {@link TrainingListener} that outputs the training time metrics after training is done.
*
* <p>The training time data is placed in the file "$outputDir/training.log" and the validation data
* is placed in "$outputDir/validate.log".
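 *
 * <p>A minimal sketch (the output directory is hypothetical, and {@code config} is assumed to be
 * an existing {@code DefaultTrainingConfig}):
 *
 * <pre>{@code
 * config.addTrainingListeners(new TimeMeasureTrainingListener("build/logs"));
 * }</pre>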
*/
public class TimeMeasureTrainingListener extends TrainingListenerAdapter {
private static final Logger logger = LoggerFactory.getLogger(TimeMeasureTrainingListener.class);
private String outputDir;
private long trainBatchBeginTime;
private long validateBatchBeginTime;
/**
* Constructs a {@link TimeMeasureTrainingListener}.
*
* @param outputDir the directory to output the tracked timing data in
*/
public TimeMeasureTrainingListener(String outputDir) {
this.outputDir = outputDir;
trainBatchBeginTime = -1;
validateBatchBeginTime = -1;
}
/** {@inheritDoc} */
@Override
public void onEpoch(Trainer trainer) {
trainBatchBeginTime = -1;
validateBatchBeginTime = -1;
}
/** {@inheritDoc} */
@Override
public void onTrainingBatch(Trainer trainer, BatchData batchData) {
if (trainBatchBeginTime != -1) {
trainer.addMetric("train", trainBatchBeginTime);
}
trainBatchBeginTime = System.nanoTime();
}
/** {@inheritDoc} */
@Override
public void onValidationBatch(Trainer trainer, BatchData batchData) {
if (validateBatchBeginTime != -1) {
trainer.addMetric("validate", validateBatchBeginTime);
}
validateBatchBeginTime = System.nanoTime();
}
/** {@inheritDoc} */
@Override
public void onTrainingEnd(Trainer trainer) {
Metrics metrics = trainer.getMetrics();
dumpTrainingTimeInfo(metrics, outputDir);
}
private static void dumpTrainingTimeInfo(Metrics metrics, String logDir) {
if (metrics == null || logDir == null) {
return;
}
try {
Path dir = Paths.get(logDir);
Files.createDirectories(dir);
dumpMetricToFile(dir.resolve("training.log"), metrics.getMetric("train"));
dumpMetricToFile(dir.resolve("validate.log"), metrics.getMetric("validate"));
} catch (IOException e) {
logger.error("Failed dump training log", e);
}
}
private static void dumpMetricToFile(Path path, List<Metric> metrics) throws IOException {
if (metrics == null || metrics.isEmpty()) {
return;
}
try (BufferedWriter writer =
Files.newBufferedWriter(
path, StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
for (Metric metric : metrics) {
writer.append(metric.toString());
writer.newLine();
}
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/TrainingListener.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.Device;
import ai.djl.ndarray.NDList;
import ai.djl.training.Trainer;
import ai.djl.training.dataset.Batch;
import java.util.Map;
/**
* {@code TrainingListener} offers an interface that performs some actions when certain events have
* occurred in the {@link Trainer}.
*
* <p>The methods {@link #onEpoch(Trainer) onEpoch}, {@link #onTrainingBatch(Trainer, BatchData)
* onTrainingBatch}, {@link #onValidationBatch(Trainer, BatchData) onValidationBatch} are called
* during training. Adding an implementation of the listener to the {@link Trainer} will perform any
 * desired action at those junctures. These could be used for collecting metrics, logging, or any
* other purpose to enhance the training process.
*
* <p>There are many listeners that contain different functionality, and it is often best to combine
* a number of listeners. We recommend starting with one of our sets of {@link
* TrainingListener.Defaults}. Then, more listeners can be added afterwards.
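 *
 * <p>A minimal sketch of registering the default listener set (assuming an existing {@code
 * DefaultTrainingConfig}):
 *
 * <pre>{@code
 * config.addTrainingListeners(TrainingListener.Defaults.logging());
 * }</pre>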
*/
public interface TrainingListener {
/**
* Listens to the end of an epoch during training.
*
* @param trainer the trainer the listener is attached to
*/
void onEpoch(Trainer trainer);
/**
* Listens to the end of training one batch of data during training.
*
* @param trainer the trainer the listener is attached to
* @param batchData the data from the batch
*/
void onTrainingBatch(Trainer trainer, BatchData batchData);
/**
* Listens to the end of validating one batch of data during validation.
*
* @param trainer the trainer the listener is attached to
* @param batchData the data from the batch
*/
void onValidationBatch(Trainer trainer, BatchData batchData);
/**
* Listens to the beginning of training.
*
* @param trainer the trainer the listener is attached to
*/
void onTrainingBegin(Trainer trainer);
/**
* Listens to the end of training.
*
* @param trainer the trainer the listener is attached to
*/
void onTrainingEnd(Trainer trainer);
/** Contains default {@link TrainingListener} sets. */
interface Defaults {
/**
* A basic {@link TrainingListener} set with the minimal recommended functionality.
*
* <p>This contains:
*
* <ul>
* <li>{@link EpochTrainingListener}
* <li>{@link EvaluatorTrainingListener}
* <li>{@link DivergenceCheckTrainingListener}
* </ul>
*
* @return the new set of listeners
*/
static TrainingListener[] basic() {
return new TrainingListener[] {
new EpochTrainingListener(),
new EvaluatorTrainingListener(),
new DivergenceCheckTrainingListener()
};
}
/**
* A default {@link TrainingListener} set including batch output logging.
*
* <p>This contains:
*
* <ul>
* <li>Everything from {@link Defaults#basic()}
* <li>{@link LoggingTrainingListener}
* </ul>
*
* @return the new set of listeners
*/
static TrainingListener[] logging() {
return new TrainingListener[] {
new EpochTrainingListener(),
new EvaluatorTrainingListener(),
new DivergenceCheckTrainingListener(),
new LoggingTrainingListener()
};
}
/**
* A default {@link TrainingListener} set including batch output logging.
*
* <p>This has the same listeners as {@link Defaults#logging()}, but reduces the logging
* frequency.
*
         * @param frequency the number of epochs between log outputs
* @return the new set of listeners
*/
static TrainingListener[] logging(int frequency) {
return new TrainingListener[] {
new EpochTrainingListener(),
new EvaluatorTrainingListener(),
new DivergenceCheckTrainingListener(),
new LoggingTrainingListener(frequency)
};
}
/**
* A default {@link TrainingListener} set including batch output logging and output
* directory.
*
* <p>This contains:
*
* <ul>
* <li>Everything from {@link Defaults#logging()}
* <li>{@link MemoryTrainingListener}
* <li>{@link TimeMeasureTrainingListener}
* </ul>
*
* @param outputDir the output directory to store created log files. Can't be null
* @return the new set of listeners
*/
static TrainingListener[] logging(String outputDir) {
if (outputDir == null) {
throw new IllegalArgumentException("The output directory can't be null");
}
return new TrainingListener[] {
new EpochTrainingListener(),
new MemoryTrainingListener(outputDir),
new EvaluatorTrainingListener(),
new DivergenceCheckTrainingListener(),
new LoggingTrainingListener(),
new TimeMeasureTrainingListener(outputDir)
};
}
}
/** A class to pass data from the batch into the training listeners. */
class BatchData {
private Batch batch;
private Map<Device, NDList> labels;
private Map<Device, NDList> predictions;
/**
* Constructs a new {@link BatchData}.
*
* @param batch the original batch
* @param labels the labels for each device
* @param predictions the predictions for each device
*/
public BatchData(Batch batch, Map<Device, NDList> labels, Map<Device, NDList> predictions) {
this.batch = batch;
this.labels = labels;
this.predictions = predictions;
}
/**
* Returns the original batch.
*
* @return the original batch
*/
public Batch getBatch() {
return batch;
}
/**
* Returns the labels for each device.
*
* @return the labels for each device
*/
public Map<Device, NDList> getLabels() {
return labels;
}
/**
* Returns the predictions for each device.
*
* @return the predictions for each device
*/
public Map<Device, NDList> getPredictions() {
return predictions;
}
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/TrainingListenerAdapter.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.listener;
import ai.djl.training.Trainer;
/**
 * Base implementation of the training listener that does nothing. This is to be used as a base
 * class for custom training listeners that only want to listen to one event, so it is not
 * necessary to override the methods you do not care about.
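 *
 * <p>For example, a listener that only reacts to epoch boundaries (a sketch; the log message is
 * illustrative):
 *
 * <pre>{@code
 * TrainingListener epochListener =
 *         new TrainingListenerAdapter() {
 *             public void onEpoch(Trainer trainer) {
 *                 System.out.println("epoch finished");
 *             }
 *         };
 * }</pre>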
*/
public abstract class TrainingListenerAdapter implements TrainingListener {
/** {@inheritDoc} */
@Override
public void onEpoch(Trainer trainer) {}
/** {@inheritDoc} */
@Override
public void onTrainingBatch(Trainer trainer, BatchData batchData) {}
/** {@inheritDoc} */
@Override
public void onValidationBatch(Trainer trainer, BatchData batchData) {}
/** {@inheritDoc} */
@Override
public void onTrainingBegin(Trainer trainer) {}
/** {@inheritDoc} */
@Override
public void onTrainingEnd(Trainer trainer) {}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/listener/package-info.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains classes providing functionality during training through {@link
* ai.djl.training.listener.TrainingListener}.
*
* <p>It contains a main interface {@link ai.djl.training.listener.TrainingListener} and various
* listeners that extend it.
*/
package ai.djl.training.listener;
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/AbstractCompositeLoss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.util.Pair;
import java.util.List;
/**
* {@code AbstractCompositeLoss} is a {@link Loss} class that can combine other {@link Loss}es
* together to make a larger loss.
*
* <p>The AbstractCompositeLoss is designed to be extended for more complicated composite losses.
* For simpler use cases, consider using the {@link SimpleCompositeLoss}.
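 *
 * <p>A sketch of a subclass that feeds every component the full labels and predictions (the
 * component losses chosen here are arbitrary; imports are elided):
 *
 * <pre>{@code
 * public class SumOfLosses extends AbstractCompositeLoss {
 *     public SumOfLosses() {
 *         super("SumOfLosses");
 *         components = Arrays.asList(Loss.l2Loss(), Loss.l1Loss());
 *     }
 *
 *     protected Pair<NDList, NDList> inputForComponent(
 *             int componentIndex, NDList labels, NDList predictions) {
 *         return new Pair<>(labels, predictions);
 *     }
 * }
 * }</pre>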
*/
public abstract class AbstractCompositeLoss extends Loss {
protected List<Loss> components;
/**
* Constructs a composite loss with the given name.
*
* @param name the display name of the loss
*/
public AbstractCompositeLoss(String name) {
super(name);
}
/**
     * Returns the inputs used to compute the loss for a component loss.
*
* @param componentIndex the index of the component loss
* @param labels the label input to the composite loss
* @param predictions the predictions input to the composite loss
* @return a pair of the (labels, predictions) inputs to the component loss
*/
protected abstract Pair<NDList, NDList> inputForComponent(
int componentIndex, NDList labels, NDList predictions);
/**
* Returns the component losses that make up the composite loss.
*
* @return the component losses that make up the composite loss
*/
public List<Loss> getComponents() {
return components;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
NDArray[] lossComponents = new NDArray[components.size()];
for (int i = 0; i < components.size(); i++) {
Pair<NDList, NDList> inputs = inputForComponent(i, labels, predictions);
lossComponents[i] = components.get(i).evaluate(inputs.getKey(), inputs.getValue());
}
return NDArrays.add(lossComponents);
}
/** {@inheritDoc} */
@Override
public void addAccumulator(String key) {
for (Loss component : components) {
component.addAccumulator(key);
}
}
/** {@inheritDoc} */
@Override
public void updateAccumulators(String[] keys, NDList labels, NDList predictions) {
for (int i = 0; i < components.size(); i++) {
Pair<NDList, NDList> inputs = inputForComponent(i, labels, predictions);
components.get(i).updateAccumulators(keys, inputs.getKey(), inputs.getValue());
}
}
/** {@inheritDoc} */
@Override
public void resetAccumulator(String key) {
for (Loss component : components) {
component.resetAccumulator(key);
}
}
/** {@inheritDoc} */
@Override
public float getAccumulator(String key) {
return (float)
components.stream().mapToDouble(component -> component.getAccumulator(key)).sum();
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/ElasticNetWeightDecay.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
/**
 * {@code ElasticNetWeightDecay} calculates the L1+L2 penalty of a set of parameters. Used for
* regularization.
*
 * <p>The loss is defined as \(L = \lambda_1 \sum_i \vert W_i\vert + \lambda_2 \sum_i {W_i}^2\).
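 *
 * <p>A construction sketch (assuming {@code block} is the model's {@code Block}; the lambda
 * values are hypothetical and imports are elided):
 *
 * <pre>{@code
 * NDList weights = new NDList();
 * for (Pair<String, Parameter> pair : block.getParameters()) {
 *     weights.add(pair.getValue().getArray());
 * }
 * Loss penalty = new ElasticNetWeightDecay("penalty", weights, 1e-2f, 1e-3f);
 * }</pre>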
*/
public class ElasticNetWeightDecay extends Loss {
private float lambda1;
private float lambda2;
private NDList parameters;
/**
* Calculates Elastic Net weight decay for regularization.
*
* @param parameters holds the model weights that will be penalized
*/
public ElasticNetWeightDecay(NDList parameters) {
this("ElasticNetWeightDecay", parameters);
}
/**
* Calculates Elastic Net weight decay for regularization.
*
* @param name the name of the penalty
* @param parameters holds the model weights that will be penalized
*/
public ElasticNetWeightDecay(String name, NDList parameters) {
this(name, parameters, 1);
}
/**
* Calculates Elastic Net weight decay for regularization.
*
* @param name the name of the penalty
* @param parameters holds the model weights that will be penalized
* @param lambda the weight to apply to the penalty value, default 1 (both L1 and L2)
*/
public ElasticNetWeightDecay(String name, NDList parameters, float lambda) {
super(name);
this.lambda1 = lambda;
this.lambda2 = lambda;
this.parameters = parameters;
}
/**
* Calculates Elastic Net weight decay for regularization.
*
* @param name the name of the penalty
* @param parameters holds the model weights that will be penalized
* @param lambda1 the weight to apply to the L1 penalty value, default 1
* @param lambda2 the weight to apply to the L2 penalty value, default 1
*/
public ElasticNetWeightDecay(String name, NDList parameters, float lambda1, float lambda2) {
super(name);
this.lambda1 = lambda1;
this.lambda2 = lambda2;
this.parameters = parameters;
}
    private NDArray l1(NDArray w) {
        return w.abs().sum();
    }
    private NDArray l2(NDArray w) {
        return w.square().sum();
    }
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDManager manager = parameters.getManager();
NDArray sum1 = manager.create(0.0f);
NDArray sum2 = manager.create(0.0f);
for (NDArray wi : parameters) {
sum1.addi(l1(wi));
sum2.addi(l2(wi));
}
return sum1.muli(lambda1).addi(sum2.muli(lambda2));
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/HingeLoss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.nn.Activation;
/**
* {@code HingeLoss} is a type of {@link Loss}.
*
 * <p>Hinge loss is defined by: \(L = \sum_i \max(0, {margin} - {pred}_i \cdot {label}_i)\)
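 *
 * <p>A worked sketch (hypothetical values; {@code manager} is an {@code NDManager}, and labels
 * are expected in {-1, 1}):
 *
 * <pre>{@code
 * NDList labels = new NDList(manager.create(new float[] {1f, -1f}));
 * NDList preds = new NDList(manager.create(new float[] {0.5f, -2f}));
 * // max(0, 1 - 1 * 0.5) = 0.5 and max(0, 1 - (-1) * (-2)) = 0, so the mean is 0.25
 * float value = new HingeLoss().evaluate(labels, preds).getFloat();
 * }</pre>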
*/
public class HingeLoss extends Loss {
private int margin;
private float weight;
/** Calculates Hinge loss. */
public HingeLoss() {
this("HingeLoss");
}
/**
* Calculates Hinge loss.
*
* @param name the name of the loss
*/
public HingeLoss(String name) {
this(name, 1, 1);
}
/**
* Calculates Hinge loss.
*
* @param name the name of the loss
     * @param margin the margin in hinge loss. Defaults to 1
* @param weight the weight to apply on loss value, default 1
*/
public HingeLoss(String name, int margin, float weight) {
super(name);
this.margin = margin;
this.weight = weight;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDArray pred = prediction.singletonOrThrow();
NDArray labelReshaped = label.singletonOrThrow().reshape(pred.getShape());
NDArray loss = Activation.relu(NDArrays.sub(margin, labelReshaped.mul(pred)));
if (weight != 1) {
loss = loss.mul(weight);
}
return loss.mean();
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/IndexLoss.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
/**
* A wrapper for a {@link Loss} that evaluates on only a particular {@link NDArray} in the
* predictions and/or labels {@link NDList}s.
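 *
 * <p>For example (a sketch), to apply an L2 loss to only the second array in both lists:
 *
 * <pre>{@code
 * Loss loss = new IndexLoss(Loss.l2Loss(), 1);
 * // evaluates labels.get(1) against predictions.get(1)
 * }</pre>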
*/
public class IndexLoss extends Loss {
private Loss loss;
private Integer predictionsIndex;
private Integer labelsIndex;
/**
* Constructs an {@link IndexLoss} with the same index for both predictions and labels.
*
* @param loss the base evaluator
* @param index the index for both predictions and labels
*/
public IndexLoss(Loss loss, int index) {
this(loss, index, index);
}
/**
* Constructs an {@link IndexLoss}.
*
* @param loss the base evaluator
* @param predictionsIndex the predictions index
* @param labelsIndex the labels index
*/
public IndexLoss(Loss loss, Integer predictionsIndex, Integer labelsIndex) {
super(loss.getName());
this.loss = loss;
this.predictionsIndex = predictionsIndex;
this.labelsIndex = labelsIndex;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
return loss.evaluate(getLabels(labels), getPredictions(predictions));
}
private NDList getPredictions(NDList predictions) {
if (predictionsIndex == null) {
return predictions;
}
return new NDList(predictions.get(predictionsIndex));
}
private NDList getLabels(NDList labels) {
if (labelsIndex == null) {
return labels;
}
return new NDList(labels.get(labelsIndex));
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/L1Loss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
/**
* {@code L1Loss} calculates L1 loss between label and prediction.
*
* <p>L1 loss is defined by \(L = \sum_i \vert {label}_i - {prediction}_i \vert\).
*/
public class L1Loss extends Loss {
private float weight;
    /** Calculates L1 Loss between the label and prediction, a.k.a. MAE (Mean Absolute Error). */
public L1Loss() {
this("L1Loss");
}
/**
     * Calculates L1 Loss between the label and prediction, a.k.a. MAE (Mean Absolute Error).
*
* @param name the name of the loss
*/
public L1Loss(String name) {
this(name, 1);
}
/**
     * Calculates L1 Loss between the label and prediction, a.k.a. MAE (Mean Absolute Error).
*
* @param name the name of the loss
* @param weight the weight to apply on loss value, default 1
*/
public L1Loss(String name, float weight) {
super(name);
this.weight = weight;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDArray pred = prediction.singletonOrThrow();
NDArray labelReshaped = label.singletonOrThrow().reshape(pred.getShape());
NDArray loss = labelReshaped.sub(pred).abs();
if (weight != 1) {
            // scale the loss by the configured weight
            loss = loss.mul(weight);
}
return loss.mean();
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/L1WeightDecay.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
/**
* {@code L1WeightDecay} calculates L1 penalty of a set of parameters. Used for regularization.
*
* <p>L1 loss is defined as \(L1 = \lambda \sum_i \vert W_i\vert\).
*/
public class L1WeightDecay extends Loss {
private float lambda;
private NDList parameters;
/**
* Calculates L1 weight decay for regularization.
*
* @param parameters holds the model weights that will be penalized
*/
public L1WeightDecay(NDList parameters) {
this("L1WeightDecay", parameters);
}
/**
* Calculates L1 weight decay for regularization.
*
* @param name the name of the penalty
* @param parameters holds the model weights that will be penalized
*/
public L1WeightDecay(String name, NDList parameters) {
this(name, parameters, 1);
}
/**
* Calculates L1 weight decay for regularization.
*
* @param name the name of the penalty
* @param parameters holds the model weights that will be penalized
* @param lambda the weight to apply to the penalty value, default 1
*/
public L1WeightDecay(String name, NDList parameters, float lambda) {
super(name);
this.lambda = lambda;
this.parameters = parameters;
}
    private NDArray l1(NDArray w) {
        return w.abs().sum();
    }
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDManager manager = parameters.getManager();
NDArray sum = manager.create(0.0f);
for (NDArray wi : parameters) {
sum.addi(l1(wi));
}
return sum.muli(lambda);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/L2Loss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
/**
 * Calculates L2Loss between label and prediction, a.k.a. MSE (Mean Squared Error).
*
* <p>L2 loss is defined by \(L = \frac{1}{2} \sum_i \vert {label}_i - {prediction}_i \vert^2\)
*/
public class L2Loss extends Loss {
private float weight;
    /** Calculates L2Loss between the label and prediction, a.k.a. MSE (Mean Squared Error). */
public L2Loss() {
this("L2Loss");
}
/**
     * Calculates L2Loss between the label and prediction, a.k.a. MSE (Mean Squared Error).
*
* @param name the name of the loss
*/
public L2Loss(String name) {
this(name, 1.f / 2);
}
/**
     * Calculates L2Loss between the label and prediction, a.k.a. MSE (Mean Squared Error).
*
* @param name the name of the loss
* @param weight the weight to apply on loss value, default 1/2
*/
public L2Loss(String name, float weight) {
super(name);
this.weight = weight;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDArray pred = prediction.singletonOrThrow();
NDArray labelReshaped = label.singletonOrThrow().reshape(pred.getShape());
NDArray loss = labelReshaped.sub(pred).square().mul(weight);
return loss.mean();
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/L2WeightDecay.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
/**
* {@code L2WeightDecay} calculates L2 penalty of a set of parameters. Used for regularization.
*
* <p>L2 loss is defined by \(L2 = \lambda \sum_i {W_i}^2\).
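 *
 * <p>Weight decay is typically combined with a base loss, for example (a sketch, assuming {@code
 * weights} already holds the penalized parameters and the lambda value is hypothetical):
 *
 * <pre>{@code
 * Loss loss = new SimpleCompositeLoss()
 *         .addLoss(Loss.softmaxCrossEntropyLoss())
 *         .addLoss(new L2WeightDecay("wd", weights, 1e-4f));
 * }</pre>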
*/
public class L2WeightDecay extends Loss {
private float lambda;
private NDList parameters;
/**
* Calculates L2 weight decay for regularization.
*
* @param parameters holds the model weights that will be penalized
*/
public L2WeightDecay(NDList parameters) {
this("L2WeightDecay", parameters);
}
/**
* Calculates L2 weight decay for regularization.
*
* @param name the name of the penalty
* @param parameters holds the model weights that will be penalized
*/
public L2WeightDecay(String name, NDList parameters) {
this(name, parameters, 1);
}
/**
* Calculates L2 weight decay for regularization.
*
* @param name the name of the penalty
* @param parameters holds the model weights that will be penalized
* @param lambda the weight to apply to the penalty value, default 1
*/
public L2WeightDecay(String name, NDList parameters, float lambda) {
super(name);
this.lambda = lambda;
this.parameters = parameters;
}
    private NDArray l2(NDArray w) {
        return w.square().sum();
    }
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDManager manager = parameters.getManager();
NDArray sum = manager.create(0.0f);
for (NDArray wi : parameters) {
sum.addi(l2(wi));
}
return sum.muli(lambda);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/Loss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDList;
import ai.djl.training.evaluator.Evaluator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Loss functions (or Cost functions) are used to evaluate the model predictions against true labels
* for optimization.
*
* <p>Although all evaluators can be used to measure the performance of a model, not all of them are
* suited to being used by an optimizer. Loss functions are usually non-negative where a larger loss
* represents worse performance. They are also real-valued to accurately compare models.
*
* <p>When creating a loss function, you should avoid having the loss depend on the batch size. For
* example, if you have a loss per item in a batch and sum those losses, your loss would be {@code
 * numItemsInBatch*avgLoss}. Instead, you should take the mean of those losses to remove the
 * batch-size factor. Otherwise, it becomes difficult to tune the learning rate, since any change
 * in the batch size would throw it off; with a variable batch size, it would be even harder.
*
* <p>For more details about the class internals, see {@link Evaluator}.
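 *
 * <p>A minimal evaluation sketch (hypothetical values):
 *
 * <pre>{@code
 * try (NDManager manager = NDManager.newBaseManager()) {
 *     NDList labels = new NDList(manager.create(new float[] {0f, 2f}));
 *     NDList preds = new NDList(manager.create(new float[] {1f, 1f}));
 *     // mean(|0 - 1|, |2 - 1|) = 1.0
 *     float l1 = Loss.l1Loss().evaluate(labels, preds).getFloat();
 * }
 * }</pre>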
*/
public abstract class Loss extends Evaluator {
private Map<String, Float> totalLoss;
/**
     * Constructs a {@code Loss} with the given display name.
     *
     * @param name the display name of the Loss
*/
public Loss(String name) {
super(name);
totalLoss = new ConcurrentHashMap<>();
}
/**
* Returns a new instance of {@link L1Loss} with default weight and batch axis.
*
* @return a new instance of {@link L1Loss}
*/
public static L1Loss l1Loss() {
return new L1Loss();
}
/**
* Returns a new instance of {@link L1Loss} with default weight and batch axis.
*
* @param name the name of the loss
* @return a new instance of {@link L1Loss}
*/
public static L1Loss l1Loss(String name) {
return new L1Loss(name);
}
/**
* Returns a new instance of {@link L1Loss} with given weight.
*
* @param name the name of the loss
* @param weight the weight to apply on loss value, default 1
* @return a new instance of {@link L1Loss}
*/
public static L1Loss l1Loss(String name, float weight) {
return new L1Loss(name, weight);
}
/**
* Returns a new instance of {@link QuantileL1Loss} with given quantile.
*
* @param quantile the quantile position of the data to focus on
* @return a new instance of {@link QuantileL1Loss}
*/
public static QuantileL1Loss quantileL1Loss(float quantile) {
return new QuantileL1Loss(quantile);
}
/**
* Returns a new instance of {@link QuantileL1Loss} with given quantile.
*
* @param name the name of the loss
* @param quantile the quantile position of the data to focus on
* @return a new instance of {@link QuantileL1Loss}
*/
public static QuantileL1Loss quantileL1Loss(String name, float quantile) {
return new QuantileL1Loss(name, quantile);
}
/**
* Returns a new instance of {@link L2Loss} with default weight and batch axis.
*
* @return a new instance of {@link L2Loss}
*/
public static L2Loss l2Loss() {
return new L2Loss();
}
/**
* Returns a new instance of {@link L2Loss} with default weight and batch axis.
*
* @param name the name of the loss
* @return a new instance of {@link L2Loss}
*/
public static L2Loss l2Loss(String name) {
return new L2Loss(name);
}
/**
* Returns a new instance of {@link L2Loss} with given weight and batch axis.
*
* @param name the name of the loss
* @param weight the weight to apply on loss value, default 1
* @return a new instance of {@link L2Loss}
*/
public static L2Loss l2Loss(String name, float weight) {
return new L2Loss(name, weight);
}
/**
* Returns a new instance of {@link SigmoidBinaryCrossEntropyLoss} with default arguments.
*
* @return a new instance of {@link SigmoidBinaryCrossEntropyLoss}
*/
public static SigmoidBinaryCrossEntropyLoss sigmoidBinaryCrossEntropyLoss() {
return new SigmoidBinaryCrossEntropyLoss();
}
/**
* Returns a new instance of {@link SigmoidBinaryCrossEntropyLoss} with default arguments.
*
* @param name the name of the loss
* @return a new instance of {@link SigmoidBinaryCrossEntropyLoss}
*/
public static SigmoidBinaryCrossEntropyLoss sigmoidBinaryCrossEntropyLoss(String name) {
return new SigmoidBinaryCrossEntropyLoss(name);
}
/**
* Returns a new instance of {@link SigmoidBinaryCrossEntropyLoss} with the given arguments.
*
* @param name the name of the loss
* @param weight the weight to apply on the loss value, default 1
* @param fromSigmoid whether the input is from the output of sigmoid, default false
* @return a new instance of {@link SigmoidBinaryCrossEntropyLoss}
*/
public static SigmoidBinaryCrossEntropyLoss sigmoidBinaryCrossEntropyLoss(
String name, float weight, boolean fromSigmoid) {
return new SigmoidBinaryCrossEntropyLoss(name, weight, fromSigmoid);
}
/**
* Returns a new instance of {@link SoftmaxCrossEntropyLoss} with default arguments.
*
* @return a new instance of {@link SoftmaxCrossEntropyLoss}
*/
public static SoftmaxCrossEntropyLoss softmaxCrossEntropyLoss() {
return new SoftmaxCrossEntropyLoss();
}
/**
* Returns a new instance of {@link SoftmaxCrossEntropyLoss} with default arguments.
*
* @param name the name of the loss
* @return a new instance of {@link SoftmaxCrossEntropyLoss}
*/
public static SoftmaxCrossEntropyLoss softmaxCrossEntropyLoss(String name) {
return new SoftmaxCrossEntropyLoss(name);
}
/**
* Returns a new instance of {@link SoftmaxCrossEntropyLoss} with the given arguments.
*
* @param name the name of the loss
* @param weight the weight to apply on the loss value, default 1
* @param classAxis the axis that represents the class probabilities, default -1
* @param sparseLabel whether labels are integer array or probabilities, default true
     * @param fromLogit whether predictions are log probabilities or un-normalized numbers
* @return a new instance of {@link SoftmaxCrossEntropyLoss}
*/
public static SoftmaxCrossEntropyLoss softmaxCrossEntropyLoss(
String name, float weight, int classAxis, boolean sparseLabel, boolean fromLogit) {
return new SoftmaxCrossEntropyLoss(name, weight, classAxis, sparseLabel, fromLogit);
}
/**
* Returns a new instance of {@link MaskedSoftmaxCrossEntropyLoss} with default arguments.
*
* @return a new instance of {@link MaskedSoftmaxCrossEntropyLoss}
*/
public static MaskedSoftmaxCrossEntropyLoss maskedSoftmaxCrossEntropyLoss() {
return new MaskedSoftmaxCrossEntropyLoss();
}
/**
* Returns a new instance of {@link MaskedSoftmaxCrossEntropyLoss} with default arguments.
*
* @param name the name of the loss
* @return a new instance of {@link MaskedSoftmaxCrossEntropyLoss}
*/
public static MaskedSoftmaxCrossEntropyLoss maskedSoftmaxCrossEntropyLoss(String name) {
return new MaskedSoftmaxCrossEntropyLoss(name);
}
/**
* Returns a new instance of {@link MaskedSoftmaxCrossEntropyLoss} with the given arguments.
*
* @param name the name of the loss
* @param weight the weight to apply on the loss value, default 1
* @param classAxis the axis that represents the class probabilities, default -1
* @param sparseLabel whether labels are integer array or probabilities, default true
     * @param fromLogit whether predictions are log probabilities or un-normalized numbers
* @return a new instance of {@link MaskedSoftmaxCrossEntropyLoss}
*/
public static MaskedSoftmaxCrossEntropyLoss maskedSoftmaxCrossEntropyLoss(
String name, float weight, int classAxis, boolean sparseLabel, boolean fromLogit) {
return new MaskedSoftmaxCrossEntropyLoss(name, weight, classAxis, sparseLabel, fromLogit);
}
/**
* Returns a new instance of {@link HingeLoss} with default arguments.
*
* @return a new instance of {@link HingeLoss}
*/
public static HingeLoss hingeLoss() {
return new HingeLoss();
}
/**
* Returns a new instance of {@link HingeLoss} with default arguments.
*
* @param name the name of the loss
* @return a new instance of {@link HingeLoss}
*/
public static HingeLoss hingeLoss(String name) {
return new HingeLoss(name);
}
/**
* Returns a new instance of {@link HingeLoss} with the given arguments.
*
* @param name the name of the loss
     * @param margin the margin in hinge loss. Defaults to 1
* @param weight the weight to apply on loss value, default 1
* @return a new instance of {@link HingeLoss}
*/
public static HingeLoss hingeLoss(String name, int margin, float weight) {
return new HingeLoss(name, margin, weight);
}
/**
* Returns a new instance of {@link L1WeightDecay} with default weight and name.
*
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link L1WeightDecay}
*/
public static L1WeightDecay l1WeightedDecay(NDList parameters) {
return new L1WeightDecay(parameters);
}
/**
* Returns a new instance of {@link L1WeightDecay} with default weight.
*
* @param name the name of the weight decay
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link L1WeightDecay}
*/
public static L1WeightDecay l1WeightedDecay(String name, NDList parameters) {
return new L1WeightDecay(name, parameters);
}
/**
* Returns a new instance of {@link L1WeightDecay}.
*
* @param name the name of the weight decay
* @param weight the weight to apply on weight decay value, default 1
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link L1WeightDecay}
*/
public static L1WeightDecay l1WeightedDecay(String name, float weight, NDList parameters) {
return new L1WeightDecay(name, parameters, weight);
}
/**
* Returns a new instance of {@link L2WeightDecay} with default weight and name.
*
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link L2WeightDecay}
*/
public static L2WeightDecay l2WeightedDecay(NDList parameters) {
return new L2WeightDecay(parameters);
}
/**
* Returns a new instance of {@link L2WeightDecay} with default weight.
*
* @param name the name of the weight decay
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link L2WeightDecay}
*/
public static L2WeightDecay l2WeightedDecay(String name, NDList parameters) {
return new L2WeightDecay(name, parameters);
}
/**
* Returns a new instance of {@link L2WeightDecay}.
*
* @param name the name of the weight decay
* @param weight the weight to apply on weight decay value, default 1
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link L2WeightDecay}
*/
public static L2WeightDecay l2WeightedDecay(String name, float weight, NDList parameters) {
return new L2WeightDecay(name, parameters, weight);
}
/**
* Returns a new instance of {@link ElasticNetWeightDecay} with default weight and name.
*
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link ElasticNetWeightDecay}
*/
public static ElasticNetWeightDecay elasticNetWeightedDecay(NDList parameters) {
return new ElasticNetWeightDecay(parameters);
}
/**
* Returns a new instance of {@link ElasticNetWeightDecay} with default weight.
*
* @param name the name of the weight decay
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link ElasticNetWeightDecay}
*/
public static ElasticNetWeightDecay elasticNetWeightedDecay(String name, NDList parameters) {
return new ElasticNetWeightDecay(name, parameters);
}
/**
* Returns a new instance of {@link ElasticNetWeightDecay}.
*
* @param name the name of the weight decay
* @param weight the weight to apply on weight decay values, default 1
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link ElasticNetWeightDecay}
*/
public static ElasticNetWeightDecay elasticNetWeightedDecay(
String name, float weight, NDList parameters) {
return new ElasticNetWeightDecay(name, parameters, weight);
}
/**
* Returns a new instance of {@link ElasticNetWeightDecay}.
*
* @param name the name of the weight decay
* @param weight1 the weight to apply on weight decay L1 value, default 1
* @param weight2 the weight to apply on weight decay L2 value, default 1
* @param parameters holds the model weights that will be penalized
* @return a new instance of {@link ElasticNetWeightDecay}
*/
public static ElasticNetWeightDecay elasticNetWeightedDecay(
String name, float weight1, float weight2, NDList parameters) {
return new ElasticNetWeightDecay(name, parameters, weight1, weight2);
}
/** {@inheritDoc} */
@Override
public void addAccumulator(String key) {
totalInstances.put(key, 0L);
totalLoss.put(key, 0f);
}
/** {@inheritDoc} */
@Override
public void updateAccumulator(String key, NDList labels, NDList predictions) {
updateAccumulators(new String[] {key}, labels, predictions);
}
/** {@inheritDoc} */
@Override
public void updateAccumulators(String[] keys, NDList labels, NDList predictions) {
// this is a synchronized operation, only call it at end of batch or epoch
float update = evaluate(labels, predictions).sum().getFloat();
for (String key : keys) {
totalInstances.compute(key, (k, v) -> v + 1);
totalLoss.compute(key, (k, v) -> v + update);
}
}
/** {@inheritDoc} */
@Override
public void resetAccumulator(String key) {
totalInstances.compute(key, (k, v) -> 0L);
totalLoss.compute(key, (k, v) -> 0f);
}
/** {@inheritDoc} */
@Override
public float getAccumulator(String key) {
Long total = totalInstances.get(key);
if (total == null) {
            throw new IllegalArgumentException("No loss found for key: " + key);
}
if (total == 0) {
return Float.NaN;
}
return totalLoss.get(key) / totalInstances.get(key);
}
}
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/MaskedSoftmaxCrossEntropyLoss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDIndex;
/**
* {@code MaskedSoftmaxCrossEntropyLoss} is an implementation of {@link Loss} that only considers a
* specific number of values for the loss computations, and masks the rest according to the given
* sequence.
*/
public class MaskedSoftmaxCrossEntropyLoss extends Loss {
private float weight;
private int classAxis;
private boolean sparseLabel;
private boolean fromLogit;
/** Creates a new instance of {@code MaskedSoftmaxCrossEntropyLoss} with default parameters. */
public MaskedSoftmaxCrossEntropyLoss() {
this("MaskedSoftmaxCrossEntropyLoss");
}
/**
* Creates a new instance of {@code MaskedSoftmaxCrossEntropyLoss} with default parameters.
*
* @param name the name of the loss
*/
public MaskedSoftmaxCrossEntropyLoss(String name) {
this(name, 1, -1, true, false);
}
/**
* Creates a new instance of {@code MaskedSoftmaxCrossEntropyLoss} with the given parameters.
*
* @param name the name of the loss
* @param weight the weight to apply on the loss value, default 1
* @param classAxis the axis that represents the class probabilities, default -1
* @param sparseLabel whether labels are a 1-D integer array of [batch_size] (true) or 2-D
*     probabilities of [batch_size, n-class] (false), default true
* @param fromLogit if false, the inputs are assumed to be pre-softmax numbers and logSoftmax
*     will be applied to them; if true, they are assumed to be log probabilities already,
*     default false
*/
public MaskedSoftmaxCrossEntropyLoss(
String name, float weight, int classAxis, boolean sparseLabel, boolean fromLogit) {
super(name);
this.weight = weight;
this.classAxis = classAxis;
this.sparseLabel = sparseLabel;
this.fromLogit = fromLogit;
}
/**
* Calculates the loss between the labels and the predictions. The {@code labels} parameter
* is an {@link NDList} that contains the label and the mask sequence, in that order.
*
* @param labels the {@link NDList} that contains correct values and the mask sequence
* @param predictions the predicted values
* @return the evaluation result
*/
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
NDArray weights = labels.head().onesLike().expandDims(-1).sequenceMask(labels.get(1));
NDArray pred = predictions.singletonOrThrow();
if (!fromLogit) {
pred = pred.logSoftmax(classAxis);
}
NDArray loss;
NDArray lab = labels.head();
if (sparseLabel) {
NDIndex pickIndex =
new NDIndex()
.addAllDim(Math.floorMod(classAxis, pred.getShape().dimension()))
.addPickDim(lab);
loss = pred.get(pickIndex).neg();
} else {
lab = lab.reshape(pred.getShape());
loss = pred.mul(lab).neg().sum(new int[] {classAxis}, true);
}
loss = loss.mul(weights);
if (weight != 1) {
loss = loss.mul(weight);
}
return loss.mean(new int[] {1});
}
}
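// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of the documented input layout: the labels NDList carries
// [classIds, validLengths], and the mask is built from the valid lengths.
// Shapes are illustrative assumptions: batch 2, sequence length 3, 4 classes.
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.training.loss.MaskedSoftmaxCrossEntropyLoss;

public class MaskedLossSketch {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            MaskedSoftmaxCrossEntropyLoss loss = new MaskedSoftmaxCrossEntropyLoss();
            NDArray pred = manager.randomUniform(0, 1, new Shape(2, 3, 4)); // raw predictions
            NDArray classIds = manager.zeros(new Shape(2, 3)); // sparse class labels
            NDArray validLengths = manager.create(new float[] {2f, 3f}); // valid steps per row
            NDArray value =
                    loss.evaluate(new NDList(classIds, validLengths), new NDList(pred));
            System.out.println(value); // per-sequence masked loss
        }
    }
}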
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/QuantileL1Loss.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
/**
* {@code QuantileL1Loss} calculates the Weighted Quantile Loss between labels and predictions. It
* is useful in regression problems where the best-fit line should target a particular quantile;
* e.g., to target the P90, instantiate {@code new QuantileL1Loss("P90", 0.90f)}. The loss focuses
* the fit on a chosen percentile of the data: q=0.5 recovers the ordinary regression case, where
* the best-fit line lies in the center of the data, while q=0.9 places the best-fit line above
* the center. Differentiating the loss shows that, in special cases such as uniform \(\partial
* forecast / \partial w\), exactly 90% of the data points lie below the best-fit line.
*
* <pre>
* def quantile_loss(target, forecast, q):
*         return 2 * np.sum(np.abs((forecast - target) * ((target &lt;= forecast) - q)))
* </pre>
*
* <p>Reference: <a href="https://bibinmjose.github.io/2021/03/08/errorblog.html">...</a>
*/
public class QuantileL1Loss extends Loss {
private Number quantile;
/**
* Creates a {@code QuantileL1Loss} for regression problems.
*
* @param quantile the quantile position of the data to focus on
*/
public QuantileL1Loss(float quantile) {
this("QuantileL1Loss", quantile);
}
/**
* Creates a {@code QuantileL1Loss} for regression problems.
*
* @param name the name of the loss function, default "QuantileL1Loss"
* @param quantile the quantile position of the data to focus on
*/
public QuantileL1Loss(String name, float quantile) {
super(name);
this.quantile = quantile;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
NDArray pred = predictions.singletonOrThrow();
NDArray labelReshaped = labels.singletonOrThrow().reshape(pred.getShape());
NDArray loss =
pred.sub(labelReshaped)
.mul(labelReshaped.lte(pred).toType(DataType.FLOAT32, false).sub(quantile))
.abs()
.mul(2);
return loss.mean();
}
}
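// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of targeting the P90 quantile as described in the class
// Javadoc. The values are illustrative assumptions.
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.training.loss.QuantileL1Loss;

public class QuantileLossSketch {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            QuantileL1Loss p90 = new QuantileL1Loss("P90", 0.90f);
            NDArray target = manager.create(new float[] {1f, 2f, 3f, 4f});
            NDArray forecast = manager.create(new float[] {1.2f, 1.8f, 3.5f, 4.5f});
            NDArray value = p90.evaluate(new NDList(target), new NDList(forecast));
            System.out.println(value); // scalar mean quantile loss
        }
    }
}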
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/SigmoidBinaryCrossEntropyLoss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.nn.Activation;
/**
* {@code SigmoidBinaryCrossEntropyLoss} is a type of {@link Loss}.
*
* <p>Sigmoid binary cross-entropy loss is defined by: \(L = -\sum_i {label_i * log(prob_i) *
* posWeight + (1 - label_i) * log(1 - prob_i)}\) where \(prob = \frac{1}{1 + e^{-pred}}\)
*/
public class SigmoidBinaryCrossEntropyLoss extends Loss {
private float weight;
private boolean fromSigmoid;
/** Performs Sigmoid cross-entropy loss for binary classification. */
public SigmoidBinaryCrossEntropyLoss() {
this("SigmoidBinaryCrossEntropyLoss");
}
/**
* Performs Sigmoid cross-entropy loss for binary classification.
*
* @param name the name of the loss
*/
public SigmoidBinaryCrossEntropyLoss(String name) {
this(name, 1, false);
}
/**
* Performs Sigmoid cross-entropy loss for binary classification.
*
* @param name the name of the loss
* @param weight the weight to apply on the loss value, default 1
* @param fromSigmoid whether the input is from the output of sigmoid, default false
*/
public SigmoidBinaryCrossEntropyLoss(String name, float weight, boolean fromSigmoid) {
super(name);
this.weight = weight;
this.fromSigmoid = fromSigmoid;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDArray pred = prediction.singletonOrThrow();
NDArray lab = label.singletonOrThrow();
lab = lab.reshape(pred.getShape());
NDArray loss;
if (!fromSigmoid) {
// TODO: Add Position weight option
loss =
Activation.relu(pred)
.sub(pred.mul(lab))
.add(Activation.softPlus(pred.abs().neg()));
} else {
loss =
epsLog(pred)
.mul(lab)
.add(epsLog(NDArrays.sub(1., pred)).mul(NDArrays.sub(1., lab)));
}
if (weight != 1f) {
loss = loss.mul(weight);
}
return loss.mean();
}
/**
* Computes a log with added epsilon to avoid errors.
*
* @param a the input array
* @return the computed value
*/
private NDArray epsLog(NDArray a) {
double eps = 1e-12;
return a.add(eps).log();
}
}
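// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of the default configuration (fromSigmoid=false), which
// expects raw logits rather than sigmoid outputs. Values are illustrative.
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.training.loss.SigmoidBinaryCrossEntropyLoss;

public class BceSketch {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            SigmoidBinaryCrossEntropyLoss bce = new SigmoidBinaryCrossEntropyLoss();
            NDArray logits = manager.create(new float[] {2.0f, -1.0f, 0.5f});
            NDArray labels = manager.create(new float[] {1f, 0f, 1f});
            NDArray value = bce.evaluate(new NDList(labels), new NDList(logits));
            System.out.println(value); // scalar mean loss
        }
    }
}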
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/SimpleCompositeLoss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDList;
import ai.djl.util.Pair;
import java.util.ArrayList;
/**
* {@code SimpleCompositeLoss} is an implementation of the {@link Loss} abstract class that can
* combine different {@link Loss} functions by adding the individual losses together.
*
* <p>For cases where the losses use only a single index of the labels and/or predictions, use the
* {@link IndexLoss}.
*
* <p>For an example of using this loss, see <a
* href="https://github.com/deepjavalibrary/djl/blob/master/examples/src/main/java/ai/djl/examples/training/TrainCaptcha.java">the
* captcha training example.</a>
*/
public class SimpleCompositeLoss extends AbstractCompositeLoss {
/**
* Creates a new empty instance of {@code SimpleCompositeLoss} that can combine multiple
* {@link Loss} components.
*/
public SimpleCompositeLoss() {
this("CompositeLoss");
}
/**
* Creates a new empty instance of {@code SimpleCompositeLoss} that can combine multiple
* {@link Loss} components.
*
* @param name the display name of the loss
*/
public SimpleCompositeLoss(String name) {
super(name);
components = new ArrayList<>();
}
/**
* Adds a Loss that applies to all labels and predictions to this composite loss.
*
* @param loss the loss to add
* @return this composite loss
*/
public SimpleCompositeLoss addLoss(Loss loss) {
components.add(loss);
return this;
}
/** {@inheritDoc} */
@Override
protected Pair<NDList, NDList> inputForComponent(
int componentIndex, NDList labels, NDList predictions) {
return new Pair<>(labels, predictions);
}
}
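// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of combining losses. Wrapping each component in an
// IndexLoss routes it to one index of the labels/predictions NDLists,
// mirroring the captcha example referenced in the Javadoc above.
import ai.djl.training.loss.IndexLoss;
import ai.djl.training.loss.Loss;
import ai.djl.training.loss.SimpleCompositeLoss;

public class CompositeLossSketch {
    public static void main(String[] args) {
        SimpleCompositeLoss loss = new SimpleCompositeLoss();
        loss.addLoss(new IndexLoss(Loss.softmaxCrossEntropyLoss(), 0))
                .addLoss(new IndexLoss(Loss.softmaxCrossEntropyLoss(), 1));
        // loss.evaluate(labels, predictions) now sums the two component losses.
    }
}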
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/SingleShotDetectionLoss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.modality.cv.MultiBoxTarget;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.util.Pair;
import java.util.Arrays;
/**
* {@code SingleShotDetectionLoss} is an implementation of {@link Loss}. It is used to compute the
* loss while training a Single Shot Detection (SSD) model for object detection. It involves
* computing the targets given the generated anchors, labels and predictions, and then summing
* the class loss and the bounding box loss.
*/
public class SingleShotDetectionLoss extends AbstractCompositeLoss {
private MultiBoxTarget multiBoxTarget = MultiBoxTarget.builder().build();
/** Creates a new instance of {@code SingleShotDetectionLoss}. */
public SingleShotDetectionLoss() {
super("SingleShotDetectionLoss");
components =
Arrays.asList(
Loss.softmaxCrossEntropyLoss("ClassLoss"), Loss.l1Loss("BoundingBoxLoss"));
}
/**
* Selects the inputs for a given loss component.
*
* @param componentIndex the component index: 0 for the class loss, 1 for the bounding box loss
* @param labels the ground-truth labels; labels.head() is combined with the anchors to compute
*     targets via {@link MultiBoxTarget}
* @param predictions the predicted values (anchors, class predictions, bounding box
*     predictions)
* @return the labels and predictions to feed to that component
*/
@Override
protected Pair<NDList, NDList> inputForComponent(
int componentIndex, NDList labels, NDList predictions) {
NDArray anchors = predictions.get(0);
NDArray classPredictions = predictions.get(1);
NDList targets =
multiBoxTarget.target(
new NDList(anchors, labels.head(), classPredictions.transpose(0, 2, 1)));
switch (componentIndex) {
case 0: // ClassLoss
NDArray classLabels = targets.get(2);
return new Pair<>(new NDList(classLabels), new NDList(classPredictions));
case 1: // BoundingBoxLoss
NDArray boundingBoxPredictions = predictions.get(2);
NDArray boundingBoxLabels = targets.get(0);
NDArray boundingBoxMasks = targets.get(1);
return new Pair<>(
new NDList(boundingBoxLabels.mul(boundingBoxMasks)),
new NDList(boundingBoxPredictions.mul(boundingBoxMasks)));
default:
throw new IllegalArgumentException("Invalid component index");
}
}
}
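// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of how the inputs to SingleShotDetectionLoss are laid out,
// following the code above: predictions = (anchors, class predictions,
// bounding box predictions), and labels.head() holds ground-truth boxes. All
// shapes are illustrative assumptions; the evaluate call is omitted because
// producing consistent targets requires a full SSD training setup.
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;

public class SsdLossInputSketch {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            int batch = 2, numAnchors = 4, numClasses = 3;
            NDArray anchors = manager.ones(new Shape(1, numAnchors, 4));
            NDArray classPreds = manager.ones(new Shape(batch, numAnchors, numClasses));
            NDArray boxPreds = manager.ones(new Shape(batch, numAnchors * 4));
            NDList predictions = new NDList(anchors, classPreds, boxPreds);
            // Ground truth: (batch, numObjects, 5) as (classId, xmin, ymin, xmax, ymax).
            NDList labels = new NDList(manager.zeros(new Shape(batch, 1, 5)));
            System.out.println(predictions.size() + " prediction arrays, "
                    + labels.size() + " label array");
        }
    }
}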
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/SoftmaxCrossEntropyLoss.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDIndex;
/**
* {@code SoftmaxCrossEntropyLoss} is a type of {@link Loss} that calculates the softmax cross
* entropy loss.
*
* <p>If {@code sparse_label} is {@code true} (default), {@code label} should contain integer
* category indicators. Then, \(L = -\sum_i \log p_{i, label_i}\). If {@code sparse_label} is {@code
* false}, {@code label} should be one-hot class coding or probability distribution and its shape
* should be the same as the shape of {@code prediction}. Then, \(L = -\sum_i \sum_j {label}_j \log
* p_{ij}\).
*/
public class SoftmaxCrossEntropyLoss extends Loss {
private float weight;
private int classAxis;
private boolean sparseLabel;
private boolean fromLogit;
/** Creates a new instance of {@code SoftmaxCrossEntropyLoss} with default parameters. */
public SoftmaxCrossEntropyLoss() {
this("SoftmaxCrossEntropyLoss");
}
/**
* Creates a new instance of {@code SoftmaxCrossEntropyLoss} with default parameters.
*
* @param name the name of the loss
*/
public SoftmaxCrossEntropyLoss(String name) {
// By default, fromLogit=true, meaning the input is the raw prediction before
// softmax is applied.
this(name, 1, -1, true, true);
}
/**
* Creates a new instance of {@code SoftmaxCrossEntropyLoss} with the given parameters.
*
* @param name the name of the loss
* @param weight the weight to apply on the loss value, default 1
* @param classAxis the axis that represents the class probabilities, default -1
* @param sparseLabel whether labels are rank-1 integer array of [batch_size] (true) or rank-2
* one-hot or probability distribution of shape [batch_size, n-class] (false), default true
* @param fromLogit if true, the inputs are assumed to be pre-softmax numbers and logSoftmax
*     will be applied to them, default true
*/
public SoftmaxCrossEntropyLoss(
String name, float weight, int classAxis, boolean sparseLabel, boolean fromLogit) {
super(name);
this.weight = weight;
this.classAxis = classAxis;
this.sparseLabel = sparseLabel;
this.fromLogit = fromLogit;
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList label, NDList prediction) {
NDArray pred = prediction.singletonOrThrow();
if (fromLogit) {
pred = pred.logSoftmax(classAxis);
}
NDArray loss;
NDArray lab = label.singletonOrThrow();
if (sparseLabel) {
NDIndex pickIndex =
new NDIndex()
.addAllDim(Math.floorMod(classAxis, pred.getShape().dimension()))
.addPickDim(lab);
loss = pred.get(pickIndex).neg();
} else {
lab = lab.reshape(pred.getShape());
loss = pred.mul(lab).neg().sum(new int[] {classAxis}, true);
}
if (weight != 1) {
loss = loss.mul(weight);
}
return loss.mean();
}
}
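// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of the default configuration: raw logits plus sparse
// integer class labels (one class id per row). Values are illustrative.
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.training.loss.SoftmaxCrossEntropyLoss;

public class CrossEntropySketch {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            SoftmaxCrossEntropyLoss ce = new SoftmaxCrossEntropyLoss();
            NDArray logits =
                    manager.create(new float[] {2f, 1f, 0f, 0f, 1f, 2f}, new Shape(2, 3));
            NDArray classIds = manager.create(new float[] {0f, 2f}); // one id per row
            NDArray value = ce.evaluate(new NDList(classIds), new NDList(logits));
            System.out.println(value); // scalar mean loss
        }
    }
}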
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/TabNetClassificationLoss.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
/**
* Calculates the loss for TabNet in classification tasks.
*
* <p>TabNet is used not only for supervised learning but also for unsupervised learning. For
* unsupervised learning, the loss should come from the decoder (a.k.a. the
* attentionTransformer of TabNet).
*/
public final class TabNetClassificationLoss extends Loss {
/** Creates a new instance of {@code TabNetClassificationLoss}. */
public TabNetClassificationLoss() {
this("TabNetClassificationLoss");
}
/**
* Creates a new instance of {@code TabNetClassificationLoss}.
*
* @param name the name of the loss function
*/
public TabNetClassificationLoss(String name) {
super(name);
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
return Loss.softmaxCrossEntropyLoss()
.evaluate(labels, new NDList(predictions.get(0)))
.add(predictions.get(1).mean());
}
}
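// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of the two-element prediction NDList this loss expects:
// [logits, sparseLoss]. In real use, the sparse loss comes from TabNet's
// forward pass; here it is faked with a scalar. Values are illustrative.
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.training.loss.TabNetClassificationLoss;

public class TabNetClsLossSketch {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            TabNetClassificationLoss loss = new TabNetClassificationLoss();
            NDArray logits = manager.create(new float[] {2f, 0f, 0f, 2f}, new Shape(2, 2));
            NDArray sparseLoss = manager.create(0.01f); // stand-in regularization term
            NDArray classIds = manager.create(new float[] {0f, 1f});
            NDArray value =
                    loss.evaluate(new NDList(classIds), new NDList(logits, sparseLoss));
            System.out.println(value);
        }
    }
}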
|
0
|
java-sources/ai/djl/api/0.34.0/ai/djl/training
|
java-sources/ai/djl/api/0.34.0/ai/djl/training/loss/TabNetRegressionLoss.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.training.loss;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
/**
* Calculates the loss of TabNet for regression tasks.
*
* <p>TabNet is used not only for supervised learning but also for unsupervised learning. For
* unsupervised learning, the loss should come from the decoder (a.k.a. the
* attentionTransformer of TabNet).
*/
public class TabNetRegressionLoss extends Loss {
/** Creates a new instance of {@code TabNetRegressionLoss}. */
public TabNetRegressionLoss() {
this("TabNetRegressionLoss");
}
/**
* Creates a new instance of {@code TabNetRegressionLoss}.
*
* @param name the name of the loss function
*/
public TabNetRegressionLoss(String name) {
super(name);
}
/** {@inheritDoc} */
@Override
public NDArray evaluate(NDList labels, NDList predictions) {
// The sparse loss is already calculated inside TabNet's forward pass,
// so here we just need to read it out of the prediction
return labels.singletonOrThrow()
.sub(predictions.get(0))
.square()
.mean()
.add(predictions.get(1).mean());
}
}
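// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal sketch of the evaluate contract above: MSE on predictions.get(0)
// plus the mean of the sparse loss in predictions.get(1), which TabNet's
// forward pass would normally supply. Values are illustrative.
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.training.loss.TabNetRegressionLoss;

public class TabNetRegLossSketch {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            TabNetRegressionLoss loss = new TabNetRegressionLoss();
            NDArray output = manager.create(new float[] {0.5f, 1.5f}, new Shape(2, 1));
            NDArray sparseLoss = manager.create(0.02f); // stand-in regularization term
            NDList labels = new NDList(manager.ones(new Shape(2, 1)));
            NDArray value = loss.evaluate(labels, new NDList(output, sparseLoss));
            System.out.println(value);
        }
    }
}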
|