Schema: index (int64) | repo_id (string) | file_path (string) | content (string)

index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/plugins/package-info.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains plugin manager implementation of the model server.
*
* @author erik.bamberg@web.de
*/
package ai.djl.serving.plugins;
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/util/ConfigManager.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import ai.djl.serving.Arguments;
import ai.djl.util.Utils;
import ai.djl.util.cuda.CudaUtils;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.util.SelfSignedCertificate;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.security.KeyException;
import java.security.KeyFactory;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
/** A class that holds configuration information. */
public final class ConfigManager {
private static final String DEBUG = "debug";
private static final String INFERENCE_ADDRESS = "inference_address";
private static final String MANAGEMENT_ADDRESS = "management_address";
private static final String LOAD_MODELS = "load_models";
private static final String DEFAULT_WORKERS_PER_MODEL = "default_workers_per_model";
private static final String NUMBER_OF_NETTY_THREADS = "number_of_netty_threads";
private static final String NETTY_CLIENT_THREADS = "netty_client_threads";
private static final String JOB_QUEUE_SIZE = "job_queue_size";
private static final String MAX_IDLE_TIME = "max_idle_time";
private static final String NUMBER_OF_GPU = "number_of_gpu";
private static final String BATCH_SIZE = "batch_size";
private static final String MAX_BATCH_DELAY = "max_batch_delay";
private static final String CORS_ALLOWED_ORIGIN = "cors_allowed_origin";
private static final String CORS_ALLOWED_METHODS = "cors_allowed_methods";
private static final String CORS_ALLOWED_HEADERS = "cors_allowed_headers";
private static final String KEYSTORE = "keystore";
private static final String KEYSTORE_PASS = "keystore_pass";
private static final String KEYSTORE_TYPE = "keystore_type";
private static final String CERTIFICATE_FILE = "certificate_file";
private static final String PRIVATE_KEY_FILE = "private_key_file";
private static final String MAX_REQUEST_SIZE = "max_request_size";
private static final String MODEL_STORE = "model_store";
private static final String MODEL_URL_PATTERN = "model_url_pattern";
private static final String PLUGIN_FOLDER = "plugin_folder";
// Configurations that are not documented or are enabled through environment variables
private static final String USE_NATIVE_IO = "use_native_io";
private static final String IO_RATIO = "io_ratio";
private static ConfigManager instance;
private Properties prop;
private ConfigManager(Arguments args) {
prop = new Properties();
Path file = args.getConfigFile();
if (file != null) {
try (InputStream stream = Files.newInputStream(file)) {
prop.load(stream);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to read configuration file", e);
}
prop.put("configFile", file.toString());
}
String modelStore = args.getModelStore();
if (modelStore != null) {
prop.setProperty(MODEL_STORE, modelStore);
}
String[] models = args.getModels();
if (models != null) {
prop.setProperty(LOAD_MODELS, String.join(",", models));
}
int gpus = getIntProperty(NUMBER_OF_GPU, Integer.MAX_VALUE);
gpus = Integer.min(gpus, CudaUtils.getGpuCount());
prop.setProperty(NUMBER_OF_GPU, String.valueOf(gpus));
}
/**
* Initializes the global {@code ConfigManager} instance.
*
* @param args the command line arguments
*/
public static void init(Arguments args) {
instance = new ConfigManager(args);
}
/**
* Returns the singleton {@code ConfigManager} instance.
*
* @return the singleton {@code ConfigManager} instance
*/
public static ConfigManager getInstance() {
return instance;
}
/**
* Returns whether debug is enabled.
*
* @return {@code true} if debug is enabled
*/
public boolean isDebug() {
return Boolean.getBoolean("ai.djl.debug")
|| Boolean.parseBoolean(prop.getProperty(DEBUG, "false"));
}
/**
* Returns the model server's socket connector.
*
* @param type the type of connector
* @return the {@code Connector}
*/
public Connector getConnector(Connector.ConnectorType type) {
String binding;
if (type == Connector.ConnectorType.MANAGEMENT) {
binding = prop.getProperty(MANAGEMENT_ADDRESS, "http://127.0.0.1:8080");
} else {
binding = prop.getProperty(INFERENCE_ADDRESS, "http://127.0.0.1:8080");
}
return Connector.parse(binding, type);
}
/**
* Returns the configured netty threads.
*
* @return the configured netty threads
*/
public int getNettyThreads() {
return getIntProperty(NUMBER_OF_NETTY_THREADS, 0);
}
/**
* Returns the configured netty client threads.
*
* @return the configured netty client threads
*/
public int getNettyClientThreads() {
return getIntProperty(NETTY_CLIENT_THREADS, 0);
}
/**
* Returns the default job queue size.
*
* @return the default job queue size
*/
public int getJobQueueSize() {
return getIntProperty(JOB_QUEUE_SIZE, 100);
}
/**
* Returns the default max idle time for workers.
*
* @return the default max idle time
*/
public int getMaxIdleTime() {
return getIntProperty(MAX_IDLE_TIME, 60);
}
/**
* Returns the default batchSize for workers.
*
* @return the default batch size
*/
public int getBatchSize() {
return getIntProperty(BATCH_SIZE, 1);
}
/**
* Returns the default maxBatchDelay for the working queue.
*
* @return the default max batch delay
*/
public int getMaxBatchDelay() {
return getIntProperty(MAX_BATCH_DELAY, 300);
}
/**
* Returns the number of GPUs to be used.
*
* @return the number of GPUs to be used
*/
public int getNumberOfGpu() {
return getIntProperty(NUMBER_OF_GPU, 0);
}
/**
* Returns the default number of workers for a new registered model.
*
* @return the default number of workers for a new registered model
*/
public int getDefaultWorkers() {
if (isDebug()) {
return 1;
}
int workers = getIntProperty(DEFAULT_WORKERS_PER_MODEL, 0);
if (workers == 0) {
workers = getNumberOfGpu();
}
if (workers == 0) {
workers = Runtime.getRuntime().availableProcessors();
}
return workers;
}
/**
* Returns the model server home directory.
*
* @return the model server home directory
*/
public static String getModelServerHome() {
String home = System.getenv("MODEL_SERVER_HOME");
if (home == null) {
home = System.getProperty("MODEL_SERVER_HOME");
if (home == null) {
home = getCanonicalPath(".");
return home;
}
}
Path dir = Paths.get(home);
if (!Files.isDirectory(dir)) {
throw new IllegalArgumentException("Model server home not exist: " + home);
}
home = getCanonicalPath(dir);
return home;
}
/**
* Returns the model store location.
*
* @return the model store location
*/
public Path getModelStore() {
return getPathProperty(MODEL_STORE);
}
/**
* Returns the allowed model url pattern regex.
*
* @return the allowed model url pattern regex
*/
public String getModelUrlPattern() {
return prop.getProperty(MODEL_URL_PATTERN);
}
/**
* Returns the model URLs to be loaded at startup.
*
* @return the model URLs to be loaded at startup
*/
public String getLoadModels() {
return prop.getProperty(LOAD_MODELS);
}
/**
* Returns the CORS allowed origin setting.
*
* @return the CORS allowed origin setting
*/
public String getCorsAllowedOrigin() {
return prop.getProperty(CORS_ALLOWED_ORIGIN);
}
/**
* Returns the CORS allowed methods setting.
*
* @return the CORS allowed methods setting
*/
public String getCorsAllowedMethods() {
return prop.getProperty(CORS_ALLOWED_METHODS);
}
/**
* Returns the CORS allowed headers setting.
*
* @return the CORS allowed headers setting
*/
public String getCorsAllowedHeaders() {
return prop.getProperty(CORS_ALLOWED_HEADERS);
}
/**
* Returns the folder where the model server searches for plugins.
*
* @return the configured plugin folder or the default folder
*/
public Path getPluginFolder() {
return getPathProperty(PLUGIN_FOLDER, "plugins");
}
/**
* Returns an {@code SslContext} instance.
*
* @return an {@code SslContext} instance
* @throws IOException if failed to read certificate file
* @throws GeneralSecurityException if failed to initialize {@code SSLContext}
*/
public SslContext getSslContext() throws IOException, GeneralSecurityException {
List<String> supportedCiphers =
Arrays.asList(
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256");
PrivateKey privateKey;
X509Certificate[] chain;
Path keyStoreFile = getPathProperty(KEYSTORE);
Path privateKeyFile = getPathProperty(PRIVATE_KEY_FILE);
Path certificateFile = getPathProperty(CERTIFICATE_FILE);
if (keyStoreFile != null) {
char[] keystorePass = getProperty(KEYSTORE_PASS, "changeit").toCharArray();
String keystoreType = getProperty(KEYSTORE_TYPE, "PKCS12");
KeyStore keyStore = KeyStore.getInstance(keystoreType);
try (InputStream is = Files.newInputStream(keyStoreFile)) {
keyStore.load(is, keystorePass);
}
Enumeration<String> en = keyStore.aliases();
String keyAlias = null;
while (en.hasMoreElements()) {
String alias = en.nextElement();
if (keyStore.isKeyEntry(alias)) {
keyAlias = alias;
break;
}
}
if (keyAlias == null) {
throw new KeyException("No key entry found in keystore.");
}
privateKey = (PrivateKey) keyStore.getKey(keyAlias, keystorePass);
Certificate[] certs = keyStore.getCertificateChain(keyAlias);
chain = new X509Certificate[certs.length];
for (int i = 0; i < certs.length; ++i) {
chain[i] = (X509Certificate) certs[i];
}
} else if (privateKeyFile != null && certificateFile != null) {
privateKey = loadPrivateKey(privateKeyFile);
chain = loadCertificateChain(certificateFile);
} else {
SelfSignedCertificate ssc = new SelfSignedCertificate();
privateKey = ssc.key();
chain = new X509Certificate[] {ssc.cert()};
}
return SslContextBuilder.forServer(privateKey, chain)
.protocols("TLSv1.2")
.ciphers(supportedCiphers)
.build();
}
/**
* Returns the value with the specified key in this configuration.
*
* @param key the key
* @param def a default value
* @return the value with the specified key in this configuration
*/
public String getProperty(String key, String def) {
return prop.getProperty(key, def);
}
/**
* Prints out this configuration.
*
* @return a string representation of this configuration
*/
public String dumpConfigurations() {
Runtime runtime = Runtime.getRuntime();
return "\nModel server home: "
+ getModelServerHome()
+ "\nCurrent directory: "
+ getCanonicalPath(".")
+ "\nTemp directory: "
+ System.getProperty("java.io.tmpdir")
+ "\nNumber of GPUs: "
+ getNumberOfGpu()
+ "\nNumber of CPUs: "
+ runtime.availableProcessors()
+ "\nMax heap size: "
+ (runtime.maxMemory() / 1024 / 1024)
+ "\nConfig file: "
+ prop.getProperty("configFile", "N/A")
+ "\nInference address: "
+ getConnector(Connector.ConnectorType.INFERENCE)
+ "\nManagement address: "
+ getConnector(Connector.ConnectorType.MANAGEMENT)
+ "\nModel Store: "
+ (getModelStore() == null ? "N/A" : getModelStore())
+ "\nInitial Models: "
+ (getLoadModels() == null ? "N/A" : getLoadModels())
+ "\nNetty threads: "
+ getNettyThreads()
+ "\nNetty client threads: "
+ getNettyClientThreads()
+ "\nDefault workers per model: "
+ getDefaultWorkers()
+ "\nMaximum Request Size: "
+ prop.getProperty(MAX_REQUEST_SIZE, "6553500");
}
/**
* Returns whether netty native IO is used.
*
* @return {@code true} if netty native IO is used
*/
public boolean useNativeIo() {
return Boolean.parseBoolean(prop.getProperty(USE_NATIVE_IO, "true"));
}
/**
* Returns the native IO ratio.
*
* @return the native IO ratio
*/
public int getIoRatio() {
return getIntProperty(IO_RATIO, 50);
}
/**
* Returns the maximum allowed request size in bytes.
*
* @return the maximum allowed request size in bytes
*/
public int getMaxRequestSize() {
return getIntProperty(MAX_REQUEST_SIZE, 6553500);
}
private int getIntProperty(String key, int def) {
String value = prop.getProperty(key);
if (value == null) {
return def;
}
return Integer.parseInt(value);
}
private Path getPathProperty(String key) {
return getPathProperty(key, null);
}
private Path getPathProperty(String key, String defaultValue) {
String property = prop.getProperty(key, defaultValue);
if (property == null) {
return null;
}
Path path = Paths.get(property);
if (!path.isAbsolute()) {
path = Paths.get(getModelServerHome()).resolve(path);
}
return path;
}
private static String getCanonicalPath(Path file) {
try {
return file.toRealPath().toString();
} catch (IOException e) {
return file.toAbsolutePath().toString();
}
}
private static String getCanonicalPath(String path) {
if (path == null) {
return null;
}
return getCanonicalPath(Paths.get(path));
}
private PrivateKey loadPrivateKey(Path keyFile) throws IOException, GeneralSecurityException {
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
try (InputStream is = Files.newInputStream(keyFile)) {
String content = Utils.toString(is);
content = content.replaceAll("-----(BEGIN|END)( RSA)? PRIVATE KEY-----\\s*", "");
byte[] buf = Base64.getMimeDecoder().decode(content);
try {
PKCS8EncodedKeySpec privKeySpec = new PKCS8EncodedKeySpec(buf);
return keyFactory.generatePrivate(privKeySpec);
} catch (InvalidKeySpecException e) {
// old private key is OpenSSL format private key
buf = OpenSslKey.convertPrivateKey(buf);
PKCS8EncodedKeySpec privKeySpec = new PKCS8EncodedKeySpec(buf);
return keyFactory.generatePrivate(privKeySpec);
}
}
}
private X509Certificate[] loadCertificateChain(Path keyFile)
throws IOException, GeneralSecurityException {
CertificateFactory cf = CertificateFactory.getInstance("X.509");
try (InputStream is = Files.newInputStream(keyFile)) {
Collection<? extends Certificate> certs = cf.generateCertificates(is);
int i = 0;
X509Certificate[] chain = new X509Certificate[certs.size()];
for (Certificate cert : certs) {
chain[i++] = (X509Certificate) cert;
}
return chain;
}
}
}
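A minimal usage sketch of the class above (not part of the dump; it assumes ConfigManager.init(args) already ran during server bootstrap, and the property keys in the comments are the constants listed at the top of the class):

import java.nio.file.Path;
import ai.djl.serving.util.ConfigManager;

final class ConfigManagerSketch {
    static void printEffectiveConfig() {
        ConfigManager config = ConfigManager.getInstance();
        int queueSize = config.getJobQueueSize(); // job_queue_size, defaults to 100
        Path plugins = config.getPluginFolder();  // plugin_folder, defaults to "plugins"
        System.out.println(config.dumpConfigurations());
        System.out.println("queue=" + queueSize + ", plugins=" + plugins);
    }
}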
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/util/Connector.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import ai.djl.util.Utils;
import io.netty.channel.Channel;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.ServerChannel;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollDomainSocketChannel;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollServerDomainSocketChannel;
import io.netty.channel.epoll.EpollServerSocketChannel;
import io.netty.channel.epoll.EpollSocketChannel;
import io.netty.channel.kqueue.KQueue;
import io.netty.channel.kqueue.KQueueDomainSocketChannel;
import io.netty.channel.kqueue.KQueueEventLoopGroup;
import io.netty.channel.kqueue.KQueueServerDomainSocketChannel;
import io.netty.channel.kqueue.KQueueServerSocketChannel;
import io.netty.channel.kqueue.KQueueSocketChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.channel.unix.DomainSocketAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** A class that represents the model server's socket listener. */
public final class Connector {
private static final Pattern ADDRESS_PATTERN =
Pattern.compile(
"((https|http)://([^:^/]+)(:([0-9]+))?)|(unix:(/.*))",
Pattern.CASE_INSENSITIVE);
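// Capture groups of ADDRESS_PATTERN, as consumed by parse() below:
//   group(2) = protocol ("http" or "https"), group(3) = host,
//   group(5) = optional port, group(7) = unix domain socket path
//   (non-null only for "unix:/..." bindings).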
private static boolean useNativeIo = ConfigManager.getInstance().useNativeIo();
private boolean uds;
private String socketPath;
private String bindIp;
private int port;
private boolean ssl;
private ConnectorType type;
private Connector(
int port,
boolean uds,
String bindIp,
String socketPath,
boolean ssl,
ConnectorType type) {
this.port = port;
this.uds = uds;
this.bindIp = bindIp;
this.socketPath = socketPath;
this.ssl = ssl;
this.type = type;
}
/**
* Creates a {@code Connector} instance from the binding string.
*
* @param binding the binding string
* @param connectorType the type of the connector
* @return a {@code Connector} instance
*/
public static Connector parse(String binding, ConnectorType connectorType) {
Matcher matcher = ADDRESS_PATTERN.matcher(binding);
if (!matcher.matches()) {
throw new IllegalArgumentException("Invalid binding address: " + binding);
}
boolean uds = matcher.group(7) != null;
if (uds) {
if (!useNativeIo) {
throw new IllegalArgumentException(
"unix domain socket requires use_native_io set to true.");
}
String path = matcher.group(7);
return new Connector(-1, true, "", path, false, connectorType);
}
String protocol = matcher.group(2);
String host = matcher.group(3);
String listeningPort = matcher.group(5);
boolean ssl = "https".equalsIgnoreCase(protocol);
int port;
if (listeningPort == null) {
port = ssl ? 443 : 80;
} else {
port = Integer.parseInt(listeningPort);
}
if (port >= 65535) {
throw new IllegalArgumentException("Invalid port number: " + binding);
}
return new Connector(port, false, host, String.valueOf(port), ssl, connectorType);
}
/**
* Returns the socket type.
*
* @return the socket type
*/
public String getSocketType() {
return uds ? "unix" : "tcp";
}
/**
* Returns if the connector is using unix domain socket.
*
* @return {@code true} if the connector is using unix domain socket
*/
public boolean isUds() {
return uds;
}
/**
* Returns whether the connector requires SSL.
*
* @return {@code true} if the connector requires SSL
*/
public boolean isSsl() {
return ssl;
}
/**
* Returns the unix domain socket path.
*
* @return the unix domain socket path
*/
public String getSocketPath() {
return socketPath;
}
/**
* Returns the TCP socket listening address.
*
* @return the TCP socket listening address
*/
public SocketAddress getSocketAddress() {
return uds ? new DomainSocketAddress(socketPath) : new InetSocketAddress(bindIp, port);
}
/**
* Returns the type of the connector.
*
* @return the type of the connector
*/
public ConnectorType getType() {
return type;
}
/**
* Creates a new netty {@code EventLoopGroup}.
*
* @param threads the number of threads
* @return a new netty {@code EventLoopGroup}
*/
public static EventLoopGroup newEventLoopGroup(int threads) {
if (useNativeIo && Epoll.isAvailable()) {
return new EpollEventLoopGroup(threads);
} else if (useNativeIo && KQueue.isAvailable()) {
return new KQueueEventLoopGroup(threads);
}
NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(threads);
eventLoopGroup.setIoRatio(ConfigManager.getInstance().getIoRatio());
return eventLoopGroup;
}
/**
* Returns the server channel class.
*
* @return the server channel class
*/
public Class<? extends ServerChannel> getServerChannel() {
if (useNativeIo && Epoll.isAvailable()) {
return uds ? EpollServerDomainSocketChannel.class : EpollServerSocketChannel.class;
} else if (useNativeIo && KQueue.isAvailable()) {
return uds ? KQueueServerDomainSocketChannel.class : KQueueServerSocketChannel.class;
}
return NioServerSocketChannel.class;
}
/**
* Returns the client channel class.
*
* @return the client channel class
*/
public Class<? extends Channel> getClientChannel() {
if (useNativeIo && Epoll.isAvailable()) {
return uds ? EpollDomainSocketChannel.class : EpollSocketChannel.class;
} else if (useNativeIo && KQueue.isAvailable()) {
return uds ? KQueueDomainSocketChannel.class : KQueueSocketChannel.class;
}
return NioSocketChannel.class;
}
/** Cleans up the leftover resources. */
public void clean() {
if (uds) {
Utils.deleteQuietly(Paths.get(socketPath));
}
}
/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Connector connector = (Connector) o;
return uds == connector.uds
&& port == connector.port
&& socketPath.equals(connector.socketPath)
&& bindIp.equals(connector.bindIp);
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return Objects.hash(uds, socketPath, bindIp, port);
}
/** {@inheritDoc} */
@Override
public String toString() {
if (uds) {
return "unix:" + socketPath;
} else if (ssl) {
return "https://" + bindIp + ':' + port;
}
return "http://" + bindIp + ':' + port;
}
/** An enum representing the type of a connector. */
public enum ConnectorType {
INFERENCE,
MANAGEMENT,
BOTH
}
}
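A minimal sketch of Connector.parse (not part of the dump; note the class reads use_native_io from ConfigManager at load time, so ConfigManager.init must have run first):

import ai.djl.serving.util.Connector;

final class ConnectorSketch {
    static void demo() {
        // "https" implies ssl=true; without an explicit port, 443 would be used.
        Connector c = Connector.parse("https://0.0.0.0:8443", Connector.ConnectorType.INFERENCE);
        System.out.println(c.getSocketType()); // "tcp"
        System.out.println(c.isSsl());         // true
        System.out.println(c);                 // "https://0.0.0.0:8443"
    }
}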
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/util/NettyUtils.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import ai.djl.ModelException;
import ai.djl.modality.Input;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.http.BadRequestException;
import ai.djl.serving.http.ErrorResponse;
import ai.djl.serving.http.MethodNotAllowedException;
import ai.djl.serving.http.ResourceNotFoundException;
import ai.djl.serving.http.ServiceUnavailableException;
import ai.djl.serving.http.Session;
import ai.djl.util.JsonSerializable;
import ai.djl.util.JsonUtils;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.handler.codec.http.multipart.Attribute;
import io.netty.handler.codec.http.multipart.FileUpload;
import io.netty.handler.codec.http.multipart.InterfaceHttpData;
import io.netty.util.AttributeKey;
import io.netty.util.CharsetUtil;
import java.io.IOException;
import java.net.SocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A utility class for handling Netty requests and responses. */
public final class NettyUtils {
private static final Logger logger = LoggerFactory.getLogger("ACCESS_LOG");
private static final String REQUEST_ID = "x-request-id";
private static final AttributeKey<Session> SESSION_KEY = AttributeKey.valueOf("session");
private NettyUtils() {}
/**
* Updates the session when an HTTP request is received.
*
* @param channel the connection channel
* @param request the HTTP request
*/
public static void requestReceived(Channel channel, HttpRequest request) {
Session session = channel.attr(SESSION_KEY).get();
assert session == null;
SocketAddress address = channel.remoteAddress();
String remoteIp;
if (address == null) {
// This can be null for UDS, or in certain cases on Windows
remoteIp = "0.0.0.0";
} else {
remoteIp = address.toString();
}
channel.attr(SESSION_KEY).set(new Session(remoteIp, request));
}
/**
* Returns the request ID for the specified channel.
*
* @param channel the connection channel
* @return the request ID for the specified channel
*/
public static String getRequestId(Channel channel) {
Session accessLog = channel.attr(SESSION_KEY).get();
if (accessLog != null) {
return accessLog.getRequestId();
}
return null;
}
/**
* Sends the json object to the client.
*
* @param ctx the connection context
* @param obj the object to be sent
*/
public static void sendJsonResponse(ChannelHandlerContext ctx, Object obj) {
sendJsonResponse(ctx, obj, HttpResponseStatus.OK);
}
/**
* Sends the json object to the client with the specified status.
*
* @param ctx the connection context
* @param obj the object to be sent
* @param status the HTTP status
*/
public static void sendJsonResponse(
ChannelHandlerContext ctx, Object obj, HttpResponseStatus status) {
String content;
if (obj instanceof JsonSerializable) {
content = ((JsonSerializable) obj).toJson();
} else {
content = JsonUtils.GSON_PRETTY.toJson(obj);
}
sendJsonResponse(ctx, content, status);
}
/**
* Sends the json string to the client.
*
* @param ctx the connection context
* @param json the json string
*/
public static void sendJsonResponse(ChannelHandlerContext ctx, String json) {
sendJsonResponse(ctx, json, HttpResponseStatus.OK);
}
/**
* Sends the json string to the client with the specified status.
*
* @param ctx the connection context
* @param json the json string
* @param status the HTTP status
*/
public static void sendJsonResponse(
ChannelHandlerContext ctx, String json, HttpResponseStatus status) {
FullHttpResponse resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, false);
resp.headers().set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_JSON);
ByteBuf content = resp.content();
content.writeCharSequence(json, CharsetUtil.UTF_8);
content.writeByte('\n');
sendHttpResponse(ctx, resp, true);
}
/**
* Sends an error to the client based on the exception type.
*
* @param ctx the connection context
* @param t the exception to be sent
*/
public static void sendError(ChannelHandlerContext ctx, Throwable t) {
if (t instanceof ResourceNotFoundException || t instanceof ModelNotFoundException) {
logger.trace("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.NOT_FOUND, t);
} else if (t instanceof BadRequestException || t instanceof ModelException) {
logger.trace("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.BAD_REQUEST, t);
} else if (t instanceof MethodNotAllowedException) {
logger.trace("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.METHOD_NOT_ALLOWED, t);
} else if (t instanceof ServiceUnavailableException) {
logger.trace("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.SERVICE_UNAVAILABLE, t);
} else {
logger.error("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.INTERNAL_SERVER_ERROR, t);
}
}
/**
* Sends an error to the client with the given HTTP status and exception.
*
* @param ctx the connection context
* @param status the HTTP status
* @param t the exception to be sent
*/
public static void sendError(
ChannelHandlerContext ctx, HttpResponseStatus status, Throwable t) {
ErrorResponse error =
new ErrorResponse(status.code(), t.getClass().getSimpleName(), t.getMessage());
sendJsonResponse(ctx, error, status);
}
/**
* Sends an HTTP response to the client.
*
* @param ctx ChannelHandlerContext
* @param resp HttpResponse to send
* @param keepAlive whether to keep the connection alive
*/
public static void sendHttpResponse(
ChannelHandlerContext ctx, FullHttpResponse resp, boolean keepAlive) {
// Send the response and close the connection if necessary.
Channel channel = ctx.channel();
Session session = channel.attr(SESSION_KEY).getAndSet(null);
HttpHeaders headers = resp.headers();
ConfigManager configManager = ConfigManager.getInstance();
int code = resp.status().code();
if (session != null) {
// session might be recycled if channel is closed already.
session.setCode(code);
headers.set(REQUEST_ID, session.getRequestId());
logger.info(session.toString());
} else {
logger.info("HTTP " + code);
}
String allowedOrigin = configManager.getCorsAllowedOrigin();
String allowedMethods = configManager.getCorsAllowedMethods();
String allowedHeaders = configManager.getCorsAllowedHeaders();
if (allowedOrigin != null
&& !allowedOrigin.isEmpty()
&& !headers.contains(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN)) {
headers.set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN, allowedOrigin);
}
if (allowedMethods != null
&& !allowedMethods.isEmpty()
&& !headers.contains(HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS)) {
headers.set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS, allowedMethods);
}
if (allowedHeaders != null
&& !allowedHeaders.isEmpty()
&& !headers.contains(HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS)) {
headers.set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS, allowedHeaders);
}
// Add cache-control headers to keep browsers from caching the response
headers.set("Pragma", "no-cache");
headers.set("Cache-Control", "no-cache, no-store, must-revalidate, private");
headers.set("Expires", "Thu, 01 Jan 1970 00:00:00 UTC");
HttpUtil.setContentLength(resp, resp.content().readableBytes());
if (!keepAlive || code >= 400) {
headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
ChannelFuture f = channel.writeAndFlush(resp);
f.addListener(ChannelFutureListener.CLOSE);
} else {
headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
channel.writeAndFlush(resp);
}
}
/**
* Closes the specified channel after all queued write requests are flushed.
*
* @param ch the channel to be closed
*/
public static void closeOnFlush(Channel ch) {
if (ch.isActive()) {
ch.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
}
}
/**
* Returns the bytes for the specified {@code ByteBuf}.
*
* @param buf the {@code ByteBuf} to read
* @return the bytes for the specified {@code ByteBuf}
*/
public static byte[] getBytes(ByteBuf buf) {
// buf.array() is the whole backing array, which may be larger than the
// readable region; only return it directly when it matches exactly.
if (buf.hasArray() && buf.arrayOffset() == 0 && buf.array().length == buf.readableBytes()) {
return buf.array();
}
byte[] ret = new byte[buf.readableBytes()];
int readerIndex = buf.readerIndex();
buf.getBytes(readerIndex, ret);
return ret;
}
/**
* Reads the parameter's value for the key from the uri.
*
* @param decoder the {@code QueryStringDecoder} parsed from uri
* @param key the parameter key
* @param def the default value
* @return the parameter's value
*/
public static String getParameter(QueryStringDecoder decoder, String key, String def) {
List<String> param = decoder.parameters().get(key);
if (param != null && !param.isEmpty()) {
return param.get(0);
}
return def;
}
/**
* Reads the parameter's integer value for the key from the uri.
*
* @param decoder the {@code QueryStringDecoder} parsed from uri
* @param key the parameter key
* @param def the default value
* @return the parameter's integer value
* @throws NumberFormatException if the parameter value is not numeric
*/
public static int getIntParameter(QueryStringDecoder decoder, String key, int def) {
String value = getParameter(decoder, key, null);
if (value == null || value.isEmpty()) {
return def;
}
return Integer.parseInt(value);
}
/**
* Parses form data and adds it to the {@link Input} object.
*
* @param data the form data
* @param input the {@link Input} object to be added to
*/
public static void addFormData(InterfaceHttpData data, Input input) {
if (data == null) {
return;
}
try {
String name = data.getName();
switch (data.getHttpDataType()) {
case Attribute:
Attribute attribute = (Attribute) data;
input.addData(name, attribute.getValue().getBytes(StandardCharsets.UTF_8));
break;
case FileUpload:
FileUpload fileUpload = (FileUpload) data;
input.addData(name, getBytes(fileUpload.getByteBuf()));
break;
default:
throw new IllegalArgumentException(
"Except form field, but got " + data.getHttpDataType());
}
} catch (IOException e) {
throw new AssertionError(e);
}
}
}
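A minimal sketch of the query-parameter helpers above (the request URI is a hypothetical example):

import ai.djl.serving.util.NettyUtils;
import io.netty.handler.codec.http.QueryStringDecoder;

final class ParameterSketch {
    static void demo() {
        QueryStringDecoder decoder = new QueryStringDecoder("/predictions/resnet?topK=3");
        String model = NettyUtils.getParameter(decoder, "model", "default"); // "default"
        int topK = NettyUtils.getIntParameter(decoder, "topK", 1);           // 3
        System.out.println(model + " " + topK);
    }
}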
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/util/OpenSslKey.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
/** A utility class that converts an OpenSSL private key to a PKCS8 private key. */
public final class OpenSslKey {
private static final int[] RSA_ENCRYPTION = {1, 2, 840, 113549, 1, 1, 1};
private static final byte[] NULL_BYTES = {0x05, 0x00};
private OpenSslKey() {}
/**
* Converts an OpenSSL private key to a PKCS8 private key.
*
* @param keySpec OpenSSL key spec
* @return PKCS8 encoded private key
*/
public static byte[] convertPrivateKey(byte[] keySpec) {
if (keySpec == null) {
return null;
}
byte[] bytes = new byte[keySpec.length];
System.arraycopy(keySpec, 0, bytes, 0, keySpec.length);
byte[] octetBytes = encodeOctetString(bytes);
byte[] oidBytes = encodeOID(RSA_ENCRYPTION);
byte[] verBytes = {0x02, 0x01, 0x00};
byte[][] seqBytes = new byte[4][];
seqBytes[0] = oidBytes;
seqBytes[1] = NULL_BYTES;
seqBytes[2] = null;
byte[] oidSeqBytes = encodeSequence(seqBytes);
seqBytes[0] = verBytes;
seqBytes[1] = oidSeqBytes;
seqBytes[2] = octetBytes;
seqBytes[3] = null;
return encodeSequence(seqBytes);
}
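// The PKCS#8 structure assembled above (RFC 5208), built bottom-up:
//   SEQUENCE {
//     INTEGER 0                                            -- version
//     SEQUENCE {                                           -- AlgorithmIdentifier
//       OBJECT IDENTIFIER 1.2.840.113549.1.1.1 (rsaEncryption)
//       NULL
//     }
//     OCTET STRING { <the original PKCS#1 RSAPrivateKey bytes> }
//   }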
private static byte[] encodeOID(int[] oid) {
if (oid == null) {
return null;
}
int oLen = 1;
for (int i = 2; i < oid.length; i++) {
oLen += getOIDCompLength(oid[i]);
}
int len = oLen + getLengthOfLengthField(oLen) + 1;
byte[] bytes = new byte[len];
bytes[0] = 0x06; // ASN Object ID
int offset = writeLengthField(bytes, oLen);
bytes[offset++] = (byte) (40 * oid[0] + oid[1]);
for (int i = 2; i < oid.length; i++) {
offset = writeOIDComp(oid[i], bytes, offset);
}
return bytes;
}
private static byte[] encodeOctetString(byte[] bytes) {
if (bytes == null) {
return null;
}
int oLen = bytes.length; // content length of the octet string
int len = oLen + getLengthOfLengthField(oLen) + 1;
byte[] newBytes = new byte[len];
newBytes[0] = 0x04;
int offset = writeLengthField(newBytes, oLen);
if (len - oLen != offset) {
return null;
}
System.arraycopy(bytes, 0, newBytes, offset, oLen);
return newBytes;
}
private static byte[] encodeSequence(byte[][] byteArrays) {
if (byteArrays == null) {
return null;
}
int oLen = 0;
for (byte[] b : byteArrays) {
if (b == null) {
break;
}
oLen += b.length;
}
int len = oLen + getLengthOfLengthField(oLen) + 1;
byte[] bytes = new byte[len];
bytes[0] = 0x10 | 0x20; // ASN sequence & constructed
int offset = writeLengthField(bytes, oLen);
if (len - oLen != offset) {
return null;
}
for (byte[] b : byteArrays) {
if (b == null) {
break;
}
System.arraycopy(b, 0, bytes, offset, b.length);
offset += b.length;
}
return bytes;
}
private static int writeLengthField(byte[] bytes, int len) {
if (len < 127) {
bytes[1] = (byte) len;
return 2;
}
int lenOfLenField = getLengthOfLengthField(len);
bytes[1] = (byte) ((lenOfLenField - 1) | 0x80); // record length of the length field
for (int i = lenOfLenField; i >= 2; i--) { // write the length
bytes[i] = (byte) (len >> ((lenOfLenField - i) * 8));
}
return lenOfLenField + 1;
}
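// Worked example (long form): len = 300 (0x012C) -> getLengthOfLengthField = 3,
// so bytes[1] = 0x82, bytes[2] = 0x01, bytes[3] = 0x2C, and the method returns 4.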
private static int getLengthOfLengthField(int len) {
if (len <= 127) { // highest bit is zero, one byte is enough
return 1;
} else if (len <= 0xFF) { // highest bit is 1, two bytes in the form {0x81, 0xab}
return 2;
} else if (len <= 0xFFFF) { // three bytes in the form {0x82, 0xab, 0xcd}
return 3;
} else if (len <= 0xFFFFFF) { // four bytes in the form {0x83, 0xab, 0xcd, 0xef}
return 4;
} else { // five bytes in the form {0x84, 0xab, 0xcd, 0xef, 0x12}
return 5;
}
}
private static int getOIDCompLength(int comp) {
if (comp <= 0x7F) {
return 1;
} else if (comp <= 0x3FFF) {
return 2;
} else if (comp <= 0x1FFFFF) {
return 3;
} else if (comp <= 0xFFFFFFF) {
return 4;
} else {
return 5;
}
}
private static int writeOIDComp(int comp, byte[] bytes, int offset) {
int len = getOIDCompLength(comp);
int off = offset;
for (int i = len - 1; i > 0; i--) {
bytes[off++] = (byte) ((comp >>> i * 7) | 0x80);
}
bytes[off++] = (byte) (comp & 0x7F);
return off;
}
}
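A minimal sketch of the conversion (not part of the dump; "key.der" is a hypothetical file holding the Base64-decoded body of a "-----BEGIN RSA PRIVATE KEY-----" PKCS#1 PEM block, mirroring the fallback in ConfigManager.loadPrivateKey):

import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyFactory;
import java.security.PrivateKey;
import java.security.spec.PKCS8EncodedKeySpec;
import ai.djl.serving.util.OpenSslKey;

final class KeyConversionSketch {
    public static void main(String[] args) throws Exception {
        byte[] pkcs1 = Files.readAllBytes(Paths.get("key.der")); // hypothetical path
        byte[] pkcs8 = OpenSslKey.convertPrivateKey(pkcs1);      // wrap in PKCS#8
        PrivateKey key =
                KeyFactory.getInstance("RSA").generatePrivate(new PKCS8EncodedKeySpec(pkcs8));
        System.out.println(key.getAlgorithm()); // "RSA"
    }
}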
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/util/ServerGroups.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.ChannelGroupFuture;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.util.concurrent.GlobalEventExecutor;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class that holds all opened {@code ChannelGroup} and {@code EventLoopGroup}. */
public class ServerGroups {
static final Logger logger = LoggerFactory.getLogger(ServerGroups.class);
private ChannelGroup allChannels;
private EventLoopGroup serverGroup;
private EventLoopGroup childGroup;
private ConfigManager configManager;
/**
* Constructs a new {@code ServerGroups} instance.
*
* @param configManager the configuration
*/
public ServerGroups(ConfigManager configManager) {
this.configManager = configManager;
reset();
}
/** Resets the {@code ServerGroups}. */
public final void reset() {
allChannels = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
serverGroup = Connector.newEventLoopGroup(2);
childGroup = Connector.newEventLoopGroup(configManager.getNettyThreads());
}
/**
* Shuts down all opened channels and event loops.
*
* @param graceful {@code true} to gracefully clean up the resources
*/
public void shutdown(boolean graceful) {
closeAllChannels(graceful);
List<EventLoopGroup> allEventLoopGroups = new ArrayList<>();
allEventLoopGroups.add(serverGroup);
allEventLoopGroups.add(childGroup);
for (EventLoopGroup group : allEventLoopGroups) {
if (graceful) {
group.shutdownGracefully();
} else {
group.shutdownGracefully(0, 0, TimeUnit.SECONDS);
}
}
if (graceful) {
for (EventLoopGroup group : allEventLoopGroups) {
try {
group.awaitTermination(60, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
}
/**
* Returns the server event loop group.
*
* @return the server event loop group
*/
public EventLoopGroup getServerGroup() {
return serverGroup;
}
/**
* Returns the client event loop group.
*
* @return the client event loop group
*/
public EventLoopGroup getChildGroup() {
return childGroup;
}
/**
* Registers a channel to be tracked.
*
* @param channel a channel to be tracked
*/
public void registerChannel(Channel channel) {
allChannels.add(channel);
}
private void closeAllChannels(boolean graceful) {
ChannelGroupFuture future = allChannels.close();
// If this is a graceful shutdown, log any channel closing failures;
// otherwise, ignore them.
if (graceful) {
try {
future.await(10, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (!future.isSuccess()) {
for (ChannelFuture cf : future) {
if (!cf.isSuccess()) {
logger.info("Unable to close channel: " + cf.channel(), cf.cause());
}
}
}
}
}
}
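A minimal lifecycle sketch for the class above (assumes ConfigManager.init(args) already ran; the Netty bootstrap wiring is elided):

import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.ServerGroups;

final class LifecycleSketch {
    static void runAndStop() {
        ServerGroups groups = new ServerGroups(ConfigManager.getInstance());
        // ... bootstrap Netty with groups.getServerGroup() as the acceptor group and
        // groups.getChildGroup() for I/O, registering accepted channels via
        // groups.registerChannel(channel) ...
        groups.shutdown(true); // graceful: close channels, then await loop termination
    }
}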
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/util/package-info.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains utilities used by model server. */
package ai.djl.serving.util;
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/BatchAggregator.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import io.netty.handler.codec.http.HttpResponseStatus;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
/**
* An abstract class for all batch aggregators. A batch aggregator checks the working queue and
* combines multiple jobs into one batch. Batching jobs enables optimizations in the underlying
* engines.
*
* @author erik.bamberg@web.de
*/
abstract class BatchAggregator {
protected int batchSize;
protected List<Job> jobs;
protected LinkedBlockingDeque<Job> jobQueue;
/**
* Constructs a new {@code BatchAggregator} instance.
*
* @param model the model to use.
* @param jobQueue the job queue to poll data from.
*/
public BatchAggregator(ModelInfo model, LinkedBlockingDeque<Job> jobQueue) {
this.batchSize = model.getBatchSize();
this.jobQueue = jobQueue;
jobs = new ArrayList<>();
}
/**
* Polls the queue and returns a list of {@code Input} objects for the model.
*
* @return list of input objects to pass to the model.
* @throws InterruptedException if the thread gets interrupted while waiting for new data in
*     the queue.
*/
public List<Input> getRequest() throws InterruptedException {
jobs = pollBatch();
List<Input> list = new ArrayList<>(jobs.size());
for (Job job : jobs) {
job.setScheduled();
list.add(job.getInput());
}
return list;
}
/**
* Sends the responses to all waiting clients.
*
* @param outputs the list of model outputs in the same order as the input objects.
*/
public void sendResponse(List<Output> outputs) {
if (jobs.size() != outputs.size()) {
throw new IllegalStateException("Not all jobs get response.");
}
int i = 0;
for (Output output : outputs) {
String requestId = output.getRequestId();
Job job = jobs.get(i++);
if (!job.getRequestId().equals(requestId)) {
throw new IllegalStateException("Request response mismatched.");
}
job.sendOutput(output);
}
jobs.clear();
}
/** Sends an internal server error. */
public void sendError() {
for (Job job : jobs) {
job.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Internal server error");
}
jobs.clear();
}
/**
* Fills in the list with a batch of jobs.
*
* @return a list of jobs read by this batch iteration.
* @throws InterruptedException if interrupted
*/
protected abstract List<Job> pollBatch() throws InterruptedException;
/**
* Checks whether this {@code BatchAggregator} and its thread can be shut down, or whether this
* aggregator is waiting for more data.
*
* @return {@code true} if the thread can be shut down, for example when the max idle time is
*     exceeded in a temporary batch aggregator.
*/
public abstract boolean isFinished();
}
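A hypothetical minimal subclass (not part of the dump; it must live in the same ai.djl.serving.wlm package because the base class is package-private) illustrating the pollBatch contract: block for the first job, then top up the batch without waiting:

package ai.djl.serving.wlm;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;

class EagerBatchAggregator extends BatchAggregator {
    EagerBatchAggregator(ModelInfo model, LinkedBlockingDeque<Job> jobQueue) {
        super(model, jobQueue);
    }

    @Override
    protected List<Job> pollBatch() throws InterruptedException {
        List<Job> batch = new ArrayList<>(batchSize);
        batch.add(jobQueue.take());             // wait for at least one job
        jobQueue.drainTo(batch, batchSize - 1); // drain up to batchSize - 1 more, non-blocking
        return batch;
    }

    @Override
    public boolean isFinished() {
        return false; // a permanent aggregator is shut down externally
    }
}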
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/Endpoint.java
content:
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/** A class that represents a webservice endpoint. */
public class Endpoint {
private List<ModelInfo> models;
private Map<String, Integer> map;
private AtomicInteger position;
/** Constructs an {@code Endpoint} instance. */
public Endpoint() {
models = new ArrayList<>();
map = new ConcurrentHashMap<>();
position = new AtomicInteger(0);
}
/**
* Adds a model to the endpoint.
*
* @param modelInfo the model to be added
* @return {@code true} if the model was added successfully
*/
public synchronized boolean add(ModelInfo modelInfo) {
String version = modelInfo.getVersion();
if (version == null) {
if (models.isEmpty()) {
map.put("default", 0);
return models.add(modelInfo);
}
return false;
}
if (map.containsKey(version)) {
return false;
}
map.put(version, models.size());
return models.add(modelInfo);
}
/**
* Returns the models associated with the endpoint.
*
* @return the models associated with the endpoint
*/
public List<ModelInfo> getModels() {
return models;
}
/**
* Removes a model version from the {@code Endpoint}.
*
* @param version the model version
* @return the removed {@code ModelInfo}, or null if the specified version doesn't exist
*/
public synchronized ModelInfo remove(String version) {
if (version == null) {
if (models.isEmpty()) {
return null;
}
ModelInfo model = models.remove(0);
reIndex();
return model;
}
Integer index = map.remove(version);
if (index == null) {
return null;
}
ModelInfo model = models.remove((int) index);
reIndex();
return model;
}
/**
* Returns the {@code ModelInfo} for the specified version.
*
* @param version the version of the model to retrieve
* @return the {@code ModelInfo} for the specified version
*/
public ModelInfo get(String version) {
Integer index = map.get(version);
if (index == null) {
return null;
}
return models.get(index);
}
/**
* Returns the next version of model to serve the inference request.
*
* @return the next version of model to serve the inference request
*/
public ModelInfo next() {
int size = models.size();
if (size == 1) {
return models.get(0);
}
int index = position.getAndUpdate(operand -> (operand + 1) % size);
return models.get(index);
}
private void reIndex() {
map.clear();
int size = models.size();
for (int i = 0; i < size; ++i) {
ModelInfo modelInfo = models.get(i);
String version = modelInfo.getVersion();
if (version != null) {
map.put(version, i);
}
}
}
}
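A minimal sketch of the round-robin behavior of next() (v1 and v2 are hypothetical ModelInfo instances with versions "1" and "2"):

import ai.djl.serving.wlm.Endpoint;
import ai.djl.serving.wlm.ModelInfo;

final class EndpointSketch {
    static void roundRobin(ModelInfo v1, ModelInfo v2) {
        Endpoint endpoint = new Endpoint();
        endpoint.add(v1);
        endpoint.add(v2);
        ModelInfo a = endpoint.next(); // version "1"
        ModelInfo b = endpoint.next(); // version "2"
        ModelInfo c = endpoint.next(); // wraps back to version "1"
    }
}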
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/Job.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.serving.http.InternalServerException;
import ai.djl.serving.util.NettyUtils;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class represents an inference job. */
public class Job {
private static final Logger logger = LoggerFactory.getLogger(Job.class);
private ChannelHandlerContext ctx;
private ModelInfo modelInfo;
private Input input;
private long begin;
private long scheduled;
/**
* Constructs a new {@code Job} instance.
*
* @param ctx the {@code ChannelHandlerContext}
* @param modelInfo the model to run the job
* @param input the input data
*/
public Job(ChannelHandlerContext ctx, ModelInfo modelInfo, Input input) {
this.ctx = ctx;
this.modelInfo = modelInfo;
this.input = input;
begin = System.currentTimeMillis();
scheduled = begin;
}
/**
* Returns the request id.
*
* @return the request id
*/
public String getRequestId() {
return input.getRequestId();
}
/**
* Returns the model associated with this job.
*
* @return the model associated with this job
*/
public ModelInfo getModel() {
return modelInfo;
}
/**
* Returns the input data.
*
* @return the input data
*/
public Input getInput() {
return input;
}
/** Marks the job as scheduled. */
public void setScheduled() {
scheduled = System.currentTimeMillis();
}
/**
* Sends the response back to the client.
*
* @param output the output
*/
public void sendOutput(Output output) {
FullHttpResponse resp =
new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, false);
for (Map.Entry<String, String> entry : output.getProperties().entrySet()) {
resp.headers().set(entry.getKey(), entry.getValue());
}
resp.content().writeBytes(output.getContent());
/*
* Models can be loaded based on the configuration file. Since such a Job is
* not driven by an external connection, the context for this job may be
* empty. We shouldn't try to send a response to ctx if the job was not
* triggered by an external client.
*/
if (ctx != null) {
NettyUtils.sendHttpResponse(ctx, resp, true);
}
logger.debug(
"Waiting time: {}, Backend time: {}",
scheduled - begin,
System.currentTimeMillis() - scheduled);
}
/**
* Sends error to the client.
*
* @param status the HTTP status
* @param error the error message
*/
public void sendError(HttpResponseStatus status, String error) {
/*
* Models can be loaded based on the configuration file. Since such a Job is
* not driven by an external connection, the context for this job may be
* empty. We shouldn't try to send a response to ctx if the job was not
* triggered by an external client.
*/
if (ctx != null) {
NettyUtils.sendError(ctx, status, new InternalServerException(error));
}
logger.debug(
"Waiting time: {}, Inference time: {}",
scheduled - begin,
System.currentTimeMillis() - begin);
}
}
index: 0 | repo_id: java-sources/ai/djl/serving/0.12.0/ai/djl/serving
file_path: java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/ModelInfo.java
content:
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.repository.FilenameUtils;
import ai.djl.repository.zoo.ZooModel;
import java.net.URI;
import java.nio.file.Path;
import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class that represents a loaded model and its metadata. */
public final class ModelInfo implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(ModelInfo.class);
private String modelName;
private String version;
private String modelUrl;
private int minWorkers;
private int maxWorkers;
private int queueSize;
private int batchSize;
private int maxBatchDelay;
private int maxIdleTime;
private ZooModel<Input, Output> model;
/**
* Constructs a new {@code ModelInfo} instance.
*
* @param modelName the name of the model that will be used as HTTP endpoint
* @param version the version of the model
* @param modelUrl the model url
* @param model the {@link ZooModel}
* @param queueSize the maximum request queue size
* @param maxIdleTime the initial maximum idle time for workers.
* @param maxBatchDelay the initial maximum delay when scaling up before giving up.
* @param batchSize the batch size for this model.
*/
public ModelInfo(
String modelName,
String version,
String modelUrl,
ZooModel<Input, Output> model,
int queueSize,
int maxIdleTime,
int maxBatchDelay,
int batchSize) {
this.modelName = modelName;
this.version = version;
this.modelUrl = modelUrl;
this.model = model;
this.maxBatchDelay = maxBatchDelay;
this.maxIdleTime = maxIdleTime; // default max idle time 60s
this.queueSize = queueSize;
this.batchSize = batchSize;
}
/**
* Sets a new batchSize and returns the reconfigured ModelInfo object. You have to
* trigger updates in the {@code ModelManager} using this model.
*
* @param batchSize the batchSize to set
* @return new configured ModelInfo.
*/
public ModelInfo configureModelBatch(int batchSize) {
this.batchSize = batchSize;
return this;
}
/**
* Sets new worker capacities for this model and returns the reconfigured ModelInfo object. You
* have to trigger updates in the {@code ModelManager} using this model.
*
* @param minWorkers the minimum number of workers.
* @param maxWorkers the maximum number of workers.
* @return new configured ModelInfo.
*/
public ModelInfo scaleWorkers(int minWorkers, int maxWorkers) {
this.minWorkers = minWorkers;
this.maxWorkers = maxWorkers;
return this;
}
/**
* Sets a new configuration for the worker pool backing this model and returns the reconfigured
* ModelInfo object. You have to trigger updates in the {@code ModelManager} using this model.
*
* @param maxIdleTime the time a WorkerThread can be idle before this worker is scaled down.
* @param maxBatchDelay the maximum time to wait for free space in the worker queue after
*     scaling up workers, before giving up on offering the job to the queue.
* @return new configured ModelInfo.
*/
public ModelInfo configurePool(int maxIdleTime, int maxBatchDelay) {
this.maxIdleTime = maxIdleTime;
this.maxBatchDelay = maxBatchDelay;
return this;
}
/**
* Returns the loaded {@link ZooModel}.
*
* @return the loaded {@link ZooModel}
*/
public ZooModel<Input, Output> getModel() {
return model;
}
/**
* Returns the model name.
*
* @return the model name
*/
public String getModelName() {
return modelName;
}
/**
* Returns the model version.
*
* @return the model version
*/
public String getVersion() {
return version;
}
/**
* Returns the model url.
*
* @return the model url
*/
public String getModelUrl() {
return modelUrl;
}
/**
* Returns the model cache directory.
*
* @return the model cache directory
*/
public Path getModelDir() {
return model.getModelPath();
}
/**
     * Returns the configured maxIdleTime of workers.
*
* @return the maxIdleTime
*/
public int getMaxIdleTime() {
return maxIdleTime;
}
/**
* Returns the configured minimum number of workers.
*
* @return the configured minimum number of workers
*/
public int getMinWorkers() {
return minWorkers;
}
/**
* Returns the configured maximum number of workers.
*
* @return the configured maximum number of workers
*/
public int getMaxWorkers() {
return maxWorkers;
}
/**
* Returns the configured batch size.
*
* @return the configured batch size
*/
public int getBatchSize() {
return batchSize;
}
/**
* Returns the maximum delay in milliseconds to aggregate a batch.
*
* @return the maximum delay in milliseconds to aggregate a batch
*/
public int getMaxBatchDelay() {
return maxBatchDelay;
}
/**
* Returns the configured size of the workers queue.
*
     * @return the configured size of the workers queue
*/
public int getQueueSize() {
return queueSize;
}
/** {@inheritDoc} */
@Override
public void close() {
if (model != null) {
logger.debug("closing model {}", modelName);
model.close();
}
}
/**
     * Infers the model name from the model URL in case the model name is not provided.
*
* @param url the model URL
* @return the model name
*/
public static String inferModelNameFromUrl(String url) {
URI uri = URI.create(url);
String path = uri.getPath();
boolean isDirectory = path.endsWith("/");
if (isDirectory) {
path = path.substring(0, path.length() - 1);
}
int pos = path.lastIndexOf('/');
String modelName;
if (pos >= 0) {
modelName = path.substring(pos + 1);
} else {
modelName = path;
}
if (!isDirectory) {
modelName = FilenameUtils.getNamePart(modelName);
}
modelName = modelName.replaceAll("(\\W|^_)", "_");
return modelName;
}
/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ModelInfo)) {
return false;
}
ModelInfo modelInfo = (ModelInfo) o;
return modelName.equals(modelInfo.modelName) && Objects.equals(version, modelInfo.version);
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return Objects.hash(modelName, version);
}
/** {@inheritDoc} */
@Override
public String toString() {
if (version != null) {
return modelName + ':' + version;
}
return modelName;
}
}
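/*
 * Illustrative usage sketch (editor's addition, not part of the original source): shows how a
 * loaded ZooModel might be wrapped in a ModelInfo and reconfigured. The URL and sizing values
 * below are assumptions chosen for illustration only.
 */
class ModelInfoExample {
    static ModelInfo wrap(ZooModel<Input, Output> model) {
        String url = "https://resources.djl.ai/demo/mlp.zip";
        // Derive the endpoint name from the URL, e.g. "mlp" for the URL above.
        String name = ModelInfo.inferModelNameFromUrl(url);
        ModelInfo info =
                new ModelInfo(
                        name, "1.0", url, model,
                        /* queueSize */ 100,
                        /* maxIdleTime */ 60,
                        /* maxBatchDelay */ 300,
                        /* batchSize */ 4);
        // Reconfiguration methods return the ModelInfo, so calls can be chained; the caller
        // still has to trigger updates in the ModelManager for the changes to take effect.
        return info.scaleWorkers(1, 4).configurePool(60, 300);
    }
}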
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/ModelManager.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.Device;
import ai.djl.ModelException;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.serving.http.BadRequestException;
import ai.djl.serving.http.DescribeModelResponse;
import ai.djl.serving.util.ConfigManager;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class that is in charge of managing models. */
public final class ModelManager {
private static final Logger logger = LoggerFactory.getLogger(ModelManager.class);
private static ModelManager modelManager;
private ConfigManager configManager;
private WorkLoadManager wlm;
private Map<String, Endpoint> endpoints;
private Set<String> startupModels;
private ModelManager(ConfigManager configManager) {
this.configManager = configManager;
wlm = new WorkLoadManager();
endpoints = new ConcurrentHashMap<>();
startupModels = new HashSet<>();
}
/**
     * Initializes the global {@code ModelManager} instance.
*
* @param configManager the configuration
*/
public static void init(ConfigManager configManager) {
modelManager = new ModelManager(configManager);
}
/**
* Returns the singleton {@code ModelManager} instance.
*
* @return the singleton {@code ModelManager} instance
*/
public static ModelManager getInstance() {
return modelManager;
}
/**
* Registers and loads a model.
*
* @param modelName the name of the model for HTTP endpoint
* @param version the model version
* @param modelUrl the model url
* @param engineName the engine to load the model
* @param gpuId the GPU device id, -1 for auto selection
* @param batchSize the batch size
* @param maxBatchDelay the maximum delay for batching
* @param maxIdleTime the maximum idle time of the worker threads before scaling down.
* @return a {@code CompletableFuture} instance
*/
public CompletableFuture<ModelInfo> registerModel(
final String modelName,
final String version,
final String modelUrl,
final String engineName,
final int gpuId,
final int batchSize,
final int maxBatchDelay,
final int maxIdleTime) {
return CompletableFuture.supplyAsync(
() -> {
try {
Criteria.Builder<Input, Output> builder =
Criteria.builder()
.setTypes(Input.class, Output.class)
.optModelUrls(modelUrl)
.optEngine(engineName);
if (gpuId != -1) {
builder.optDevice(Device.gpu(gpuId));
}
ZooModel<Input, Output> model = builder.build().loadModel();
ModelInfo modelInfo =
new ModelInfo(
modelName,
version,
modelUrl,
model,
configManager.getJobQueueSize(),
maxIdleTime,
maxBatchDelay,
batchSize);
Endpoint endpoint =
endpoints.computeIfAbsent(modelName, k -> new Endpoint());
if (!endpoint.add(modelInfo)) {
// model already exists
model.close();
throw new BadRequestException(
"Model " + modelInfo + " is already registered.");
}
logger.info("Model {} loaded.", modelName);
return modelInfo;
} catch (ModelException | IOException e) {
throw new CompletionException(e);
}
});
}
/**
* Unregisters a model by its name and version.
*
* @param modelName the model name to be unregistered
* @param version the model version
     * @return {@code true} if the model was unregistered successfully
*/
public boolean unregisterModel(String modelName, String version) {
Endpoint endpoint = endpoints.get(modelName);
if (endpoint == null) {
logger.warn("Model not found: " + modelName);
return false;
}
if (version == null) {
// unregister all versions
for (ModelInfo m : endpoint.getModels()) {
m.scaleWorkers(0, 0);
wlm.modelChanged(m);
startupModels.remove(modelName);
m.close();
}
logger.info("Model {} unregistered.", modelName);
} else {
ModelInfo model = endpoint.remove(version);
if (model == null) {
logger.warn("Model not found: " + modelName + ':' + version);
return false;
}
model.scaleWorkers(0, 0);
wlm.modelChanged(model);
startupModels.remove(modelName);
model.close();
}
if (endpoint.getModels().isEmpty()) {
endpoints.remove(modelName);
}
return true;
}
/**
     * Triggers that a {@code ModelInfo} has been updated. Updates the model workers for this
     * model and scales all workers up/down to match the parameters of the model.
*
* @param modelInfo the model that has been updated
* @return the model
*/
public ModelInfo triggerModelUpdated(ModelInfo modelInfo) {
String modelName = modelInfo.getModelName();
logger.debug("updateModel: {}", modelName);
wlm.modelChanged(modelInfo);
return modelInfo;
}
/**
* Returns the registry of all endpoints.
*
* @return the registry of all endpoints
*/
public Map<String, Endpoint> getEndpoints() {
return endpoints;
}
/**
     * Returns a version of the model.
     *
     * @param modelName the model name
     * @param version the model version
     * @param predict {@code true} for selecting a model in a load-balanced fashion
* @return the model
*/
public ModelInfo getModel(String modelName, String version, boolean predict) {
Endpoint endpoint = endpoints.get(modelName);
if (endpoint == null) {
return null;
}
if (version == null) {
if (endpoint.getModels().isEmpty()) {
return null;
}
if (predict) {
return endpoint.next();
}
return endpoint.getModels().get(0);
}
return endpoint.get(version);
}
/**
     * Returns a set of models that were loaded at startup.
     *
     * @return a set of models that were loaded at startup
*/
public Set<String> getStartupModels() {
return startupModels;
}
/**
     * Adds an inference job to the job queue. Assigns the job to the next free worker.
*
* @param job an inference job to be executed
     * @return {@code true} if the job was submitted successfully
* @throws ModelNotFoundException if the model is not registered
*/
public boolean addJob(Job job) throws ModelNotFoundException {
return wlm.addJob(job);
}
/**
* Returns a list of worker information for specified model.
*
* @param modelName the model name to be queried
* @param version the model version to be queried
* @return a list of worker information for specified model
* @throws ModelNotFoundException if specified model not found
*/
public DescribeModelResponse describeModel(String modelName, String version)
throws ModelNotFoundException {
ModelInfo model = getModel(modelName, version, false);
if (model == null) {
throw new ModelNotFoundException("Model not found: " + modelName);
}
DescribeModelResponse resp = new DescribeModelResponse();
resp.setModelName(modelName);
resp.setModelUrl(model.getModelUrl());
resp.setBatchSize(model.getBatchSize());
resp.setMaxBatchDelay(model.getMaxBatchDelay());
resp.setMaxWorkers(model.getMaxWorkers());
resp.setMinWorkers(model.getMinWorkers());
resp.setMaxIdleTime(model.getMaxIdleTime());
resp.setLoadedAtStartup(startupModels.contains(modelName));
int activeWorker = wlm.getNumRunningWorkers(model);
int targetWorker = model.getMinWorkers();
resp.setStatus(activeWorker >= targetWorker ? "Healthy" : "Unhealthy");
List<WorkerThread> workers = wlm.getWorkers(model);
for (WorkerThread worker : workers) {
int workerId = worker.getWorkerId();
long startTime = worker.getStartTime();
boolean isRunning = worker.isRunning();
int gpuId = worker.getGpuId();
resp.addWorker(workerId, startTime, isRunning, gpuId);
}
return resp;
}
/**
     * Sends the model server health status to the client.
     *
     * @return a {@code CompletableFuture} that eventually completes with the status after
     *     async execution
*/
public CompletableFuture<String> workerStatus() {
return CompletableFuture.supplyAsync(
() -> {
String response = "Healthy";
int numWorking = 0;
int numScaled = 0;
for (Endpoint endpoint : endpoints.values()) {
for (ModelInfo m : endpoint.getModels()) {
numScaled += m.getMinWorkers();
numWorking += wlm.getNumRunningWorkers(m);
}
}
if ((numWorking > 0) && (numWorking < numScaled)) {
response = "Partial Healthy";
} else if ((numWorking == 0) && (numScaled > 0)) {
response = "Unhealthy";
}
return response;
});
}
}
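/*
 * Illustrative usage sketch (editor's addition, not part of the original source): registers a
 * model through the singleton ModelManager and scales its workers once loading completes. The
 * model URL, engine name and sizing values are assumptions for illustration.
 */
class ModelManagerExample {
    static void registerAndScale() {
        ModelManager manager = ModelManager.getInstance();
        manager.registerModel(
                        "mlp",
                        "1.0",
                        "https://resources.djl.ai/demo/mlp.zip",
                        "MXNet",
                        -1, // auto device selection
                        4, // batchSize
                        300, // maxBatchDelay in milliseconds
                        60) // maxIdleTime in seconds
                .thenApply(modelInfo -> modelInfo.scaleWorkers(1, 4))
                .thenAccept(manager::triggerModelUpdated);
    }
}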
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/PermanentBatchAggregator.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A batch aggregator that never terminates by itself. The surrounding thread has to be
 * interrupted by sending an interrupt signal.
*
* @author erik.bamberg@web.de
*/
public class PermanentBatchAggregator extends BatchAggregator {
    private static final Logger logger = LoggerFactory.getLogger(PermanentBatchAggregator.class);
/**
* Constructs a {@code PermanentBatchAggregator} instance.
*
* @param model the model to use.
* @param jobQueue the job queue for polling data from.
*/
public PermanentBatchAggregator(ModelInfo model, LinkedBlockingDeque<Job> jobQueue) {
super(model, jobQueue);
}
/** {@inheritDoc} */
@Override
protected List<Job> pollBatch() throws InterruptedException {
List<Job> list = new ArrayList<>(batchSize);
Job job = jobQueue.take();
list.add(job);
jobQueue.drainTo(list, batchSize - 1);
logger.trace("sending jobs, size: {}", list.size());
return list;
}
/** {@inheritDoc} */
@Override
public boolean isFinished() {
return false;
}
}
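/*
 * Illustrative usage sketch (editor's addition, not part of the original source): polls one
 * batch from a job queue. pollBatch() is protected, so this sketch assumes it lives in the
 * same package as the aggregator.
 */
class PermanentBatchAggregatorExample {
    static List<Job> pollOnce(ModelInfo model, LinkedBlockingDeque<Job> queue)
            throws InterruptedException {
        PermanentBatchAggregator aggregator = new PermanentBatchAggregator(model, queue);
        // pollBatch() blocks until at least one job arrives, then drains up to
        // batchSize - 1 additional jobs without further waiting.
        return aggregator.pollBatch();
    }
}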
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/ScaleCapacityExceededException.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
/**
 * Thrown when the worker capacity is reached during autoscaling.
*
* @author erik.bamberg@web.de
*/
public class ScaleCapacityExceededException extends Exception {
    /** serialVersionUID for this class because exceptions are serializable. */
private static final long serialVersionUID = 1633130362838844091L;
/** No arguments. */
public ScaleCapacityExceededException() {}
/**
     * Constructs using a message.
*
* @param message the message.
*/
public ScaleCapacityExceededException(String message) {
super(message);
}
/**
     * Constructs using a cause.
*
* @param cause the root cause.
*/
public ScaleCapacityExceededException(Throwable cause) {
super(cause);
}
/**
     * Constructs using a message and a cause.
*
* @param message the message.
* @param cause the root cause.
*/
public ScaleCapacityExceededException(String message, Throwable cause) {
super(message, cause);
}
/**
     * Constructs using a message, a cause, and flags.
     *
     * @param message the message.
     * @param cause the root cause.
     * @param enableSuppression whether suppression is enabled.
     * @param writableStackTrace whether the stack trace should be writable.
*/
public ScaleCapacityExceededException(
String message,
Throwable cause,
boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/TemporaryBatchAggregator.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A batch aggregator that terminates after a maximum idle time.
*
* @author erik.bamberg@web.de
*/
public class TemporaryBatchAggregator extends BatchAggregator {
private static final Logger logger = LoggerFactory.getLogger(TemporaryBatchAggregator.class);
private long idleSince;
private long maxIdleTime;
/**
     * Constructs a {@code TemporaryBatchAggregator} instance.
*
* @param model the model to run for.
* @param jobQueue reference to external job queue for polling.
*/
public TemporaryBatchAggregator(ModelInfo model, LinkedBlockingDeque<Job> jobQueue) {
super(model, jobQueue);
this.idleSince = System.currentTimeMillis();
this.maxIdleTime = model.getMaxIdleTime();
}
/** {@inheritDoc} */
@Override
protected List<Job> pollBatch() throws InterruptedException {
List<Job> list = new ArrayList<>(batchSize);
Job job = jobQueue.poll(maxIdleTime, TimeUnit.SECONDS);
if (job != null) {
list.add(job);
jobQueue.drainTo(list, batchSize - 1);
logger.trace("sending jobs, size: {}", list.size());
idleSince = System.currentTimeMillis();
}
return list;
}
/** {@inheritDoc} */
@Override
public boolean isFinished() {
        logger.trace(
                "checking temporary batch aggregator: idle for {}ms - max idle time: {}ms",
                System.currentTimeMillis() - idleSince,
                maxIdleTime * 1000);
return System.currentTimeMillis() - idleSince > maxIdleTime * 1000;
}
}
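/*
 * Illustrative sketch (editor's addition, not part of the original source): demonstrates the
 * idle-time semantics. The model's maxIdleTime is configured in seconds, while isFinished()
 * compares the elapsed wall-clock milliseconds against maxIdleTime * 1000.
 */
class TemporaryBatchAggregatorExample {
    static boolean drainUntilIdle(ModelInfo model, LinkedBlockingDeque<Job> queue)
            throws InterruptedException {
        TemporaryBatchAggregator aggregator = new TemporaryBatchAggregator(model, queue);
        // An empty poll does not reset idleSince, so after maxIdleTime seconds without any
        // job the aggregator reports that it is finished and the worker can terminate.
        aggregator.pollBatch();
        return aggregator.isFinished();
    }
}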
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/WorkLoadManager.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * WorkLoadManager is responsible for managing the workload of the worker threads. The manager
 * scales the required number of worker threads per model up and down.
*
* @author erik.bamberg@web.de
*/
class WorkLoadManager {
private static final Logger logger = LoggerFactory.getLogger(WorkLoadManager.class);
private ExecutorService threadPool;
private ConcurrentHashMap<ModelInfo, WorkerPool> workerPools;
/** Constructs a {@code WorkLoadManager} instance. */
public WorkLoadManager() {
threadPool = Executors.newCachedThreadPool();
workerPools = new ConcurrentHashMap<>();
}
/**
* Returns the workers for the specific model.
*
     * @param modelInfo the model we are looking for.
     * @return the list of workers responsible for handling predictions for this model.
*/
public List<WorkerThread> getWorkers(ModelInfo modelInfo) {
List<WorkerThread> list;
WorkerPool pool = workerPools.get(modelInfo);
if (pool == null) {
list = Collections.emptyList();
} else {
list = pool.getWorkers();
if (list == null) {
list = Collections.emptyList();
}
}
return list;
}
/**
     * Adds an inference job to the job queue of the next free worker. Scales up workers if
     * necessary.
     *
     * @param job an inference job to be executed.
     * @return {@code true} if the job was submitted successfully, {@code false} otherwise.
*/
public boolean addJob(Job job) {
boolean accepted = false;
ModelInfo modelInfo = job.getModel();
WorkerPool pool = getWorkerPoolForModel(modelInfo);
if (getNumRunningWorkers(modelInfo) > 0) {
try {
accepted = pool.getJobQueue().offer(job);
if (!accepted) {
synchronized (modelInfo.getModel()) {
scaleUpWorkers(modelInfo, pool);
accepted =
pool.getJobQueue()
.offer(
job,
modelInfo.getMaxBatchDelay(),
TimeUnit.MILLISECONDS);
}
}
            } catch (InterruptedException e) {
                logger.info(
                        "Worker queue capacity exceeded: could not add the job to the worker"
                                + " queue in time. You can configure the max batch delay for"
                                + " this model.");
                // Restore the interrupt status so callers can observe the interruption.
                Thread.currentThread().interrupt();
            }
}
return accepted;
}
private void scaleUpWorkers(ModelInfo modelInfo, WorkerPool pool) {
int currentWorkers = getNumRunningWorkers(modelInfo);
if (currentWorkers < modelInfo.getMaxWorkers()) {
logger.debug("scaling up workers for model {} to {} ", modelInfo, currentWorkers + 1);
addThreads(pool.getWorkers(), modelInfo, 1, false);
} else {
logger.warn(
"scale up capacity of {} workers reached. Unable to scale up worker pool.",
modelInfo.getMaxWorkers());
}
}
/**
     * Returns the number of running workers of a model. Running workers are workers that are
     * not stopped, in error, or scheduled to scale down.
*
* @param modelInfo the model we are interested in.
* @return number of running workers.
*/
public int getNumRunningWorkers(ModelInfo modelInfo) {
int numWorking = 0;
WorkerPool pool = workerPools.get(modelInfo);
if (pool != null) {
pool.cleanup();
List<WorkerThread> threads = pool.getWorkers();
for (WorkerThread thread : threads) {
if ((thread.getState() != WorkerState.WORKER_STOPPED)
&& (thread.getState() != WorkerState.WORKER_ERROR)
&& (thread.getState() != WorkerState.WORKER_SCALED_DOWN)) {
++numWorking;
}
}
}
return numWorking;
}
/**
     * Triggers a model change event. Scales workers up and down to match minWorkers/maxWorkers.
*
* @param modelInfo the changed model.
*/
public void modelChanged(ModelInfo modelInfo) {
synchronized (modelInfo.getModel()) {
int minWorker = modelInfo.getMinWorkers();
WorkerPool pool = getWorkerPoolForModel(modelInfo);
if (pool != null) {
pool.cleanup();
List<WorkerThread> threads;
if (minWorker == 0) {
workerPools.remove(modelInfo);
}
threads = pool.getWorkers();
List<WorkerThread> fixedPoolThread =
threads.stream()
.filter(WorkerThread::isFixPoolThread)
.collect(Collectors.toList());
int numberOfCurrentFixedWorkers = fixedPoolThread.size();
if (numberOfCurrentFixedWorkers < minWorker) {
// scale up the fixed pool
addThreads(threads, modelInfo, minWorker - numberOfCurrentFixedWorkers, true);
} else {
// scale down the fixed pool
fixedPoolThread
.subList(minWorker, numberOfCurrentFixedWorkers)
.forEach(
t -> {
threads.remove(t);
t.shutdown(WorkerState.WORKER_SCALED_DOWN);
});
}
pool.log();
}
}
}
private WorkerPool getWorkerPoolForModel(ModelInfo modelInfo) {
return workerPools.computeIfAbsent(modelInfo, k -> new WorkerPool(modelInfo));
}
private void addThreads(
List<WorkerThread> threads, ModelInfo model, int count, boolean permanent) {
for (int i = 0; i < count; ++i) {
WorkerThread thread =
WorkerThread.builder()
.setModel(model)
.setJobQueue(getWorkerPoolForModel(model).getJobQueue())
.optFixPoolThread(permanent)
.build();
threads.add(thread);
threadPool.submit(thread);
}
}
/**
     * A worker pool holds information per model.
*
* @author erik.bamberg@web.de
*/
private static final class WorkerPool {
private List<WorkerThread> workers;
private LinkedBlockingDeque<Job> jobQueue;
private String modelName;
/**
         * Constructs and initializes the data structure.
*
* @param model the model this WorkerPool belongs to.
*/
public WorkerPool(ModelInfo model) {
workers = Collections.synchronizedList(new ArrayList<>());
jobQueue = new LinkedBlockingDeque<>(model.getQueueSize());
modelName = model.getModelName();
}
/**
         * Returns the list of worker threads.
*
* @return the workers
*/
public List<WorkerThread> getWorkers() {
return workers;
}
/**
* Returns the {@code JobQueue} for this model.
*
* @return the jobQueue
*/
public LinkedBlockingDeque<Job> getJobQueue() {
return jobQueue;
}
/**
* Logs the current state of this {@code WorkerPool} when level "Debug" is enabled.
*
* <p>Logs all thread-ids in the pool.
*/
public void log() {
if (logger.isDebugEnabled()) {
                StringBuilder buf = new StringBuilder();
workers.forEach(
w -> {
buf.append(w.getWorkerId());
if (w.isFixPoolThread()) {
buf.append("-fixedPool\n");
} else {
buf.append("-tmpPool\n");
}
});
logger.debug("worker pool for model {}:\n {}", modelName, buf);
}
}
        /** Removes all stopped workers and workers in error state from the pool. */
public void cleanup() {
workers.removeIf(
t ->
t.getState() == WorkerState.WORKER_STOPPED
|| t.getState() == WorkerState.WORKER_ERROR);
}
}
}
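/*
 * Illustrative usage sketch (editor's addition, not part of the original source): submits a
 * job and counts the running workers. WorkLoadManager is package-private, so this sketch
 * assumes it lives in the same package.
 */
class WorkLoadManagerExample {
    static int submitAndCount(WorkLoadManager wlm, ModelInfo modelInfo, Job job) {
        // Create or resize the worker pool according to the model's min/max settings.
        wlm.modelChanged(modelInfo);
        // addJob() offers the job to the queue and scales up one worker if the queue is
        // full and the maximum worker count has not been reached yet.
        boolean accepted = wlm.addJob(job);
        // Count the workers that are neither stopped, in error, nor scaled down.
        return accepted ? wlm.getNumRunningWorkers(modelInfo) : 0;
    }
}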
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/WorkerIdGenerator.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * A class to generate a unique worker id.
*
* @author erik.bamberg@web.de
*/
public class WorkerIdGenerator {
private static final AtomicInteger WORKER_COUNTER = new AtomicInteger(1);
/**
     * Generates a new worker id.
     *
     * @return a new id.
*/
public int generate() {
return WORKER_COUNTER.getAndIncrement();
}
}
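/*
 * Illustrative sketch (editor's addition, not part of the original source): worker ids are
 * drawn from a single static counter, so ids stay unique even across generator instances.
 */
class WorkerIdGeneratorExample {
    static int nextId() {
        return new WorkerIdGenerator().generate();
    }
}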
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/WorkerState.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
/** An enum that represents the state of a worker. */
public enum WorkerState {
WORKER_STARTED,
WORKER_MODEL_LOADED,
WORKER_STOPPED,
WORKER_ERROR,
WORKER_SCALED_DOWN
}
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/WorkerThread.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.inference.Predictor;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
final class WorkerThread implements Runnable {
private static final Logger logger = LoggerFactory.getLogger(WorkerThread.class);
private String workerName;
private Predictor<Input, Output> predictor;
private AtomicBoolean running = new AtomicBoolean(true);
private BatchAggregator aggregator;
private int gpuId;
private AtomicReference<Thread> currentThread = new AtomicReference<>();
private WorkerState state;
private int workerId;
private long startTime;
private boolean fixPoolThread;
/**
     * Constructs a {@code WorkerThread} from the given builder.
     *
     * @param builder the builder holding the worker thread configuration.
*/
private WorkerThread(Builder builder) {
this.workerName = buildWorkerName(builder.model);
this.aggregator = builder.aggregator;
this.workerId = new WorkerIdGenerator().generate();
this.startTime = System.currentTimeMillis();
this.fixPoolThread = builder.fixPoolThread;
ZooModel<Input, Output> model = builder.model.getModel();
predictor = model.newPredictor();
this.gpuId = model.getNDManager().getDevice().getDeviceId();
}
/** {@inheritDoc} */
@Override
public void run() {
Thread thread = Thread.currentThread();
thread.setName(workerName);
currentThread.set(thread);
this.state = WorkerState.WORKER_STARTED;
List<Input> req = null;
try {
while (isRunning() && !aggregator.isFinished()) {
req = aggregator.getRequest();
if (req != null && !req.isEmpty()) {
try {
List<Output> reply = predictor.batchPredict(req);
aggregator.sendResponse(reply);
} catch (TranslateException e) {
logger.warn("Failed to predict", e);
aggregator.sendError();
}
}
req = null;
}
} catch (InterruptedException e) {
logger.debug("Shutting down the thread .. Scaling down.");
} catch (Throwable t) {
logger.error("Server error", t);
} finally {
logger.debug("Shutting down worker thread .. {}", currentThread.get().getName());
currentThread.set(null);
shutdown(WorkerState.WORKER_STOPPED);
if (req != null) {
aggregator.sendError();
}
}
}
public int getWorkerId() {
return workerId;
}
public boolean isRunning() {
return running.get();
}
public int getGpuId() {
return gpuId;
}
public long getStartTime() {
return startTime;
}
public WorkerState getState() {
return state;
}
public void shutdown(WorkerState state) {
running.set(false);
setState(state);
Thread thread = currentThread.getAndSet(null);
if (thread != null) {
thread.interrupt();
aggregator.sendError();
}
predictor.close();
}
private String buildWorkerName(ModelInfo model) {
String modelName = model.getModelName();
if (modelName.length() > 25) {
modelName = modelName.substring(0, 25);
}
return "W-" + modelName + '-' + workerId;
}
void setState(WorkerState newState) {
logger.debug("{} State change {} -> {}", workerName, state, newState);
if (state != WorkerState.WORKER_SCALED_DOWN) {
// Don't update the state if it was terminated on purpose.. Scaling in..
this.state = newState;
}
}
/**
     * Checks if this worker is one of the fixed threads of a pool. Fixed threads are not
     * scaled down automatically, so they are candidates for down-scaling when the
     * minWorker/maxWorker size of a model changes.
*
* @return the fixPoolThread
*/
public boolean isFixPoolThread() {
return fixPoolThread;
}
/**
* Creates a builder to build a {@code WorkerThread}.
*
* @return a new builder
*/
public static Builder builder() {
return new Builder();
}
/** A Builder to construct a {@code WorkerThread}. */
public static class Builder {
private ModelInfo model;
private BatchAggregator aggregator;
private LinkedBlockingDeque<Job> jobQueue;
private boolean fixPoolThread;
Builder() {
this.fixPoolThread = true;
}
/**
* Returns self reference to this builder.
*
* @return self reference to this builder
*/
protected Builder self() {
return this;
}
protected void preBuildProcessing() {
if (aggregator == null) {
if (fixPoolThread) {
aggregator = new PermanentBatchAggregator(model, jobQueue);
} else {
aggregator = new TemporaryBatchAggregator(model, jobQueue);
}
}
}
protected void validate() {
if (model == null) {
throw new IllegalArgumentException("model must not be null");
}
if (jobQueue == null && aggregator == null) {
throw new IllegalArgumentException(
"one of jobQueue or BatchAggregator have to be set.");
}
}
/**
* Builds the {@link WorkerThread} with the provided data.
*
         * @return a {@link WorkerThread}
*/
public WorkerThread build() {
validate();
preBuildProcessing();
return new WorkerThread(this);
}
/**
* Sets the {@code ModelInfo} the thread will be responsible for.
*
* @param model the model to set
* @return self-reference to this builder.
*/
public Builder setModel(ModelInfo model) {
this.model = model;
return self();
}
/**
* Sets a {@code BatchAggregator} which overrides the instantiated default {@code
* BatchAggregator}.
*
* @param aggregator the {@code BatchAggregator} to set
* @return self-reference to this builder.
*/
public Builder optAggregator(BatchAggregator aggregator) {
this.aggregator = aggregator;
return self();
}
/**
         * Sets the jobQueue used to poll for new jobs. The jobQueue is passed to the created
         * standard BatchAggregators if the BatchAggregator is not overridden using {@link
         * #optAggregator(BatchAggregator) optAggregator(BatchAggregator)}.
*
* @param jobQueue the jobQueue to set
* @return self-reference to this builder.
*/
public Builder setJobQueue(LinkedBlockingDeque<Job> jobQueue) {
this.jobQueue = jobQueue;
return self();
}
/**
         * Sets whether the workerThread should be part of the fixed pool. Fixed pool workers
         * don't terminate themselves but are managed by the WorkLoadManager min/max-worker
         * scaling functionality.
*
* @param fixPoolThread the fixPoolThread to set
* @return self-reference to this builder.
*/
public Builder optFixPoolThread(boolean fixPoolThread) {
this.fixPoolThread = fixPoolThread;
return self();
}
}
}
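/*
 * Illustrative usage sketch (editor's addition, not part of the original source): builds a
 * temporary worker and starts it on a plain thread. In real use the WorkLoadManager submits
 * workers to its own thread pool instead.
 */
class WorkerThreadExample {
    static WorkerThread startWorker(ModelInfo model, LinkedBlockingDeque<Job> jobQueue) {
        WorkerThread worker =
                WorkerThread.builder()
                        .setModel(model)
                        .setJobQueue(jobQueue)
                        // false: a temporary worker backed by a TemporaryBatchAggregator that
                        // terminates itself after the model's maxIdleTime elapses.
                        .optFixPoolThread(false)
                        .build();
        new Thread(worker).start();
        return worker;
    }
}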
|
0
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving
|
java-sources/ai/djl/serving/0.12.0/ai/djl/serving/wlm/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes that manage model lifecycle. */
package ai.djl.serving.wlm;
|
0
|
java-sources/ai/djl/serving/prometheus/0.28.0/ai/djl
|
java-sources/ai/djl/serving/prometheus/0.28.0/ai/djl/prometheus/MetricExporter.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.prometheus;
import ai.djl.util.Utils;
import io.prometheus.metrics.expositionformats.PrometheusTextFormatWriter;
import io.prometheus.metrics.model.registry.PrometheusRegistry;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Set;
/** A utility class to export Prometheus metrics. */
public final class MetricExporter {
public static final String CONTENT_TYPE = PrometheusTextFormatWriter.CONTENT_TYPE;
private static final PrometheusTextFormatWriter WRITER = new PrometheusTextFormatWriter(false);
private MetricExporter() {}
/**
     * Writes Prometheus metrics to {@code OutputStream}.
*
* @param os the {@code OutputStream} to write
* @param set the filter names
* @throws IOException throws if failed to write
*/
public static void export(OutputStream os, Set<String> set) throws IOException {
if (!Boolean.parseBoolean(Utils.getEnvOrSystemProperty("SERVING_PROMETHEUS"))) {
throw new IllegalArgumentException(
"Prometheus is not enabled, set SERVING_PROMETHEUS environment var to true to"
+ " enable prometheus metrics");
}
WRITER.write(
os,
PrometheusRegistry.defaultRegistry.scrape(s -> set.isEmpty() || set.contains(s)));
}
}
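/*
 * Illustrative usage sketch (editor's addition, not part of the original source): scrapes all
 * registered Prometheus metrics into a string. This only works when the SERVING_PROMETHEUS
 * environment variable or system property is set to "true"; otherwise export() throws.
 */
class MetricExporterExample {
    static String scrapeAll() throws IOException {
        java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
        // An empty filter set exports every registered metric.
        MetricExporter.export(bos, java.util.Collections.emptySet());
        return bos.toString("UTF-8");
    }
}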
|
0
|
java-sources/ai/djl/serving/prometheus/0.28.0/ai/djl
|
java-sources/ai/djl/serving/prometheus/0.28.0/ai/djl/prometheus/PrometheusAppender.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.prometheus;
import ai.djl.metric.Dimension;
import ai.djl.metric.Metric;
import ai.djl.metric.Unit;
import ai.djl.util.Utils;
import io.prometheus.metrics.core.metrics.Counter;
import io.prometheus.metrics.core.metrics.Gauge;
import io.prometheus.metrics.model.snapshots.Labels;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Property;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** A log4j2 appender class to collect Prometheus metrics. */
@Plugin(name = "Prometheus", category = "Core", elementType = "appender")
public final class PrometheusAppender extends AbstractAppender {
private static final Map<String, Counter> COUNTERS = new ConcurrentHashMap<>();
private static final Map<String, Gauge> GAUGES = new ConcurrentHashMap<>();
private boolean usePrometheus;
private PrometheusAppender(String name) {
super(name, null, null, true, Property.EMPTY_ARRAY);
usePrometheus = Boolean.parseBoolean(Utils.getEnvOrSystemProperty("SERVING_PROMETHEUS"));
}
/** {@inheritDoc} */
@Override
public void append(LogEvent event) {
if (usePrometheus) {
Metric metric = (Metric) event.getMessage().getParameters()[0];
String name = metric.getMetricName();
Unit unit = metric.getUnit();
Dimension[] dimension = metric.getDimensions();
String labelName = dimension[0].getName();
String labelValue = dimension[0].getValue();
Labels labels = Labels.of(labelName, labelValue);
if (unit == Unit.COUNT) {
Counter counter =
COUNTERS.computeIfAbsent(name, k -> newCounter(name, labelName, unit));
counter.labelValues(labelValue).incWithExemplar(metric.getValue(), labels);
} else {
Gauge gauge = GAUGES.computeIfAbsent(name, k -> newGauge(name, labelName, unit));
gauge.labelValues(labelValue).setWithExemplar(metric.getValue(), labels);
}
}
}
private Counter newCounter(String name, String labelName, Unit unit) {
return Counter.builder()
.name(name)
.labelNames(labelName)
.help(": prometheus counter metric, unit: " + unit.getValue())
.register();
}
private Gauge newGauge(String name, String labelName, Unit unit) {
return Gauge.builder()
.name(name)
.labelNames(labelName)
.help(": prometheus gauge metric, unit: " + unit.getValue())
.register();
}
/**
* Constructs a new {@code PrometheusAppender} instance.
*
* @param name the appender name
* @return a new {@code PrometheusAppender} instance
*/
@PluginFactory
public static PrometheusAppender createAppender(@PluginAttribute("name") String name) {
return new PrometheusAppender(name);
}
}
|
0
|
java-sources/ai/djl/serving/prometheus/0.28.0/ai/djl
|
java-sources/ai/djl/serving/prometheus/0.28.0/ai/djl/prometheus/package-info.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes for Prometheus metric support. */
package ai.djl.prometheus;
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/Arguments.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving;
import ai.djl.serving.util.ConfigManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/** A class that represents parsed command line arguments. */
public final class Arguments {
private String configFile;
private String modelStore;
private String[] models;
private boolean help;
/**
* Constructs a new {@code Arguments} instance.
*
* @param cmd a parsed {@code CommandLine}
*/
public Arguments(CommandLine cmd) {
configFile = cmd.getOptionValue("config-file");
modelStore = cmd.getOptionValue("model-store");
models = cmd.getOptionValues("models");
help = cmd.hasOption("help");
}
/**
* Builds the command line options.
*
* @return the command line options
*/
public static Options getOptions() {
Options options = new Options();
options.addOption(
Option.builder("h").longOpt("help").hasArg(false).desc("Print this help.").build());
options.addOption(
Option.builder("f")
.longOpt("config-file")
.hasArg()
.argName("CONFIG-FILE")
.desc("Path to the configuration properties file.")
.build());
options.addOption(
Option.builder("m")
.longOpt("models")
.hasArgs()
.argName("MODELS")
.desc("Models to be loaded at startup.")
.build());
options.addOption(
Option.builder("s")
.longOpt("model-store")
.hasArg()
.argName("MODELS-STORE")
.desc("Model store location where models can be loaded.")
.build());
options.addOption(
Option.builder("i")
.longOpt("install")
.hasArgs()
.argName("DEPENDENCIES ...")
.desc(
"Maven dependencies (e.g."
+ " ai.djl.pytorch:pytorch-native-cpu:1.11.0:linux-x86_64)")
.build());
return options;
}
/**
* Returns the configuration file path.
*
* @return the configuration file path
*/
public Path getConfigFile() {
if (configFile == null) {
configFile = System.getProperty("ai.djl.conf", null);
}
if (configFile != null) {
Path file = Paths.get(configFile);
if (!Files.isRegularFile(file)) {
throw new IllegalArgumentException("Configuration file not found: " + configFile);
}
return file;
}
Path modelServerHome = Paths.get(ConfigManager.getModelServerHome());
Path file = modelServerHome.resolve("conf/config.properties");
if (Files.isRegularFile(file)) {
return file;
}
file = modelServerHome.resolve("config.properties");
if (Files.isRegularFile(file)) {
return file;
}
return null;
}
/**
* Returns the model store location.
*
* @return the model store location
*/
public String getModelStore() {
return modelStore;
}
/**
     * Returns the model urls that are specified in the command line.
     *
     * @return the model urls that are specified in the command line
*/
public String[] getModels() {
return models;
}
/**
     * Returns whether the command line has the help option.
     *
     * @return {@code true} if the command line has the help option
*/
public boolean hasHelp() {
return help;
}
}
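/*
 * Illustrative usage sketch (editor's addition, not part of the original source): parses
 * command line flags the same way the server entry point does. The flag values below are
 * assumptions for illustration.
 */
class ArgumentsExample {
    static Arguments parse() throws org.apache.commons.cli.ParseException {
        String[] args = {
            "-f", "conf/config.properties",
            "-m", "mlp=https://resources.djl.ai/demo/mlp.zip"
        };
        CommandLine cmd =
                new org.apache.commons.cli.DefaultParser().parse(Arguments.getOptions(), args);
        return new Arguments(cmd);
    }
}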
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/ModelServer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving;
import ai.djl.Device;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.metric.Dimension;
import ai.djl.metric.Metric;
import ai.djl.metric.Unit;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.repository.FilenameUtils;
import ai.djl.serving.http.ServerStartupException;
import ai.djl.serving.models.ModelManager;
import ai.djl.serving.plugins.DependencyManager;
import ai.djl.serving.plugins.FolderScanPluginManager;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.Connector;
import ai.djl.serving.util.ServerGroups;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.workflow.BadWorkflowException;
import ai.djl.serving.workflow.Workflow;
import ai.djl.serving.workflow.WorkflowDefinition;
import ai.djl.util.RandomUtils;
import ai.djl.util.Utils;
import ai.djl.util.cuda.CudaUtils;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.ServerChannel;
import io.netty.handler.ssl.SslContext;
import io.netty.util.internal.logging.InternalLoggerFactory;
import io.netty.util.internal.logging.Slf4JLoggerFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.IOException;
import java.lang.management.MemoryUsage;
import java.net.MalformedURLException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/** The main entry point for the model server. */
public class ModelServer {
private static final Logger logger = LoggerFactory.getLogger(ModelServer.class);
private static final Logger SERVER_METRIC = LoggerFactory.getLogger("server_metric");
private static final Pattern MODEL_STORE_PATTERN = Pattern.compile("(\\[?([^?]+?)]?=)?(.+)");
private ServerGroups serverGroups;
private List<ChannelFuture> futures = new ArrayList<>(2);
private AtomicBoolean stopped = new AtomicBoolean(false);
private ConfigManager configManager;
private FolderScanPluginManager pluginManager;
/**
* Creates a new {@code ModelServer} instance.
*
* @param configManager the model server configuration
*/
public ModelServer(ConfigManager configManager) {
this.configManager = configManager;
this.pluginManager = new FolderScanPluginManager(configManager);
serverGroups = new ServerGroups(configManager);
}
/**
* The entry point for the model server.
*
* @param args the command line arguments
*/
public static void main(String[] args) {
Options options = Arguments.getOptions();
try {
DefaultParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, args, null, false);
Arguments arguments = new Arguments(cmd);
if (arguments.hasHelp()) {
printHelp("djl-serving [OPTIONS]", options);
return;
} else if (cmd.hasOption("install")) {
String[] dependencies = cmd.getOptionValues("install");
DependencyManager dm = DependencyManager.getInstance();
for (String dep : dependencies) {
try {
dm.installDependency(dep);
} catch (Throwable t) {
logger.error("Failed to install dependency: {}", dep, t);
System.exit(1); // NOPMD
}
}
return;
}
DependencyManager.getInstance().initialize();
ConfigManager.init(arguments);
ConfigManager configManager = ConfigManager.getInstance();
InternalLoggerFactory.setDefaultFactory(Slf4JLoggerFactory.INSTANCE);
new ModelServer(configManager).startAndWait();
} catch (IllegalArgumentException e) {
logger.error("Invalid configuration", e);
SERVER_METRIC.info("{}", new Metric("ConfigurationError", 1));
System.exit(1); // NOPMD
} catch (ParseException e) {
printHelp(e.getMessage(), options);
SERVER_METRIC.info("{}", new Metric("CmdError", 1));
System.exit(1); // NOPMD
} catch (Throwable t) {
logger.error("Unexpected error", t);
SERVER_METRIC.info("{}", new Metric("StartupFailed", 1));
System.exit(1); // NOPMD
}
}
/**
     * Starts the model server and blocks until the server stops.
*
* @throws InterruptedException if interrupted
* @throws IOException if failed to start socket listener
* @throws GeneralSecurityException if failed to read SSL certificate
* @throws ServerStartupException if failed to startup server
*/
public void startAndWait()
throws InterruptedException,
IOException,
GeneralSecurityException,
ServerStartupException {
try {
logger.info("Starting model server ...");
List<ChannelFuture> channelFutures = start();
channelFutures.get(0).sync();
} finally {
serverGroups.shutdown(true);
logger.info("Model server stopped.");
}
}
/**
     * Main method that prepares the channel future and sets up the {@code ServerBootstrap}.
     *
     * @return a list of {@code ChannelFuture} objects
* @throws InterruptedException if interrupted
* @throws IOException if failed to start socket listener
* @throws GeneralSecurityException if failed to read SSL certificate
* @throws ServerStartupException if failed to startup server
*/
public List<ChannelFuture> start()
throws InterruptedException,
IOException,
GeneralSecurityException,
ServerStartupException {
long begin = System.nanoTime();
stopped.set(false);
String version = Engine.getDjlVersion();
logger.info("Starting djl-serving: {} ...", version);
logger.info(configManager.dumpConfigurations());
Dimension dim = new Dimension("Version", version);
SERVER_METRIC.info("{}", new Metric("DJLServingStart", 1, Unit.COUNT, dim));
pluginManager.loadPlugins(true);
try {
initModelStore();
} catch (BadWorkflowException | CompletionException e) {
throw new ServerStartupException(
"Failed to initialize startup models and workflows", e);
}
Connector inferenceConnector =
configManager.getConnector(Connector.ConnectorType.INFERENCE);
Connector managementConnector =
configManager.getConnector(Connector.ConnectorType.MANAGEMENT);
inferenceConnector.clean();
managementConnector.clean();
EventLoopGroup serverGroup = serverGroups.getServerGroup();
EventLoopGroup workerGroup = serverGroups.getChildGroup();
futures.clear();
if (inferenceConnector.equals(managementConnector)) {
Connector both = configManager.getConnector(Connector.ConnectorType.BOTH);
futures.add(initializeServer(both, serverGroup, workerGroup));
} else {
futures.add(initializeServer(inferenceConnector, serverGroup, workerGroup));
futures.add(initializeServer(managementConnector, serverGroup, workerGroup));
}
long duration = (System.nanoTime() - begin) / 1000;
Metric metric = new Metric("StartupLatency", duration, Unit.MICROSECONDS);
SERVER_METRIC.info("{}", metric);
for (int i = 0; i < CudaUtils.getGpuCount(); ++i) {
try {
Device device = Device.gpu(i);
MemoryUsage mem = CudaUtils.getGpuMemory(device);
SERVER_METRIC.info(
"{}", new Metric("GPUMemory_" + i, mem.getCommitted(), Unit.BYTES));
} catch (IllegalArgumentException | EngineException e) {
logger.warn("Failed get GPU memory", e);
break;
}
}
if (stopped.get()) {
            // check whether a model failed to load in the wait-for-model-loading case
stop();
}
return futures;
}
/**
     * Returns whether the server is running.
*
* @return {@code true} if the server is running
*/
public boolean isRunning() {
return !stopped.get();
}
/** Stops the model server. */
public void stop() {
logger.info("Stopping model server.");
stopped.set(true);
for (ChannelFuture future : futures) {
future.channel().close();
}
serverGroups.shutdown(true);
serverGroups.reset();
}
private ChannelFuture initializeServer(
Connector connector, EventLoopGroup serverGroup, EventLoopGroup workerGroup)
throws InterruptedException, IOException, GeneralSecurityException {
Class<? extends ServerChannel> channelClass = connector.getServerChannel();
logger.info(
"Initialize {} server with: {}.",
connector.getType(),
channelClass.getSimpleName());
ServerBootstrap b = new ServerBootstrap();
b.option(ChannelOption.SO_BACKLOG, 1024)
.channel(channelClass)
.childOption(ChannelOption.SO_LINGER, 0)
.childOption(ChannelOption.SO_REUSEADDR, true)
.childOption(ChannelOption.SO_KEEPALIVE, true);
b.group(serverGroup, workerGroup);
SslContext sslCtx = null;
if (connector.isSsl()) {
sslCtx = configManager.getSslContext();
}
b.childHandler(new ServerInitializer(sslCtx, connector.getType(), pluginManager));
ChannelFuture future;
try {
future = b.bind(connector.getSocketAddress()).sync();
} catch (Exception e) {
// https://github.com/netty/netty/issues/2597
if (e instanceof IOException) {
throw new IOException("Failed to bind to address: " + connector, e);
}
throw e;
}
future.addListener(
(ChannelFutureListener)
f -> {
if (!f.isSuccess()) {
try {
f.get();
} catch (InterruptedException | ExecutionException e) {
logger.error("", e);
}
System.exit(2); // NOPMD
}
serverGroups.registerChannel(f.channel());
});
future.sync();
ChannelFuture f = future.channel().closeFuture();
f.addListener(
(ChannelFutureListener)
listener -> logger.info("{} listener stopped.", connector.getType()));
logger.info("{} API bind to: {}", connector.getType(), connector);
return f;
}
private void initModelStore() throws IOException, BadWorkflowException {
Set<String> startupModels = ModelManager.getInstance().getStartupWorkflows();
String loadModels = configManager.getLoadModels();
Path modelStore = configManager.getModelStore();
if (loadModels == null || loadModels.isEmpty()) {
loadModels = "ALL";
}
ModelManager modelManager = ModelManager.getInstance();
Set<String> urls = new HashSet<>();
if ("NONE".equalsIgnoreCase(loadModels)) {
            // disables loading all models from the model store
return;
} else if ("ALL".equalsIgnoreCase(loadModels)) {
if (modelStore == null) {
logger.warn("Model store is not configured.");
return;
}
            if (Files.isDirectory(modelStore)) {
                // contains only directory or archive files
                boolean isMultiModelsDirectory;
                // use try-with-resources so the directory stream gets closed
                try (Stream<Path> stream = Files.list(modelStore)) {
                    isMultiModelsDirectory =
                            stream.filter(p -> !p.getFileName().toString().startsWith("."))
                                    .allMatch(
                                            p ->
                                                    Files.isDirectory(p)
                                                            || FilenameUtils.isArchiveFile(
                                                                    p.toString()));
                }
if (isMultiModelsDirectory) {
// Check folders to see if they can be models as well
try (Stream<Path> stream = Files.list(modelStore)) {
urls.addAll(
stream.map(this::mapModelUrl)
.filter(Objects::nonNull)
.collect(Collectors.toList()));
}
} else {
// Check if root model store folder contains a model
String url = mapModelUrl(modelStore);
if (url != null) {
urls.add(url);
}
}
} else {
logger.warn("Model store path is not found: {}", modelStore);
}
} else {
String[] modelsUrls = loadModels.split("[, ]+");
urls.addAll(Arrays.asList(modelsUrls));
}
String huggingFaceModelId = Utils.getEnvOrSystemProperty("HF_MODEL_ID");
if (huggingFaceModelId != null) {
urls.add(createHuggingFaceModel(huggingFaceModelId));
}
for (String url : urls) {
logger.info("Initializing model: {}", url);
Matcher matcher = MODEL_STORE_PATTERN.matcher(url);
if (!matcher.matches()) {
throw new AssertionError("Invalid model store url: " + url);
}
String endpoint = matcher.group(2);
String modelUrl = matcher.group(3);
String version = null;
String engineName = null;
String deviceMapping = null;
String modelName = null;
if (endpoint != null) {
String[] tokens = endpoint.split(":", -1);
modelName = tokens[0];
if (tokens.length > 1) {
version = tokens[1].isEmpty() ? null : tokens[1];
}
if (tokens.length > 2) {
engineName = tokens[2].isEmpty() ? null : tokens[2];
}
if (tokens.length > 3) {
deviceMapping = tokens[3];
}
}
Workflow workflow;
URI uri = WorkflowDefinition.toWorkflowUri(modelUrl);
if (uri != null) {
workflow = WorkflowDefinition.parse(modelName, uri).toWorkflow();
} else {
if (modelName == null) {
modelName = ModelInfo.inferModelNameFromUrl(modelUrl);
}
ModelInfo<Input, Output> modelInfo =
new ModelInfo<>(
modelName,
modelUrl,
version,
engineName,
deviceMapping,
Input.class,
Output.class,
-1,
-1,
-1,
-1,
-1,
-1);
workflow = new Workflow(modelInfo);
}
CompletableFuture<Void> f = modelManager.registerWorkflow(workflow);
f.exceptionally(
t -> {
logger.error("Failed register workflow", t);
Dimension dim = new Dimension("Model", workflow.getName());
SERVER_METRIC.info(
"{}", new Metric("ModelLoadingError", 1, Unit.COUNT, dim));
// delay 3 seconds, allows REST API to send PING
// response (health check)
try {
Thread.sleep(3000);
} catch (InterruptedException ignore) {
// ignore
}
stop();
return null;
});
if (configManager.waitModelLoading()) {
f.join();
}
startupModels.add(modelName);
}
}
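    // Editor's note: illustrative examples (not in the original source) of the load_models
    // syntax parsed by initModelStore() above, i.e. [modelName[:version[:engine[:device]]]=]url.
    // The URLs and names below are hypothetical:
    //   load_models=https://example.com/squeezenet.zip
    //   load_models=mlp:v1:MXNet:cpu=file:///opt/models/mlp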
String mapModelUrl(Path path) {
try {
if (!Files.exists(path)
|| Files.isHidden(path)
|| (!Files.isDirectory(path)
&& !FilenameUtils.isArchiveFile(path.toString()))) {
return null;
}
            try (Stream<Path> stream = Files.list(path)) {
                if (stream.findFirst().isEmpty()) {
                    return null;
                }
            }
path = Utils.getNestedModelDir(path);
String url = path.toUri().toURL().toString();
String modelName = ModelInfo.inferModelNameFromUrl(url);
logger.info("Found model {}={}", modelName, url);
return modelName + '=' + url;
} catch (MalformedURLException e) {
throw new AssertionError("Invalid path: " + path, e);
} catch (IOException e) {
logger.warn("Failed to access file: " + path, e);
return null;
}
}
private static void printHelp(String msg, Options options) {
HelpFormatter formatter = new HelpFormatter();
formatter.setLeftPadding(1);
formatter.setWidth(120);
formatter.printHelp(msg, options);
}
private String createHuggingFaceModel(String modelId) throws IOException {
if (modelId.startsWith("djl://") || modelId.startsWith("s3://")) {
return modelId;
}
Path path = Paths.get(modelId);
if (Files.exists(path)) {
            // modelId points to a local file
return modelId;
}
// TODO: Download the full model from HF
String hash = Utils.hash(modelId);
String downloadDir = Utils.getenv("SERVING_DOWNLOAD_DIR", null);
Path parent = downloadDir == null ? Utils.getCacheDir() : Paths.get(downloadDir);
Path huggingFaceModelDir = parent.resolve(hash);
String modelName = modelId.replaceAll("(\\W|^_)", "_");
if (Files.exists(huggingFaceModelDir)) {
logger.warn("HuggingFace Model {} already exists, use random model name", modelId);
return modelName + '_' + RandomUtils.nextInt() + '=' + huggingFaceModelDir;
}
String huggingFaceModelRevision = Utils.getEnvOrSystemProperty("HF_REVISION");
Properties huggingFaceProperties = new Properties();
huggingFaceProperties.put("option.model_id", modelId);
if (huggingFaceModelRevision != null) {
huggingFaceProperties.put("option.revision", huggingFaceModelRevision);
}
String task = Utils.getEnvOrSystemProperty("HF_TASK");
if (task != null) {
huggingFaceProperties.put("option.task", task);
}
Files.createDirectories(huggingFaceModelDir);
Path propertiesFile = huggingFaceModelDir.resolve("serving.properties");
try (BufferedWriter writer = Files.newBufferedWriter(propertiesFile)) {
huggingFaceProperties.store(writer, null);
}
logger.debug("Created serving.properties for model at path {}", propertiesFile);
return modelName + '=' + huggingFaceModelDir;
}
}
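// Editor's note: a minimal sketch (not part of the original source) of the
// serving.properties layout that createHuggingFaceModel(...) above writes for an
// HF_MODEL_ID; the property values here are illustrative assumptions.
class HuggingFacePropertiesSketch {
    public static void main(String[] args) throws java.io.IOException {
        java.util.Properties props = new java.util.Properties();
        props.put("option.model_id", "org/model"); // from HF_MODEL_ID
        props.put("option.revision", "main"); // from HF_REVISION, if set
        props.put("option.task", "text-generation"); // from HF_TASK, if set
        java.nio.file.Path dir = java.nio.file.Files.createTempDirectory("hf-model");
        try (java.io.BufferedWriter w =
                java.nio.file.Files.newBufferedWriter(dir.resolve("serving.properties"))) {
            props.store(w, null);
        }
        // The server then registers the model as "<name>=<dir>".
        System.out.println("org_model=" + dir);
    }
}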
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/ServerInitializer.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving;
import ai.djl.serving.http.AdapterManagementRequestHandler;
import ai.djl.serving.http.ConfigurableHttpRequestHandler;
import ai.djl.serving.http.InferenceRequestHandler;
import ai.djl.serving.http.InvalidRequestHandler;
import ai.djl.serving.http.ManagementRequestHandler;
import ai.djl.serving.plugins.FolderScanPluginManager;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.Connector;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.ssl.SslContext;
/**
* A special {@link io.netty.channel.ChannelInboundHandler} which offers an easy way to initialize a
 * {@link io.netty.channel.Channel} once it is registered to its {@link
* io.netty.channel.EventLoop}.
*/
public class ServerInitializer extends ChannelInitializer<Channel> {
private Connector.ConnectorType connectorType;
private SslContext sslCtx;
private FolderScanPluginManager pluginManager;
/**
     * Creates a new {@code ServerInitializer} instance.
*
* @param sslCtx null if SSL is not enabled
* @param connectorType type of {@link Connector}
* @param pluginManager a pluginManager instance.
*/
public ServerInitializer(
SslContext sslCtx,
Connector.ConnectorType connectorType,
FolderScanPluginManager pluginManager) {
this.sslCtx = sslCtx;
this.connectorType = connectorType;
this.pluginManager = pluginManager;
}
/** {@inheritDoc} */
@Override
public void initChannel(Channel ch) {
ChannelPipeline pipeline = ch.pipeline();
int maxRequestSize = ConfigManager.getInstance().getMaxRequestSize();
if (sslCtx != null) {
pipeline.addLast("ssl", sslCtx.newHandler(ch.alloc()));
}
pipeline.addLast("http", new HttpServerCodec());
pipeline.addLast("aggregator", new HttpObjectAggregator(maxRequestSize, true));
switch (connectorType) {
case MANAGEMENT:
pipeline.addLast(new ConfigurableHttpRequestHandler(pluginManager));
pipeline.addLast("management", new ManagementRequestHandler());
pipeline.addLast("management-adapter", new AdapterManagementRequestHandler());
break;
case INFERENCE:
pipeline.addLast("inference", new InferenceRequestHandler());
break;
case BOTH:
default:
pipeline.addLast(new ConfigurableHttpRequestHandler(pluginManager));
pipeline.addLast("inference", new InferenceRequestHandler());
pipeline.addLast("management", new ManagementRequestHandler());
pipeline.addLast("management-adapter", new AdapterManagementRequestHandler());
break;
}
pipeline.addLast("badRequest", new InvalidRequestHandler());
}
}
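// Editor's note: a hedged sketch (not part of the original source) of wiring
// ServerInitializer into a plain Netty bootstrap; the event-loop and port setup are
// illustrative assumptions, not how the model server configures its connectors.
class ServerInitializerUsageSketch {
    static io.netty.channel.ChannelFuture bind(FolderScanPluginManager pluginManager)
            throws InterruptedException {
        io.netty.channel.EventLoopGroup group = new io.netty.channel.nio.NioEventLoopGroup();
        io.netty.bootstrap.ServerBootstrap b = new io.netty.bootstrap.ServerBootstrap();
        b.group(group)
                .channel(io.netty.channel.socket.nio.NioServerSocketChannel.class)
                // null SslContext means plain HTTP; BOTH serves inference and management
                .childHandler(
                        new ServerInitializer(null, Connector.ConnectorType.BOTH, pluginManager));
        return b.bind(8080).sync(); // the caller is responsible for shutting down the group
    }
}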
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/cache/BaseCacheEngine.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.cache;
import ai.djl.inference.streaming.ChunkedBytesSupplier;
import ai.djl.inference.streaming.PublisherBytesSupplier;
import ai.djl.modality.Output;
import ai.djl.ndarray.BytesSupplier;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/** A helper for creating a {@link CacheEngine}. */
public abstract class BaseCacheEngine implements CacheEngine {
private static final Logger logger = LoggerFactory.getLogger(BaseCacheEngine.class);
protected int writeBatch;
/** Creates a {@link BaseCacheEngine}. */
public BaseCacheEngine() {
writeBatch = Integer.parseInt(Utils.getenv("SERVING_CACHE_BATCH", "1"));
}
/** {@inheritDoc} */
@Override
public Output get(String key, int limit) {
int start = 0;
if (key.length() > 36) {
start = Integer.parseInt(key.substring(36));
key = key.substring(0, 36);
}
return get(key, start, limit);
}
protected abstract Output get(String key, int start, int limit);
/** {@inheritDoc} */
@Override
public CompletableFuture<Void> put(String key, Output output) {
return CompletableFuture.<Void>supplyAsync(
() -> {
try {
BytesSupplier supplier = output.getData();
if (supplier instanceof ChunkedBytesSupplier) {
Output o = new Output();
o.setCode(output.getCode());
o.setMessage(output.getMessage());
o.setProperties(output.getProperties());
ChunkedBytesSupplier cbs = (ChunkedBytesSupplier) supplier;
int index = 0;
putStream(key, o, cbs.pollChunk(), index++, !cbs.hasNext());
List<byte[]> list = new ArrayList<>(writeBatch);
while (cbs.hasNext()) {
try {
list.add(cbs.nextChunk(60, TimeUnit.SECONDS));
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
if (list.size() >= writeBatch) {
byte[] batch = joinBytes(list);
putStream(key, null, batch, index++, !cbs.hasNext());
list.clear();
}
}
if (!list.isEmpty()) {
byte[] batch = joinBytes(list);
putStream(key, null, batch, index, true);
}
} else if (supplier instanceof PublisherBytesSupplier) {
Output o = new Output();
o.setCode(output.getCode());
o.setMessage(output.getMessage());
o.setProperties(output.getProperties());
PublisherBytesSupplier pub = (PublisherBytesSupplier) supplier;
AtomicInteger index = new AtomicInteger(-1);
List<byte[]> list = new ArrayList<>(writeBatch);
putStream(key, o, null, index.incrementAndGet(), false);
pub.subscribe(
buf -> {
try {
if (buf == null) {
byte[] batch = joinBytes(list);
putStream(
key,
null,
batch,
index.incrementAndGet(),
true);
} else if (buf.length > 0) {
list.add(buf);
if (list.size() >= writeBatch) {
byte[] batch = joinBytes(list);
putStream(
key,
null,
batch,
index.incrementAndGet(),
false);
list.clear();
}
}
} catch (IOException e) {
throw new CompletionException(e);
}
});
} else {
boolean last = output.getCode() != 202;
putSingle(key, output, last);
}
} catch (IOException e) {
throw new CompletionException(e);
}
return null;
})
.exceptionally(
t -> {
logger.warn("Failed to write to Cache", t);
return null;
});
}
/**
* Returns the number of elements to batch before putting with {@link #putStream(String, Output,
* byte[], int, boolean)}.
*
* @return number of elements to batch
*/
public int getWriteBatch() {
return writeBatch;
}
protected abstract void putSingle(String key, Output output, boolean last) throws IOException;
protected abstract void putStream(
String key, Output output, byte[] buf, int index, boolean last) throws IOException;
protected byte[] joinBytes(List<byte[]> list) {
return joinBytes(list, -1);
}
protected byte[] joinBytes(List<byte[]> list, int size) {
if (list.size() == 1) {
return list.get(0);
}
if (size < 0) {
size = 0;
for (byte[] buf : list) {
size += buf.length;
}
}
byte[] batch = new byte[size];
size = 0;
for (byte[] buf : list) {
System.arraycopy(buf, 0, batch, size, buf.length);
size += buf.length;
}
return batch;
}
}
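// Editor's note: a tiny illustration (not part of the original source) of the
// joinBytes(...) contract above; it reaches the protected helper through a throwaway
// subclass of the in-memory engine from this package.
class JoinBytesSketch extends MemoryCacheEngine {
    public static void main(String[] args) {
        java.util.List<byte[]> chunks =
                java.util.Arrays.asList("he".getBytes(), "llo".getBytes());
        byte[] joined = new JoinBytesSketch().joinBytes(chunks);
        System.out.println(new String(joined)); // prints "hello"
    }
}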
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/cache/CacheEngine.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.cache;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import java.util.Arrays;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
/**
* A cache that can be used for streaming online caching, streaming pagination, or async
* predictions.
*/
public interface CacheEngine {
/**
* Returns whether the cache should combine results from multiple users.
*
* @return whether the cache should combine results from multiple users
*/
boolean isMultiTenant();
/**
* Creates a new key to store in the cache.
*
* @return the cache key
*/
default String create() {
return UUID.randomUUID().toString();
}
/**
     * Creates a new cache key, derived from the {@code Input} when the cache is multi-tenant.
*
* @param input the {@code Input} that could be used to build a key
* @return the cache key
*/
default String create(Input input) {
if (isMultiTenant() && input != null) {
int hash = Arrays.hashCode(input.getData().getAsBytes());
return String.valueOf(hash);
}
return create();
}
/**
     * Adds the {@code Output} to the cache under the given key.
     *
     * @param key the cache key
     * @param output the {@code Output} to be added to the cache
     * @return a {@code CompletableFuture} that completes once the output is stored
*/
CompletableFuture<Void> put(String key, Output output);
/**
     * Adds the {@code Output} to the cache and returns the cache key.
*
* @param input the {@code Input} that could be used to build a key
* @param output the {@code Output} to be added in cache
* @return the cache key
*/
default String put(Input input, Output output) {
String key = create(input);
put(key, output);
return key;
}
/**
* Returns the cached {@link Output} from the cache.
*
* @param key the cache key
* @param limit the max number of items to return
* @return the item
*/
Output get(String key, int limit);
/**
     * Removes the cached entry for the given key.
*
* @param key the cache key
*/
void remove(String key);
}
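// Editor's note: a hedged sketch (not part of the original source) of the
// create/put/get/remove round trip this interface defines, driven against the
// in-memory engine from this package; the payload is illustrative.
class CacheEngineFlowSketch {
    public static void main(String[] args) {
        CacheEngine engine = new MemoryCacheEngine();
        Output output = new Output();
        output.add("result".getBytes());
        String key = engine.create();
        engine.put(key, output).join(); // block until the write completes
        Output cached = engine.get(key, Integer.MAX_VALUE);
        System.out.println(new String(cached.getData().getAsBytes()));
        engine.remove(key);
    }
}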
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/cache/CacheManager.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.cache;
/** A class that manages response cache. */
public final class CacheManager {
private static CacheEngine engine = new MemoryCacheEngine();
private CacheManager() {}
/**
* Returns the registered {@code CacheEngine} instance.
*
* @return the registered {@code CacheEngine} instance
*/
public static CacheEngine getCacheEngine() {
return engine;
}
/**
* Sets the {@code CacheEngine} instance.
*
* @param instance the {@code CacheEngine} instance
*/
public static void setCacheEngine(CacheEngine instance) {
CacheManager.engine = instance;
}
}
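// Editor's note: a minimal sketch (not part of the original source) of swapping the
// process-wide cache, assuming the bounded LRU constructor of MemoryCacheEngine in
// this package.
class CacheManagerUsageSketch {
    public static void main(String[] args) {
        // Replace the default unbounded engine with a 1024-entry LRU cache.
        CacheManager.setCacheEngine(new MemoryCacheEngine(false, 1024));
        CacheEngine engine = CacheManager.getCacheEngine();
        System.out.println("multi-tenant: " + engine.isMultiTenant());
    }
}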
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/cache/MemoryCacheEngine.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.cache;
import ai.djl.modality.Output;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A {@link CacheEngine} that stores elements in working memory.
*
* <p>Note that this is not suitable if you expect the cache to work with a horizontally scaled
* system.
*/
public class MemoryCacheEngine extends BaseCacheEngine {
private Map<String, Item> cache;
private boolean multiTenant;
private boolean cleanOnAccess;
/** Constructs a {@link MemoryCacheEngine}. */
public MemoryCacheEngine() {
this(false);
}
/**
* Constructs a {@link MemoryCacheEngine}.
*
* @param multiTenant whether to combine entries from multiple users
*/
public MemoryCacheEngine(boolean multiTenant) {
cache = new ConcurrentHashMap<>();
this.multiTenant = multiTenant;
}
/**
* Constructs an LRU {@link MemoryCacheEngine} with limited capacity.
*
* @param multiTenant whether to combine entries from multiple users
* @param capacity the maximum number of elements to store
*/
public MemoryCacheEngine(boolean multiTenant, int capacity) {
// Simple LRU cache based on https://stackoverflow.com/a/224886/3483497
this.multiTenant = multiTenant;
cache =
new LinkedHashMap<>(capacity + 1, .75f, true) {
/** {@inheritDoc} */
@Override
public boolean removeEldestEntry(Map.Entry<String, Item> eldest) {
return size() > capacity;
}
};
cache = Collections.synchronizedMap(cache);
}
/** {@inheritDoc} */
@Override
public boolean isMultiTenant() {
return multiTenant;
}
/** {@inheritDoc} */
@Override
public Output get(String key, int start, int limit) {
Item item = cache.get(key);
if (item == null) {
return null;
}
Output output = new Output();
List<byte[]> contents = new ArrayList<>();
// Maybe add first contents from output
if (start == 0) {
output.setCode(item.output.getCode());
output.setMessage(item.output.getMessage());
output.setProperties(item.output.getProperties());
if (item.output.getData() != null) {
contents.add(item.output.getData().getAsBytes());
limit--;
}
}
// Add rest of contents from subsequent
start++;
int maxI = Math.min(start + limit, item.subsequent.size() + 1);
        maxI = maxI < 0 ? item.subsequent.size() + 1 : maxI; // Handle int overflow of start + limit
for (int i = start; i < maxI; i++) {
contents.add(item.subsequent.get(i - 1));
}
if (!contents.isEmpty()) {
output.add(joinBytes(contents));
}
// Handle if last of data or not
boolean returnedLastItem = item.last && maxI == item.subsequent.size() + 1;
if (!returnedLastItem && !output.getProperties().containsKey("x-next-token")) {
output.addProperty("x-next-token", key + (maxI - 1));
output.addProperty("X-Amzn-SageMaker-Custom-Attributes", "x-next-token=" + key);
} else if (cleanOnAccess) { // Last item and should clean on access
remove(key);
}
return output;
}
@Override
protected void putSingle(String key, Output output, boolean last) {
        Item item = new Item(output);
        item.last = last;
        cache.put(key, item);
}
@Override
protected void putStream(String key, Output output, byte[] buf, int index, boolean last) {
cache.compute(
key,
(k, item) -> {
if (output != null || item == null) {
item = new Item(output);
}
if (buf != null) {
item.subsequent.add(buf);
}
item.last = last;
return item;
});
}
/** {@inheritDoc} */
@Override
public void remove(String key) {
cache.remove(key);
}
private static class Item {
Output output;
List<byte[]> subsequent;
boolean last;
public Item(Output output) {
this.output = output;
this.subsequent = new ArrayList<>();
}
}
}
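// Editor's note: an illustrative walk (not part of the original source) through the
// streamed-pagination path above; the chunk contents are made up, but the
// putStream/get indices follow the code as written.
class MemoryCachePaginationSketch {
    public static void main(String[] args) {
        MemoryCacheEngine engine = new MemoryCacheEngine();
        String key = engine.create();
        engine.putStream(key, new Output(), "a".getBytes(), 0, false);
        engine.putStream(key, null, "b".getBytes(), 1, true);
        Output page1 = engine.get(key, 1); // first chunk plus an x-next-token property
        System.out.println(page1.getProperties().get("x-next-token")); // key + "1"
    }
}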
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/cache/package-info.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes that handles response caching. */
package ai.djl.serving.cache;
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/AdapterManagementRequestHandler.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.serving.http.list.ListAdaptersResponse;
import ai.djl.serving.http.list.ListPagination;
import ai.djl.serving.models.ModelManager;
import ai.djl.serving.util.NettyUtils;
import ai.djl.serving.wlm.Adapter;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.WorkerPool;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.QueryStringDecoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
/** A class handling inbound HTTP requests to the management API for adapters. */
public class AdapterManagementRequestHandler extends HttpRequestHandler {
static final Pattern ADAPTERS_PATTERN =
Pattern.compile("^(/models/[^/^?]+)?/adapters([/?].*)?");
/** {@inheritDoc} */
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
if (super.acceptInboundMessage(msg)) {
FullHttpRequest req = (FullHttpRequest) msg;
String uri = req.uri();
return ADAPTERS_PATTERN.matcher(uri).matches();
}
return false;
}
/** {@inheritDoc} */
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException {
HttpMethod method = req.method();
if (segments.length < 4) {
// API /adapters/*
String modelName =
ModelManager.getInstance()
.getSingleStartupWorkflow()
.orElseThrow(
() ->
new BadRequestException(
"The adapter must be prefixed with a model"
+ " unless there is only a single startup"
+ " model used."));
if (segments.length < 3) {
if (HttpMethod.GET.equals(method)) {
handleListAdapters(ctx, modelName, decoder);
return;
} else if (HttpMethod.POST.equals(method)) {
handleRegisterAdapter(ctx, modelName, decoder);
return;
} else {
throw new MethodNotAllowedException();
}
}
String adapterName = segments[2];
if (HttpMethod.GET.equals(method)) {
handleDescribeAdapter(ctx, modelName, adapterName);
} else if (HttpMethod.DELETE.equals(method)) {
handleUnregisterAdapter(ctx, modelName, adapterName);
} else {
throw new MethodNotAllowedException();
}
} else {
// API /models/{modelName}/adapters/*
String modelName = segments[2];
if (segments.length < 5) {
if (HttpMethod.GET.equals(method)) {
handleListAdapters(ctx, modelName, decoder);
return;
} else if (HttpMethod.POST.equals(method)) {
handleRegisterAdapter(ctx, modelName, decoder);
return;
} else {
throw new MethodNotAllowedException();
}
}
String adapterName = segments[4];
if (HttpMethod.GET.equals(method)) {
handleDescribeAdapter(ctx, modelName, adapterName);
} else if (HttpMethod.DELETE.equals(method)) {
handleUnregisterAdapter(ctx, modelName, adapterName);
} else {
throw new MethodNotAllowedException();
}
}
}
private void handleListAdapters(
ChannelHandlerContext ctx, String modelName, QueryStringDecoder decoder) {
WorkerPool<Input, Output> wp =
ModelManager.getInstance().getWorkLoadManager().getWorkerPoolById(modelName);
if (wp == null) {
throw new BadRequestException("The model " + modelName + " was not found");
}
ModelInfo<Input, Output> modelInfo = getModelInfo(wp);
ListAdaptersResponse list = new ListAdaptersResponse();
List<String> keys = new ArrayList<>(modelInfo.getAdapters().keySet());
ListPagination pagination = new ListPagination(decoder, keys.size());
if (pagination.getLast() < keys.size()) {
list.setNextPageToken(String.valueOf(pagination.getLast()));
}
for (int i = pagination.getPageToken(); i < pagination.getLast(); ++i) {
String adapterName = keys.get(i);
Adapter adapter = modelInfo.getAdapter(adapterName);
list.addAdapter(adapter.getName(), adapter.getSrc());
}
NettyUtils.sendJsonResponse(ctx, list);
}
private void handleRegisterAdapter(
ChannelHandlerContext ctx, String modelName, QueryStringDecoder decoder) {
String adapterName = NettyUtils.getRequiredParameter(decoder, "name");
String src = NettyUtils.getRequiredParameter(decoder, "src");
WorkerPool<Input, Output> wp =
ModelManager.getInstance().getWorkLoadManager().getWorkerPoolById(modelName);
if (wp == null) {
throw new BadRequestException("The model " + modelName + " was not found");
}
Map<String, String> options = new ConcurrentHashMap<>();
for (Map.Entry<String, List<String>> entry : decoder.parameters().entrySet()) {
if (entry.getValue().size() == 1) {
options.put(entry.getKey(), entry.getValue().get(0));
}
}
Adapter adapter = Adapter.newInstance(wp.getWpc(), adapterName, src, options);
adapter.register(wp);
String msg = "Adapter " + adapterName + " registered";
NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg));
}
private void handleDescribeAdapter(
ChannelHandlerContext ctx, String modelName, String adapterName) {
WorkerPool<Input, Output> wp =
ModelManager.getInstance().getWorkLoadManager().getWorkerPoolById(modelName);
if (wp == null) {
throw new BadRequestException("The model " + modelName + " was not found");
}
ModelInfo<Input, Output> modelInfo = getModelInfo(wp);
Adapter adapter = modelInfo.getAdapter(adapterName);
if (adapter == null) {
throw new BadRequestException("The adapter " + adapterName + " was not found");
}
DescribeAdapterResponse adapterResponse = new DescribeAdapterResponse(adapter);
NettyUtils.sendJsonResponse(ctx, adapterResponse);
}
private void handleUnregisterAdapter(
ChannelHandlerContext ctx, String modelName, String adapterName) {
WorkerPool<Input, Output> wp =
ModelManager.getInstance().getWorkLoadManager().getWorkerPoolById(modelName);
if (wp == null) {
throw new BadRequestException("The model " + modelName + " was not found");
}
Adapter.unregister(wp, adapterName);
String msg = "Adapter " + adapterName + " registered";
NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg));
}
private ModelInfo<Input, Output> getModelInfo(WorkerPool<Input, Output> wp) {
if (!(wp.getWpc() instanceof ModelInfo)) {
String modelName = wp.getWpc().getId();
throw new BadRequestException("The worker " + modelName + " is not a model");
}
return (ModelInfo<Input, Output>) wp.getWpc();
}
}
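// Editor's note: a hedged client-side sketch (not part of the original source) of the
// adapter endpoints this handler serves; host, port, adapter name, and src are
// illustrative assumptions.
class AdapterApiClientSketch {
    public static void main(String[] args) throws Exception {
        java.net.http.HttpClient client = java.net.http.HttpClient.newHttpClient();
        // Register an adapter on model "mymodel", then list registered adapters.
        java.net.http.HttpRequest register =
                java.net.http.HttpRequest.newBuilder(
                                java.net.URI.create(
                                        "http://localhost:8080/models/mymodel/adapters"
                                                + "?name=a1&src=/opt/adapters/a1"))
                        .POST(java.net.http.HttpRequest.BodyPublishers.noBody())
                        .build();
        System.out.println(
                client.send(register, java.net.http.HttpResponse.BodyHandlers.ofString())
                        .body());
        java.net.http.HttpRequest list =
                java.net.http.HttpRequest.newBuilder(
                                java.net.URI.create(
                                        "http://localhost:8080/models/mymodel/adapters"))
                        .build();
        System.out.println(
                client.send(list, java.net.http.HttpResponse.BodyHandlers.ofString()).body());
    }
}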
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/BadRequestException.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** Thrown when a bad HTTP request is received. */
public class BadRequestException extends IllegalArgumentException {
static final long serialVersionUID = 1L;
private final int code;
/**
     * Constructs a {@code BadRequestException} with the specified HTTP response code and detail
     * message.
*
* @param code the HTTP response code
* @param message the detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
*/
public BadRequestException(int code, String message) {
super(message);
this.code = code;
}
/**
     * Constructs a {@code BadRequestException} with the specified detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
*/
public BadRequestException(String message) {
this(400, message);
}
/**
     * Constructs a {@code BadRequestException} with the specified detail message and a root cause.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
* @param cause root cause
*/
public BadRequestException(String message, Throwable cause) {
super(message, cause);
this.code = 400;
}
/**
     * Returns the HTTP response code.
*
* @return the HTTP response code
*/
public int getCode() {
return code;
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/ConfigurableHttpRequestHandler.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.serving.plugins.FolderScanPluginManager;
import ai.djl.serving.plugins.RequestHandler;
import ai.djl.serving.util.NettyUtils;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.QueryStringDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
/**
 * HttpRequestHandler that tries to process an HTTP request using the configured RequestHandlers.
*
* <p>RequestHandlers are configured by the PluginManager.
*
* @author erik.bamberg@web.de
*/
public class ConfigurableHttpRequestHandler extends HttpRequestHandler {
private static final Logger logger =
LoggerFactory.getLogger(ConfigurableHttpRequestHandler.class);
private FolderScanPluginManager pluginManager;
/**
     * Constructs a ConfigurableHttpRequestHandler.
*
* @param pluginManager a pluginManager instance used to search for available plug-ins to
* process a request.
*/
public ConfigurableHttpRequestHandler(FolderScanPluginManager pluginManager) {
this.pluginManager = pluginManager;
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException {
RequestHandler<?> requestHandler =
findRequestHandler(req)
.orElseThrow(
() -> new BadRequestException("request handler no longer valid"));
logger.trace(
"Request handler {} processes request", requestHandler.getClass().getSimpleName());
try {
Object result = requestHandler.handleRequest(ctx, req, decoder, segments);
if (result != null) {
if (result instanceof CompletableFuture) {
((CompletableFuture<Object>) result)
.handle(
(response, error) -> {
if (error != null) {
NettyUtils.sendError(ctx, error);
} else {
NettyUtils.sendJsonResponse(ctx, response);
}
return response;
});
} else {
NettyUtils.sendJsonResponse(ctx, result);
}
}
} catch (Exception ex) {
NettyUtils.sendError(ctx, ex);
}
}
/**
     * Finds the first registered {@link RequestHandler} that accepts the given request.
     *
     * @param req the full HTTP request
     * @return an optional RequestHandler
*/
@SuppressWarnings("rawtypes")
private Optional<RequestHandler> findRequestHandler(FullHttpRequest req) {
return pluginManager.findImplementations(RequestHandler.class).stream()
.filter(h -> h.acceptInboundMessage(req))
.findFirst();
}
/** {@inheritDoc} */
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
if (msg instanceof FullHttpRequest) {
return findRequestHandler((FullHttpRequest) msg).isPresent();
} else {
return false;
}
}
}
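// Editor's note: a minimal sketch (not part of the original source) of a plug-in
// RequestHandler like the ones this class dispatches to; the /echo path is invented,
// and the signatures mirror how findRequestHandler(...) and handleRequest(...) above
// call into the interface.
class EchoRequestHandlerSketch implements RequestHandler<String> {
    @Override
    public boolean acceptInboundMessage(Object msg) {
        return msg instanceof FullHttpRequest
                && ((FullHttpRequest) msg).uri().startsWith("/echo");
    }
    @Override
    public String handleRequest(
            ChannelHandlerContext ctx,
            FullHttpRequest req,
            QueryStringDecoder decoder,
            String[] segments) {
        // Returned objects are serialized to JSON by the dispatching handler.
        return "echo: " + req.uri();
    }
}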
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/DescribeAdapterResponse.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.serving.wlm.Adapter;
/** A class that holds information about adapter status. */
public class DescribeAdapterResponse {
private String name;
private String src;
/**
* Constructs a {@link DescribeAdapterResponse}.
*
* @param adapter the adapter to describe
*/
public DescribeAdapterResponse(Adapter adapter) {
this.name = adapter.getName();
this.src = adapter.getSrc();
}
/**
* Returns the adapter name.
*
* @return the adapter name
*/
public String getName() {
return name;
}
/**
* Returns the adapter src.
*
* @return the adapter src
*/
public String getSrc() {
return src;
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/DescribeWorkflowResponse.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.Device;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.serving.models.ModelManager;
import ai.djl.serving.wlm.WorkLoadManager;
import ai.djl.serving.wlm.WorkerGroup;
import ai.djl.serving.wlm.WorkerPool;
import ai.djl.serving.wlm.WorkerPoolConfig;
import ai.djl.serving.wlm.WorkerThread;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
/** A class that holds information about workflow status. */
public class DescribeWorkflowResponse {
private String workflowName;
private String version;
private List<Model> models;
/**
* Constructs a new {@code DescribeWorkflowResponse} instance.
*
* @param workflow the workflow
*/
public DescribeWorkflowResponse(ai.djl.serving.workflow.Workflow workflow) {
this.workflowName = workflow.getName();
this.version = workflow.getVersion();
models = new ArrayList<>();
ModelManager manager = ModelManager.getInstance();
WorkLoadManager wlm = manager.getWorkLoadManager();
Set<String> startupWorkflows = manager.getStartupWorkflows();
for (WorkerPoolConfig<Input, Output> wpc : workflow.getWpcs()) {
WorkerPoolConfig.Status status = wpc.getStatus();
int activeWorker = 0;
int targetWorker = 0;
Model m = new Model();
models.add(m);
WorkerPool<Input, Output> pool = wlm.getWorkerPool(wpc);
if (pool != null) {
pool.cleanup();
m.setModelName(wpc.getId());
m.setModelUrl(wpc.getModelUrl());
m.setBatchSize(wpc.getBatchSize());
m.setMaxBatchDelayMillis(wpc.getMaxBatchDelayMillis());
m.setMaxIdleSeconds(wpc.getMaxIdleSeconds());
m.setQueueSize(wpc.getQueueSize());
m.setRequestInQueue(pool.getJobQueue().size());
m.setLoadedAtStartup(startupWorkflows.contains(wpc.getId()));
for (WorkerGroup<Input, Output> group : pool.getWorkerGroups().values()) {
Device device = group.getDevice();
Group g = new Group(device, group.getMinWorkers(), group.getMaxWorkers());
m.addGroup(g);
List<WorkerThread<Input, Output>> workers = group.getWorkers();
activeWorker += workers.size();
targetWorker += group.getMinWorkers();
for (WorkerThread<Input, Output> worker : workers) {
int workerId = worker.getWorkerIdNum();
long startTime = worker.getStartTime();
boolean isRunning = worker.isRunning();
g.addWorker(workerId, startTime, isRunning);
}
}
}
if (status == WorkerPoolConfig.Status.READY) {
m.setStatus(activeWorker >= targetWorker ? "Healthy" : "Unhealthy");
} else {
m.setStatus(status.name());
}
}
}
/**
* Returns the workflow name.
*
* @return the workflow name
*/
public String getWorkflowName() {
return workflowName;
}
/**
* Returns the workflow version.
*
* @return the workflow version
*/
public String getVersion() {
return version;
}
/**
* Returns a list of models.
*
* @return a list of models
*/
public List<Model> getModels() {
return models;
}
    /** A class that represents model information. */
public static final class Model {
private String modelName;
private String modelUrl;
private int batchSize;
private int maxBatchDelayMillis;
private int maxIdleSeconds;
private int queueSize;
private int requestInQueue;
private String status;
private boolean loadedAtStartup;
private List<Group> workerGroups = new ArrayList<>();
/**
* Returns the model name.
*
* @return the model name
*/
public String getModelName() {
return modelName;
}
/**
* Sets the model name.
*
* @param modelName the model name
*/
public void setModelName(String modelName) {
this.modelName = modelName;
}
/**
* Returns the model URL.
*
* @return the model URL
*/
public String getModelUrl() {
return modelUrl;
}
/**
* Sets the model URL.
*
* @param modelUrl the model URL
*/
public void setModelUrl(String modelUrl) {
this.modelUrl = modelUrl;
}
/**
         * Returns whether the workflow was loaded at startup.
         *
         * @return {@code true} if the workflow was loaded at startup
*/
public boolean isLoadedAtStartup() {
return loadedAtStartup;
}
/**
* Sets the load at startup status.
*
         * @param loadedAtStartup {@code true} if the workflow was loaded at startup
*/
public void setLoadedAtStartup(boolean loadedAtStartup) {
this.loadedAtStartup = loadedAtStartup;
}
/**
* Returns the batch size.
*
* @return the batch size
*/
public int getBatchSize() {
return batchSize;
}
/**
* Sets the batch size.
*
* @param batchSize the batch size
*/
public void setBatchSize(int batchSize) {
this.batchSize = batchSize;
}
/**
* Returns the maximum delay in milliseconds to aggregate a batch.
*
* @return the maximum delay in milliseconds to aggregate a batch
*/
public int getMaxBatchDelayMillis() {
return maxBatchDelayMillis;
}
/**
* Sets the maximum delay in milliseconds to aggregate a batch.
*
* @param maxBatchDelayMillis the maximum delay in milliseconds to aggregate a batch
*/
public void setMaxBatchDelayMillis(int maxBatchDelayMillis) {
this.maxBatchDelayMillis = maxBatchDelayMillis;
}
/**
* Returns the job queue size.
*
* @return the job queue size
*/
public int getQueueSize() {
return queueSize;
}
/**
* Sets the job queue size.
*
* @param queueSize the job queue size
*/
public void setQueueSize(int queueSize) {
this.queueSize = queueSize;
}
/**
         * Returns the number of requests in the queue.
         *
         * @return the number of requests in the queue
*/
public int getRequestInQueue() {
return requestInQueue;
}
/**
         * Sets the number of requests in the queue.
         *
         * @param requestInQueue the number of requests in the queue
*/
public void setRequestInQueue(int requestInQueue) {
this.requestInQueue = requestInQueue;
}
/**
* Returns the workflow's status.
*
* @return the workflow's status
*/
public String getStatus() {
return status;
}
/**
* Sets the workflow's status.
*
* @param status the workflow's status
*/
public void setStatus(String status) {
this.status = status;
}
/**
* Sets the max idle time in seconds for worker threads.
*
* @param maxIdleSeconds the time a worker thread can be idle before scaling down.
*/
public void setMaxIdleSeconds(int maxIdleSeconds) {
this.maxIdleSeconds = maxIdleSeconds;
}
/**
* Returns the maximum idle time in seconds for worker threads.
*
* @return the maximum idle time in seconds
*/
public int getMaxIdleSeconds() {
return maxIdleSeconds;
}
/**
         * Returns all worker groups of the model.
         *
         * @return all worker groups of the model
*/
public List<Group> getWorkGroups() {
return workerGroups;
}
void addGroup(Group group) {
workerGroups.add(group);
}
}
    /** A class that represents a worker group. */
public static final class Group {
private Device device;
private int minWorkers;
private int maxWorkers;
private List<Worker> workers;
/**
* Constructs a new instance of {@code Group}.
*
* @param device the device
* @param minWorkers the minimum number of workers
* @param maxWorkers the maximum number of workers
*/
public Group(Device device, int minWorkers, int maxWorkers) {
this.device = device;
this.minWorkers = minWorkers;
this.maxWorkers = maxWorkers;
workers = new ArrayList<>();
}
/**
* Returns the worker device.
*
* @return the worker device
*/
public Device getDevice() {
return device;
}
/**
* Returns the minimum number of workers.
*
* @return the minimum number of workers
*/
public int getMinWorkers() {
return minWorkers;
}
/**
* Returns the maximum number of workers.
*
* @return the maximum number of workers
*/
public int getMaxWorkers() {
return maxWorkers;
}
/**
* Adds worker to the worker list.
*
* @param id the worker's ID
* @param startTime the worker's start time
* @param isRunning {@code true} if worker is running
*/
public void addWorker(int id, long startTime, boolean isRunning) {
Worker worker = new Worker();
worker.setId(id);
worker.setStartTime(new Date(startTime));
worker.setStatus(isRunning ? "READY" : "UNLOADING");
workers.add(worker);
}
/**
* Returns a list of workers.
*
* @return a list of workers
*/
public List<Worker> getWorkers() {
return workers;
}
}
    /** A class that holds worker information. */
public static final class Worker {
private int id;
private Date startTime;
private String status;
/**
* Returns the worker's ID.
*
* @return the worker's ID
*/
public int getId() {
return id;
}
/**
* Sets the worker's ID.
*
         * @param id the worker's ID
*/
public void setId(int id) {
this.id = id;
}
/**
* Returns the worker's start time.
*
* @return the worker's start time
*/
public Date getStartTime() {
return startTime;
}
/**
* Sets the worker's start time.
*
* @param startTime the worker's start time
*/
public void setStartTime(Date startTime) {
this.startTime = startTime;
}
/**
* Returns the worker's status.
*
* @return the worker's status
*/
public String getStatus() {
return status;
}
/**
* Sets the worker's status.
*
* @param status the worker's status
*/
public void setStatus(String status) {
this.status = status;
}
}
}
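// Editor's note: an illustrative, hand-written shape (not captured server output) of
// the JSON this response class serializes to; all values below are examples:
//
//   {
//     "workflowName": "mymodel",
//     "version": "1.0",
//     "models": [{
//       "modelName": "mymodel", "modelUrl": "file:///opt/models/mymodel",
//       "batchSize": 1, "maxBatchDelayMillis": 100, "maxIdleSeconds": 60,
//       "queueSize": 100, "requestInQueue": 0, "status": "Healthy",
//       "loadedAtStartup": true,
//       "workerGroups": [{
//         "minWorkers": 1, "maxWorkers": 4,
//         "workers": [{"id": 1, "startTime": "...", "status": "READY"}]
//       }]
//     }]
//   }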
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/HttpRequestHandler.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.serving.util.NettyUtils;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.QueryStringDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class handling inbound HTTP requests. */
public abstract class HttpRequestHandler extends SimpleChannelInboundHandler<FullHttpRequest> {
private static final Logger logger = LoggerFactory.getLogger(HttpRequestHandler.class);
/** {@inheritDoc} */
@Override
protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) {
try {
NettyUtils.requestReceived(ctx.channel(), req);
if (!req.decoderResult().isSuccess()) {
throw new BadRequestException("Invalid HTTP message.");
}
QueryStringDecoder decoder = new QueryStringDecoder(req.uri());
String path = decoder.path();
if ("/".equals(path) && HttpMethod.OPTIONS.equals(req.method())) {
handleApiDescription(ctx);
return;
}
String[] segments = path.split("/");
handleRequest(ctx, req, decoder, segments);
} catch (Throwable t) {
NettyUtils.sendError(ctx, t);
}
}
/** {@inheritDoc} */
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
logger.error("", cause);
ctx.close();
}
protected abstract void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException;
private void handleApiDescription(ChannelHandlerContext ctx) {
NettyUtils.sendJsonResponse(ctx, "{}");
}
}
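// Editor's note: a minimal concrete subclass sketch (not part of the original source)
// showing the contract handleRequest implementations follow; the /status path is an
// invented example.
class StatusRequestHandlerSketch extends HttpRequestHandler {
    @Override
    public boolean acceptInboundMessage(Object msg) throws Exception {
        return super.acceptInboundMessage(msg)
                && ((FullHttpRequest) msg).uri().startsWith("/status");
    }
    @Override
    protected void handleRequest(
            ChannelHandlerContext ctx,
            FullHttpRequest req,
            QueryStringDecoder decoder,
            String[] segments) {
        // segments[0] is empty because the request path starts with '/'.
        NettyUtils.sendJsonResponse(ctx, "{\"status\":\"ok\"}");
    }
}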
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/InferenceRequestHandler.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.inference.streaming.ChunkedBytesSupplier;
import ai.djl.inference.streaming.PublisherBytesSupplier;
import ai.djl.metric.Metric;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.ndarray.BytesSupplier;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.cache.CacheEngine;
import ai.djl.serving.cache.CacheManager;
import ai.djl.serving.models.ModelManager;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.NettyUtils;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.util.WlmException;
import ai.djl.serving.workflow.Workflow;
import ai.djl.translate.TranslateException;
import ai.djl.util.JsonUtils;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.DefaultHttpContent;
import io.netty.handler.codec.http.DefaultHttpResponse;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.codec.http.QueryStringDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
/** A class handling inbound HTTP requests for the inference API. */
public class InferenceRequestHandler extends HttpRequestHandler {
private static final Logger logger = LoggerFactory.getLogger(InferenceRequestHandler.class);
private static final Logger SERVER_METRIC = LoggerFactory.getLogger("server_metric");
private static final Metric RESPONSE_2_XX = new Metric("Response_2XX", 1);
private static final Metric RESPONSE_4_XX = new Metric("Response_4XX", 1);
private static final Metric RESPONSE_5_XX = new Metric("Response_5XX", 1);
private static final Metric WLM_ERROR = new Metric("WlmError", 1);
private static final Metric SERVER_ERROR = new Metric("ServerError", 1);
private static final Pattern PATTERN =
Pattern.compile(
"/(ping|invocations|predictions|v1/chat/completions)([/?].*)?|/models/.+/invoke");
private static final String X_SYNCHRONOUS = "x-synchronous";
private static final String X_STARTING_TOKEN = "x-starting-token";
private static final String X_NEXT_TOKEN = "x-next-token";
private static final String X_MAX_ITEMS = "x-max-items";
private static final String X_CUSTOM_ATTRIBUTES = "X-Amzn-SageMaker-Custom-Attributes";
private RequestParser requestParser;
private int chunkReadTime;
    /** Constructs an {@code InferenceRequestHandler}. */
public InferenceRequestHandler() {
this.requestParser = new RequestParser();
chunkReadTime = ConfigManager.getInstance().getChunkedReadTimeout();
}
/** {@inheritDoc} */
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
if (super.acceptInboundMessage(msg)) {
FullHttpRequest req = (FullHttpRequest) msg;
String uri = req.uri();
return PATTERN.matcher(uri).matches();
}
return false;
}
/** {@inheritDoc} */
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException {
switch (segments[1]) {
case "ping":
ModelManager.getInstance()
.workerStatus()
.thenAccept(
w -> {
boolean hasFailure = (boolean) w.get("hasFailure");
boolean hasPending = (boolean) w.get("hasPending");
HttpResponseStatus status;
if (hasFailure) {
logger.info(
"PING FAILED: {}",
JsonUtils.GSON.toJson(w.get("data")));
status = HttpResponseStatus.INTERNAL_SERVER_ERROR;
} else if (hasPending) {
if (ConfigManager.getInstance().allowsMultiStatus()) {
status = HttpResponseStatus.MULTI_STATUS;
} else {
status = HttpResponseStatus.OK;
}
} else {
status = HttpResponseStatus.OK;
}
NettyUtils.sendJsonResponse(ctx, w.get("data"), status);
});
break;
case "invocations":
handleInvocations(ctx, req, decoder, null);
break;
case "models":
handleInvocations(ctx, req, decoder, segments[2]);
break;
case "predictions":
handlePredictions(ctx, req, decoder, segments);
break;
case "v1":
if ("chat".equals(segments[2]) && "completions".equals(segments[3])) {
handleInvocations(ctx, req, decoder, null);
}
break;
default:
throw new AssertionError("Invalid request uri: " + req.uri());
}
}
/** {@inheritDoc} */
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
Session session = NettyUtils.getSession(ctx.channel());
if (session != null) {
Input input = session.getInput();
if (input != null) {
input.setCancelled(true);
}
}
super.channelInactive(ctx);
}
private void handlePredictions(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelNotFoundException {
if (segments.length < 3) {
throw new ResourceNotFoundException();
}
String modelName = segments[2];
String version;
if (segments.length > 3) {
version = segments[3].isEmpty() ? null : segments[3];
} else {
version = null;
}
Input input = requestParser.parseRequest(req, decoder);
predict(ctx, req, input, modelName, version);
}
private void handleInvocations(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String modelName)
throws ModelNotFoundException {
Input input = requestParser.parseRequest(req, decoder);
if (modelName == null) {
modelName = NettyUtils.getParameter(decoder, "model_name", null);
}
if ((modelName == null || modelName.isEmpty())) {
modelName = input.getProperty("model_name", null);
if (modelName == null) {
modelName = input.getAsString("model_name");
}
}
if (modelName == null) {
modelName = ModelManager.getInstance().getSingleStartupWorkflow().orElse(null);
if (modelName == null) {
throw new BadRequestException("Parameter model_name is required.");
}
}
String version = NettyUtils.getParameter(decoder, "model_version", null);
if (version == null) {
version = input.getProperty("model_version", null);
}
predict(ctx, req, input, modelName, version);
}
private void predict(
ChannelHandlerContext ctx,
FullHttpRequest req,
Input input,
String workflowName,
String version)
throws ModelNotFoundException {
String startingToken = input.getProperty(X_STARTING_TOKEN, null);
if (startingToken != null && !HttpMethod.OPTIONS.equals(req.method())) {
CompletableFuture.runAsync(() -> getCacheResult(ctx, input, startingToken))
.exceptionally(
t -> {
onException(t.getCause(), ctx);
return null;
});
return;
}
ModelManager modelManager = ModelManager.getInstance();
ConfigManager config = ConfigManager.getInstance();
Workflow workflow = modelManager.getWorkflow(workflowName, version, true);
if (workflow == null) {
String regex = config.getModelUrlPattern();
if (regex == null) {
throw new ModelNotFoundException("Model or workflow not found: " + workflowName);
}
String modelUrl = input.getProperty("model_url", null);
if (modelUrl == null) {
modelUrl = input.getAsString("model_url");
if (modelUrl == null) {
throw new ModelNotFoundException("Parameter model_url is required.");
}
if (!modelUrl.matches(regex)) {
throw new ModelNotFoundException("Permission denied: " + modelUrl);
}
}
String engineName = input.getProperty("engine_name", null);
String deviceName = input.getProperty("device", null);
logger.info("Loading model {} from: {}", workflowName, modelUrl);
ModelInfo<Input, Output> modelInfo =
new ModelInfo<>(
workflowName,
modelUrl,
version,
engineName,
deviceName,
Input.class,
Output.class,
-1,
-1,
-1,
-1,
-1,
-1);
Workflow wf = new Workflow(modelInfo);
modelManager
.registerWorkflow(wf)
.thenAccept(p -> runJob(modelManager, ctx, wf, input))
.exceptionally(
t -> {
logger.error("Failed register workflow", t);
NettyUtils.sendError(ctx, t.getCause());
return null;
});
return;
}
if (HttpMethod.OPTIONS.equals(req.method())) {
NettyUtils.sendJsonResponse(ctx, "{}");
return;
}
runJob(modelManager, ctx, workflow, input);
}
void runJob(
ModelManager modelManager, ChannelHandlerContext ctx, Workflow workflow, Input input) {
Session session = NettyUtils.getSession(ctx.channel());
session.setInput(input);
String sync = input.getProperty(X_SYNCHRONOUS, "true");
if (Boolean.parseBoolean(sync)) { // Synchronous
modelManager
.runJob(workflow, input)
.whenCompleteAsync(
(o, t) -> {
if (o != null) {
sendOutput(o, ctx);
}
})
.exceptionally(
t -> {
onException(t.getCause(), ctx);
return null;
});
} else { // Asynchronous
CacheEngine cache = CacheManager.getCacheEngine();
String nextToken = cache.create(input);
// Store pending message to be sent for unfinished computations
Output pending = new Output();
pending.setMessage("The model result is not yet available");
pending.setCode(202);
pending.addProperty(X_NEXT_TOKEN, nextToken);
pending.addProperty(X_CUSTOM_ATTRIBUTES, X_NEXT_TOKEN + '=' + nextToken);
cache.put(nextToken, pending)
.thenAccept(
ignored -> {
// Send back token to user
Output out = new Output();
out.addProperty(X_NEXT_TOKEN, nextToken);
out.addProperty(
X_CUSTOM_ATTRIBUTES, X_NEXT_TOKEN + '=' + nextToken);
sendOutput(out, ctx);
// Run model
modelManager
.runJob(workflow, input)
.whenCompleteAsync(
(o, t) -> {
if (o != null) {
cache.put(nextToken, o);
} else {
Output failOut = new Output();
failOut.setCode(500);
failOut.setMessage(t.getMessage());
cache.put(nextToken, failOut);
}
});
});
}
}
private void getCacheResult(ChannelHandlerContext ctx, Input input, String startingToken) {
int limit = Integer.parseInt(input.getProperty(X_MAX_ITEMS, "-1"));
if (limit < 0) {
limit = Integer.MAX_VALUE;
}
CacheEngine cache = CacheManager.getCacheEngine();
Output output;
try {
output = cache.get(startingToken, limit);
} catch (RuntimeException e) {
throw new BadRequestException("Failed to lookup cache element", e);
}
if (output == null) {
throw new BadRequestException("Invalid " + X_STARTING_TOKEN + ": " + startingToken);
}
sendOutput(output, ctx);
}
void sendOutput(Output output, ChannelHandlerContext ctx) {
        /*
         * We can load the models based on the configuration file. Since this job is
         * not driven by an external connection, we could have an empty context for
         * this job. We shouldn't try to send a response to ctx if this was not
         * triggered by an external client.
         */
if (ctx == null) {
return;
}
HttpResponseStatus status;
int code = output.getCode();
if (code == 200) {
status = HttpResponseStatus.OK;
SERVER_METRIC.info("{}", RESPONSE_2_XX);
} else {
if (code >= 500) {
SERVER_METRIC.info("{}", RESPONSE_5_XX);
} else if (code >= 400) {
SERVER_METRIC.info("{}", RESPONSE_4_XX);
} else {
SERVER_METRIC.info("{}", RESPONSE_2_XX);
}
status = new HttpResponseStatus(code, output.getMessage());
}
BytesSupplier data = output.getData();
if (data instanceof ChunkedBytesSupplier) {
try {
boolean first = true;
ChunkedBytesSupplier supplier = (ChunkedBytesSupplier) data;
while (supplier.hasNext()) {
byte[] buf = supplier.nextChunk(chunkReadTime, TimeUnit.SECONDS);
                // Defer sending the HTTP header until the first chunk is received.
                // This allows inference to update the HTTP code.
if (first) {
code = output.getCode();
status = new HttpResponseStatus(code, output.getMessage());
HttpResponse resp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status);
for (Map.Entry<String, String> entry : output.getProperties().entrySet()) {
resp.headers().set(entry.getKey(), entry.getValue());
}
NettyUtils.sendHttpResponse(ctx, resp, true, false);
first = false;
}
ByteBuf bb = Unpooled.wrappedBuffer(buf);
ctx.writeAndFlush(new DefaultHttpContent(bb));
}
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
} catch (InterruptedException | IllegalStateException e) {
logger.warn("Chunk reading interrupted", e);
ctx.disconnect();
ctx.newFailedFuture(e);
}
return;
}
if (data instanceof PublisherBytesSupplier) {
HttpResponse resp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status);
for (Map.Entry<String, String> entry : output.getProperties().entrySet()) {
resp.headers().set(entry.getKey(), entry.getValue());
}
NettyUtils.sendHttpResponse(ctx, resp, true);
PublisherBytesSupplier supplier = (PublisherBytesSupplier) data;
supplier.subscribe(
buf -> {
if (buf == null) {
// End stream
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
} else if (buf.length > 0) {
// Continue stream
ByteBuf bb = Unpooled.wrappedBuffer(buf);
ctx.writeAndFlush(new DefaultHttpContent(bb));
}
});
return;
}
FullHttpResponse resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status);
for (Map.Entry<String, String> entry : output.getProperties().entrySet()) {
resp.headers().set(entry.getKey(), entry.getValue());
}
if (data != null) {
resp.content().writeBytes(data.getAsBytes());
}
NettyUtils.sendHttpResponse(ctx, resp, true);
}
void onException(Throwable t, ChannelHandlerContext ctx) {
HttpResponseStatus status;
if (t instanceof TranslateException || t instanceof BadRequestException) {
logger.debug(t.getMessage(), t);
SERVER_METRIC.info("{}", RESPONSE_4_XX);
status = HttpResponseStatus.BAD_REQUEST;
} else if (t instanceof WlmException) {
logger.warn(t.getMessage(), t);
SERVER_METRIC.info("{}", RESPONSE_5_XX);
SERVER_METRIC.info("{}", WLM_ERROR);
status = HttpResponseStatus.SERVICE_UNAVAILABLE;
} else {
logger.warn("Unexpected error", t);
SERVER_METRIC.info("{}", RESPONSE_5_XX);
SERVER_METRIC.info("{}", SERVER_ERROR);
status = HttpResponseStatus.INTERNAL_SERVER_ERROR;
}
/*
         * Models can be loaded from the configuration file. Since such a job is
         * not driven by an external connection, the context may be empty. We
         * shouldn't try to send a response to ctx unless the job was triggered
         * by an external client.
*/
if (ctx != null) {
NettyUtils.sendError(ctx, status, t);
}
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/LoadModelRequest.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.serving.util.NettyUtils;
import ai.djl.serving.wlm.ModelInfo;
import com.google.gson.annotations.SerializedName;
import io.netty.handler.codec.http.QueryStringDecoder;
class LoadModelRequest {
static final String URL = "url";
static final String TEMPLATE = "template";
static final String DEVICE = "device";
static final String MAX_WORKER = "max_worker";
static final String MIN_WORKER = "min_worker";
static final String SYNCHRONOUS = "synchronous";
private static final String JOB_QUEUE_SIZE = "job_queue_size";
private static final String BATCH_SIZE = "batch_size";
private static final String MODEL_NAME = "model_name";
private static final String MODEL_VERSION = "model_version";
private static final String ENGINE_NAME = "engine";
private static final String MAX_BATCH_DELAY = "max_batch_delay";
private static final String MAX_IDLE_TIME = "max_idle_time";
@SerializedName(URL)
private String modelUrl;
@SerializedName(MODEL_NAME)
private String modelName;
@SerializedName(MODEL_VERSION)
private String version;
@SerializedName(DEVICE)
private String deviceName;
@SerializedName(ENGINE_NAME)
private String engineName;
@SerializedName(BATCH_SIZE)
private int batchSize = -1;
@SerializedName(JOB_QUEUE_SIZE)
private int jobQueueSize = -1;
@SerializedName(MAX_IDLE_TIME)
private int maxIdleSeconds = -1;
@SerializedName(MAX_BATCH_DELAY)
private int maxBatchDelayMillis = -1;
@SerializedName(MIN_WORKER)
private int minWorkers = -1;
@SerializedName(MAX_WORKER)
private int maxWorkers = -1;
@SerializedName(SYNCHRONOUS)
private boolean synchronous = true;
public LoadModelRequest() {}
public LoadModelRequest(QueryStringDecoder decoder) {
modelUrl = NettyUtils.getParameter(decoder, URL, null);
if (modelUrl == null) {
throw new BadRequestException("Parameter url is required.");
}
modelName = NettyUtils.getParameter(decoder, MODEL_NAME, null);
if (modelName == null || modelName.isEmpty()) {
modelName = ModelInfo.inferModelNameFromUrl(modelUrl);
}
version = NettyUtils.getParameter(decoder, MODEL_VERSION, null);
deviceName = NettyUtils.getParameter(decoder, DEVICE, null);
engineName = NettyUtils.getParameter(decoder, ENGINE_NAME, null);
jobQueueSize = NettyUtils.getIntParameter(decoder, JOB_QUEUE_SIZE, -1);
batchSize = NettyUtils.getIntParameter(decoder, BATCH_SIZE, -1);
maxBatchDelayMillis = NettyUtils.getIntParameter(decoder, MAX_BATCH_DELAY, -1);
maxIdleSeconds = NettyUtils.getIntParameter(decoder, MAX_IDLE_TIME, -1);
minWorkers = NettyUtils.getIntParameter(decoder, MIN_WORKER, -1);
maxWorkers = NettyUtils.getIntParameter(decoder, MAX_WORKER, -1);
synchronous = Boolean.parseBoolean(NettyUtils.getParameter(decoder, SYNCHRONOUS, "true"));
}
public String getModelUrl() {
return modelUrl;
}
public String getModelName() {
return modelName;
}
public String getVersion() {
return version;
}
public String getDeviceName() {
return deviceName;
}
public String getEngineName() {
return engineName;
}
public int getBatchSize() {
return batchSize;
}
public int getJobQueueSize() {
return jobQueueSize;
}
public int getMaxIdleSeconds() {
return maxIdleSeconds;
}
public int getMaxBatchDelayMillis() {
return maxBatchDelayMillis;
}
public int getMinWorkers() {
return minWorkers;
}
public int getMaxWorkers() {
return maxWorkers;
}
public boolean isSynchronous() {
return synchronous;
}
}
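/*
 * A request sketch: handleRegisterModel in ManagementRequestHandler below builds
 * this object either from query parameters or from a JSON body, so the following
 * two registrations are equivalent (hypothetical model URL):
 *
 *   POST /models?url=https://example.com/mlp.zip&model_name=mlp&min_worker=1&max_worker=4
 *
 *   POST /models
 *   Content-Type: application/json
 *   {"url": "https://example.com/mlp.zip", "model_name": "mlp",
 *    "min_worker": 1, "max_worker": 4, "synchronous": true}
 */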
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/ManagementRequestHandler.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.ModelException;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.http.list.ListModelsResponse;
import ai.djl.serving.http.list.ListPagination;
import ai.djl.serving.http.list.ListWorkflowsResponse;
import ai.djl.serving.models.Endpoint;
import ai.djl.serving.models.ModelManager;
import ai.djl.serving.util.NettyUtils;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.WorkerPoolConfig;
import ai.djl.serving.workflow.BadWorkflowException;
import ai.djl.serving.workflow.Workflow;
import ai.djl.serving.workflow.WorkflowDefinition;
import ai.djl.serving.workflow.WorkflowTemplates;
import ai.djl.util.JsonUtils;
import ai.djl.util.Pair;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.util.CharsetUtil;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/** A class handling inbound HTTP requests to the management API. */
public class ManagementRequestHandler extends HttpRequestHandler {
private static final Pattern WORKFLOWS_PATTERN = Pattern.compile("^/workflows([/?].*)?");
private static final Pattern MODELS_PATTERN = Pattern.compile("^/models([/?].*)?");
private static final Pattern INVOKE_PATTERN = Pattern.compile("^/models/.+/invoke$");
private static final Pattern SERVER_PATTERN = Pattern.compile("^/server/.+");
/** {@inheritDoc} */
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
if (super.acceptInboundMessage(msg)) {
FullHttpRequest req = (FullHttpRequest) msg;
String uri = req.uri();
if (WORKFLOWS_PATTERN.matcher(uri).matches() || SERVER_PATTERN.matcher(uri).matches()) {
return true;
} else if (AdapterManagementRequestHandler.ADAPTERS_PATTERN.matcher(uri).matches()) {
return false;
} else if (MODELS_PATTERN.matcher(uri).matches()) {
return req.method() != HttpMethod.POST || !INVOKE_PATTERN.matcher(uri).matches();
}
}
return false;
}
/** {@inheritDoc} */
@Override
protected void handleRequest(
ChannelHandlerContext ctx,
FullHttpRequest req,
QueryStringDecoder decoder,
String[] segments)
throws ModelException {
HttpMethod method = req.method();
if ("server".equals(segments[1])) {
if ("logging".equals(segments[2])) {
handleConfigLogs(ctx, decoder);
return;
} else if ("metrics".equals(segments[2])) {
if (!HttpMethod.GET.equals(method)) {
throw new MethodNotAllowedException();
}
PrometheusExporter.handle(ctx, decoder);
return;
}
throw new ResourceNotFoundException();
}
if (segments.length < 3) {
if (HttpMethod.GET.equals(method)) {
if ("models".equals(segments[1])) {
handleListModels(ctx, decoder);
} else {
handleListWorkflows(ctx, decoder);
}
return;
} else if (HttpMethod.POST.equals(method)) {
if ("models".equals(segments[1])) {
handleRegisterModel(ctx, req, decoder);
} else {
handleRegisterWorkflow(ctx, decoder);
}
return;
}
throw new MethodNotAllowedException();
}
String modelName = segments[2];
String version = null;
if (segments.length > 3) {
version = segments[3];
}
if (HttpMethod.GET.equals(method)) {
handleDescribeWorkflow(ctx, modelName, version);
} else if (HttpMethod.PUT.equals(method)) {
handleScaleWorkflow(ctx, decoder, modelName, version);
} else if (HttpMethod.DELETE.equals(method)) {
handleUnregisterWorkflow(ctx, modelName, version);
} else {
throw new MethodNotAllowedException();
}
}
private void handleListModels(ChannelHandlerContext ctx, QueryStringDecoder decoder) {
ModelManager modelManager = ModelManager.getInstance();
Map<String, Endpoint> endpoints = modelManager.getEndpoints();
List<String> keys = new ArrayList<>(endpoints.keySet());
Collections.sort(keys);
ListModelsResponse list = new ListModelsResponse();
ListPagination pagination = new ListPagination(decoder, keys.size());
if (pagination.getLast() < keys.size()) {
list.setNextPageToken(String.valueOf(pagination.getLast()));
}
for (int i = pagination.getPageToken(); i < pagination.getLast(); ++i) {
String workflowName = keys.get(i);
for (Workflow workflow : endpoints.get(workflowName).getWorkflows()) {
for (WorkerPoolConfig<Input, Output> wpc : workflow.getWpcs()) {
String status = wpc.getStatus().toString();
String id = wpc.getId();
String name;
if (workflowName.equals(id)) {
name = workflowName;
} else {
name = workflowName + ':' + id;
}
list.addModel(name, workflow.getVersion(), wpc.getModelUrl(), status);
}
}
}
NettyUtils.sendJsonResponse(ctx, list);
}
private void handleListWorkflows(ChannelHandlerContext ctx, QueryStringDecoder decoder) {
ModelManager modelManager = ModelManager.getInstance();
Map<String, Endpoint> endpoints = modelManager.getEndpoints();
List<String> keys = new ArrayList<>(endpoints.keySet());
Collections.sort(keys);
ListWorkflowsResponse list = new ListWorkflowsResponse();
ListPagination pagination = new ListPagination(decoder, keys.size());
        if (pagination.getLast() < keys.size()) {
list.setNextPageToken(String.valueOf(pagination.getLast()));
}
for (int i = pagination.getPageToken(); i < pagination.getLast(); ++i) {
String workflowName = keys.get(i);
for (Workflow w : endpoints.get(workflowName).getWorkflows()) {
list.addWorkflow(workflowName, w.getVersion());
}
}
NettyUtils.sendJsonResponse(ctx, list);
}
private void handleDescribeWorkflow(
ChannelHandlerContext ctx, String workflowName, String version)
throws ModelNotFoundException {
ModelManager modelManager = ModelManager.getInstance();
DescribeWorkflowResponse[] resp = modelManager.describeWorkflow(workflowName, version);
NettyUtils.sendJsonResponse(ctx, resp);
}
private void handleRegisterModel(
final ChannelHandlerContext ctx, FullHttpRequest request, QueryStringDecoder decoder) {
LoadModelRequest req;
CharSequence contentType = HttpUtil.getMimeType(request);
if (HttpHeaderValues.APPLICATION_JSON.contentEqualsIgnoreCase(contentType)) {
String body = request.content().toString(CharsetUtil.UTF_8);
req = JsonUtils.GSON.fromJson(body, LoadModelRequest.class);
} else {
req = new LoadModelRequest(decoder);
}
final ModelManager modelManager = ModelManager.getInstance();
Workflow workflow;
URI uri = WorkflowDefinition.toWorkflowUri(req.getModelUrl());
if (uri != null) {
try {
workflow = WorkflowDefinition.parse(req.getModelName(), uri).toWorkflow();
} catch (IOException | BadWorkflowException e) {
NettyUtils.sendError(ctx, e.getCause());
return;
}
} else {
ModelInfo<Input, Output> modelInfo =
new ModelInfo<>(
req.getModelName(),
req.getModelUrl(),
req.getVersion(),
req.getEngineName(),
req.getDeviceName(),
Input.class,
Output.class,
req.getJobQueueSize(),
req.getMaxIdleSeconds(),
req.getMaxBatchDelayMillis(),
req.getBatchSize(),
req.getMinWorkers(),
req.getMaxWorkers());
workflow = new Workflow(modelInfo);
}
CompletableFuture<Void> f =
modelManager
.registerWorkflow(workflow)
.exceptionally(
t -> {
NettyUtils.sendError(ctx, t.getCause());
if (req.isSynchronous()) {
String name = workflow.getName();
modelManager.unregisterWorkflow(name, req.getVersion());
}
return null;
});
if (req.isSynchronous()) {
final String msg = "Model \"" + req.getModelName() + "\" registered.";
f.thenAccept(v -> NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg)));
} else {
String msg = "Model \"" + req.getModelName() + "\" registration scheduled.";
NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg), HttpResponseStatus.ACCEPTED);
}
}
private void handleRegisterWorkflow(
final ChannelHandlerContext ctx, QueryStringDecoder decoder) {
String workflowUrl = NettyUtils.getParameter(decoder, LoadModelRequest.URL, null);
String workflowTemplate = NettyUtils.getParameter(decoder, LoadModelRequest.TEMPLATE, null);
if (workflowUrl == null && workflowTemplate == null) {
throw new BadRequestException("Either parameter url or template is required.");
}
boolean synchronous =
Boolean.parseBoolean(
NettyUtils.getParameter(decoder, LoadModelRequest.SYNCHRONOUS, "true"));
try {
final ModelManager modelManager = ModelManager.getInstance();
Workflow workflow;
if (workflowTemplate != null) { // Workflow from template
Map<String, String> templateReplacements = // NOPMD
decoder.parameters().entrySet().stream()
.filter(e -> e.getValue().size() == 1)
.map(e -> new Pair<>(e.getKey(), e.getValue().get(0)))
.collect(Collectors.toMap(Pair::getKey, Pair::getValue));
workflow =
WorkflowTemplates.template(workflowTemplate, templateReplacements)
.toWorkflow();
} else { // Workflow from URL
URI uri = URI.create(workflowUrl);
workflow = WorkflowDefinition.parse(null, uri).toWorkflow();
}
String workflowName = workflow.getName();
CompletableFuture<Void> f =
modelManager
.registerWorkflow(workflow)
.exceptionally(
t -> {
NettyUtils.sendError(ctx, t.getCause());
return null;
});
if (synchronous) {
final String msg = "Workflow \"" + workflowName + "\" registered.";
f.thenAccept(m -> NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg)));
} else {
String msg = "Workflow \"" + workflowName + "\" registration scheduled.";
NettyUtils.sendJsonResponse(
ctx, new StatusResponse(msg), HttpResponseStatus.ACCEPTED);
}
} catch (IOException | BadWorkflowException e) {
NettyUtils.sendError(ctx, e.getCause());
}
}
private void handleUnregisterWorkflow(
ChannelHandlerContext ctx, String workflowName, String version)
throws ModelNotFoundException {
ModelManager modelManager = ModelManager.getInstance();
if (!modelManager.unregisterWorkflow(workflowName, version)) {
ModelNotFoundException t =
new ModelNotFoundException("Model or workflow not found: " + workflowName);
NettyUtils.sendError(ctx, t);
throw t;
}
String msg = "Model or workflow \"" + workflowName + "\" unregistered";
NettyUtils.sendJsonResponse(ctx, new StatusResponse(msg));
}
private void handleScaleWorkflow(
ChannelHandlerContext ctx,
QueryStringDecoder decoder,
String workflowName,
String version)
throws ModelNotFoundException {
try {
String deviceName = NettyUtils.getParameter(decoder, LoadModelRequest.DEVICE, null);
int minWorkers = NettyUtils.getIntParameter(decoder, LoadModelRequest.MIN_WORKER, -1);
int maxWorkers = NettyUtils.getIntParameter(decoder, LoadModelRequest.MAX_WORKER, -1);
ModelManager modelManager = ModelManager.getInstance();
Endpoint endpoint = modelManager.getEndpoints().get(workflowName);
List<Workflow> workflows = null;
if (endpoint != null) {
if (version == null) {
// scale all versions
workflows = endpoint.getWorkflows();
} else {
Workflow wf = modelManager.getWorkflow(workflowName, version, false);
if (wf != null) {
workflows = Collections.singletonList(wf);
}
}
}
if (workflows == null || workflows.isEmpty()) {
throw new ModelNotFoundException("Model or workflow not found: " + workflowName);
}
List<String> messages = new ArrayList<>();
for (Workflow workflow : workflows) {
// make sure all WorkerPoolConfigs (models) are loaded and ready
for (WorkerPoolConfig<Input, Output> wpc : workflow.getWpcs()) {
if (wpc.getStatus() != WorkerPoolConfig.Status.READY) {
throw new ServiceUnavailableException(
"Model or workflow is not ready: " + workflow.getName());
}
}
for (WorkerPoolConfig<Input, Output> wpc : workflow.getWpcs()) {
modelManager.scaleWorkers(wpc, deviceName, minWorkers, maxWorkers);
                    String msg =
                            "Workflow \""
                                    + workflow.getName()
                                    + "\" worker scaled. New worker configuration: min workers="
                                    + minWorkers
                                    + ", max workers="
                                    + maxWorkers;
messages.add(msg);
}
}
String combinedMsg = String.join("\n", messages);
NettyUtils.sendJsonResponse(ctx, new StatusResponse(combinedMsg));
} catch (NumberFormatException ex) {
throw new BadRequestException("parameter is invalid number." + ex.getMessage(), ex);
}
}
@SuppressWarnings("unchecked")
private void handleConfigLogs(ChannelHandlerContext ctx, QueryStringDecoder decoder) {
String logLevel = NettyUtils.getParameter(decoder, "level", null);
if (logLevel == null) {
logLevel = "info";
}
System.setProperty("ai.djl.logging.level", logLevel);
try {
Class<?> manager = Class.forName("org.apache.logging.log4j.LogManager");
Class<?> context = Class.forName("org.apache.logging.log4j.core.LoggerContext");
Method getContext = manager.getDeclaredMethod("getContext", boolean.class);
Method reconfigure = context.getDeclaredMethod("reconfigure");
Method updateLoggers = context.getDeclaredMethod("updateLoggers");
Object logCtx = getContext.invoke(null, false);
reconfigure.invoke(logCtx);
updateLoggers.invoke(logCtx);
} catch (ReflectiveOperationException e) {
throw new InternalServerException("Failed to reload log4j configuration", e);
}
StatusResponse resp = new StatusResponse("OK");
NettyUtils.sendJsonResponse(ctx, resp);
}
}
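/*
 * A route summary for this handler (a sketch of the dispatch in handleRequest,
 * not an exhaustive API reference):
 *
 *   GET    /models | /workflows                      list (paged via limit / next_page_token)
 *   POST   /models | /workflows                      register a model or workflow
 *   GET    /models/{name}[/{version}]                describe
 *   PUT    /models/{name}[/{version}]?min_worker=..  scale workers
 *   DELETE /models/{name}[/{version}]                unregister
 *   GET    /server/metrics                           Prometheus metrics
 *   ANY    /server/logging?level=..                  reconfigure the log level
 *
 * POST /models/{name}/invoke and the adapter routes are declined by
 * acceptInboundMessage and handled by other handlers.
 */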
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/PrometheusExporter.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.prometheus.MetricExporter;
import ai.djl.serving.util.NettyUtils;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufOutputStream;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.QueryStringDecoder;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
final class PrometheusExporter {
private PrometheusExporter() {}
static void handle(ChannelHandlerContext ctx, QueryStringDecoder decoder) {
ByteBuf buf = Unpooled.directBuffer();
List<String> params = decoder.parameters().getOrDefault("name[]", Collections.emptyList());
try (OutputStream os = new ByteBufOutputStream(buf)) {
MetricExporter.export(os, new HashSet<>(params));
} catch (IllegalArgumentException e) {
throw new BadRequestException(e.getMessage(), e);
} catch (IOException e) {
throw new InternalServerException("Failed to encode prometheus metrics", e);
}
FullHttpResponse resp =
new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, buf);
resp.headers().set(HttpHeaderNames.CONTENT_TYPE, MetricExporter.CONTENT_TYPE);
NettyUtils.sendHttpResponse(ctx, resp, true);
}
}
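/*
 * A request sketch: metrics may be filtered by repeating the "name[]" query
 * parameter read above; with no parameters all metrics are exported:
 *
 *   GET /server/metrics
 *   GET /server/metrics?name[]=MetricA&name[]=MetricB
 */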
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/RequestParser.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.modality.Input;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.NettyUtils;
import ai.djl.util.Utils;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.handler.codec.http.multipart.DefaultHttpDataFactory;
import io.netty.handler.codec.http.multipart.HttpDataFactory;
import io.netty.handler.codec.http.multipart.HttpPostRequestDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
/**
 * A parser for inbound requests.
*
* @author erik.bamberg@web.de
*/
public class RequestParser {
private static final Logger logger = LoggerFactory.getLogger(RequestParser.class);
/**
     * Parses a request into an {@link Input}.
*
* @param req the full request.
* @param decoder a decoder to decode the query string.
* @return parsed input object.
*/
public Input parseRequest(FullHttpRequest req, QueryStringDecoder decoder) {
Input input = new Input();
if (decoder != null) {
for (Map.Entry<String, List<String>> entry : decoder.parameters().entrySet()) {
String key = entry.getKey();
for (String value : entry.getValue()) {
input.add(key, value);
}
}
}
for (Map.Entry<String, String> entry : req.headers().entries()) {
String key = entry.getKey();
if ("X-Amzn-SageMaker-Custom-Attributes".equalsIgnoreCase(key)) {
String[] tokens = entry.getValue().split(";");
for (String token : tokens) {
String[] pair = token.split("=", 2);
if (pair.length == 2) {
input.addProperty(pair[0].trim(), pair[1].trim());
}
}
} else {
input.addProperty(key, entry.getValue());
}
}
CharSequence contentType = HttpUtil.getMimeType(req);
if (HttpPostRequestDecoder.isMultipart(req)
|| HttpHeaderValues.APPLICATION_X_WWW_FORM_URLENCODED.contentEqualsIgnoreCase(
contentType)) {
int sizeLimit = ConfigManager.getInstance().getMaxRequestSize();
HttpDataFactory factory = new DefaultHttpDataFactory(sizeLimit);
HttpPostRequestDecoder form = new HttpPostRequestDecoder(factory, req);
try {
while (form.hasNext()) {
NettyUtils.addFormData(form.next(), input);
}
} catch (HttpPostRequestDecoder.EndOfDataDecoderException ignore) {
logger.trace("End of multipart items.");
} finally {
form.cleanFiles();
form.destroy();
}
} else {
byte[] content = NettyUtils.getBytes(req.content());
input.add("data", content);
}
if (input.getProperties().containsKey("handler")) {
if (!Boolean.parseBoolean(
Utils.getEnvOrSystemProperty("ALLOW_REQUEST_HANDLER_OVERRIDE"))) {
throw new BadRequestException("The handler can't be overridden in a request");
}
}
return input;
}
}
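/*
 * A parsing sketch of the SageMaker header handling above: the header value is
 * split on ';' and each key=value pair becomes an Input property, so
 *
 *   X-Amzn-SageMaker-Custom-Attributes: x-synchronous=false;x-max-items=10
 *
 * yields input.getProperty("x-synchronous") == "false" and
 * input.getProperty("x-max-items") == "10". All other headers are copied
 * through as properties under their own names.
 */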
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/ServerStartupException.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
/** Failed to start the server. */
public class ServerStartupException extends Exception {
    private static final long serialVersionUID = 1L;
/**
* Constructs a new exception with {@code null} as its detail message. The cause is not
* initialized, and may subsequently be initialized by a call to {@link #initCause}.
*/
public ServerStartupException() {
super();
}
/**
* Constructs a new exception with the specified detail message. The cause is not initialized,
* and may subsequently be initialized by a call to {@link #initCause}.
*
* @param message the detail message. The detail message is saved for later retrieval by the
* {@link #getMessage()} method.
*/
public ServerStartupException(String message) {
super(message);
}
/**
* Constructs a new exception with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i> automatically
* incorporated in this exception's detail message.
*
* @param message the detail message (which is saved for later retrieval by the {@link
* #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
* @since 1.4
*/
public ServerStartupException(String message, Throwable cause) {
super(message, cause);
}
/**
* Constructs a new exception with the specified cause and a detail message of {@code
* (cause==null ? null : cause.toString())} (which typically contains the class and detail
* message of {@code cause}). This constructor is useful for exceptions that are little more
* than wrappers for other throwables.
*
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A {@code null} value is permitted, and indicates that the cause is nonexistent
* or unknown.)
* @since 1.4
*/
public ServerStartupException(Throwable cause) {
super(cause);
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/Session.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http;
import ai.djl.modality.Input;
import io.netty.handler.codec.http.HttpRequest;
import java.util.UUID;
/** A class that holds HTTP session information. */
public class Session {
private String requestId;
private String remoteIp;
private String method;
private String uri;
private String protocol;
private int code;
private long startTime;
private Input input;
/**
* Constructs a new {@code Session} instance.
*
* @param remoteIp the remote IP address
* @param request the HTTP request
*/
public Session(String remoteIp, HttpRequest request) {
this.remoteIp = remoteIp;
this.uri = request.uri();
if (request.decoderResult().isSuccess()) {
method = request.method().name();
protocol = request.protocolVersion().text();
} else {
method = "GET";
protocol = "HTTP/1.1";
}
requestId = UUID.randomUUID().toString();
startTime = System.currentTimeMillis();
}
/**
* Returns the current input.
*
* @return the current input
*/
public Input getInput() {
return input;
}
/**
* Sets the current input.
*
* @param input the current input
*/
public void setInput(Input input) {
this.input = input;
}
/**
* Returns the request ID.
*
* @return the request ID
*/
public String getRequestId() {
return requestId;
}
/**
* Sets the HTTP response code.
*
* @param code the HTTP response code
*/
public void setCode(int code) {
this.code = code;
}
/** {@inheritDoc} */
@Override
public String toString() {
long duration = System.currentTimeMillis() - startTime;
return remoteIp + " \"" + method + " " + uri + ' ' + protocol + "\" " + code + ' '
+ duration;
}
}
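/*
 * A log-line sketch of toString() with hypothetical values: remote IP, the
 * request line, the response code, and the duration in milliseconds:
 *
 *   127.0.0.1 "POST /predictions/mlp HTTP/1.1" 200 42
 */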
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/list/ListAdaptersResponse.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http.list;
import java.util.ArrayList;
import java.util.List;
/** A class that holds information about the currently registered adapters. */
public class ListAdaptersResponse {
private String nextPageToken;
private List<AdapterItem> adapters;
    /** Constructs a new {@code ListAdaptersResponse} instance. */
public ListAdaptersResponse() {
adapters = new ArrayList<>();
}
/**
* Returns the next page token.
*
* @return the next page token
*/
public String getNextPageToken() {
return nextPageToken;
}
/**
* Sets the next page token.
*
* @param nextPageToken the next page token
*/
public void setNextPageToken(String nextPageToken) {
this.nextPageToken = nextPageToken;
}
/**
* Returns a list of adapters.
*
* @return a list of adapters
*/
public List<AdapterItem> getAdapters() {
return adapters;
}
/**
* Adds the adapter to the list.
*
* @param name the adapter name
* @param src the adapter source
*/
public void addAdapter(String name, String src) {
adapters.add(new AdapterItem(name, src));
}
/** A class that holds the adapter response. */
public static final class AdapterItem {
private String name;
private String src;
private AdapterItem(String name, String src) {
this.name = name;
this.src = src;
}
/**
* Returns the adapter name.
*
* @return the adapter name
*/
public String getName() {
return name;
}
/**
* Returns the adapter src.
*
* @return the adapter src
*/
public String getSrc() {
return src;
}
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/list/ListModelsResponse.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http.list;
import java.util.ArrayList;
import java.util.List;
/** A class that holds information about the currently registered models. */
public class ListModelsResponse {
private String nextPageToken;
private List<ModelItem> models;
/** Constructs a new {@code ListModelsResponse} instance. */
public ListModelsResponse() {
models = new ArrayList<>();
}
/**
* Returns the next page token.
*
* @return the next page token
*/
public String getNextPageToken() {
return nextPageToken;
}
/**
* Sets the next page token.
*
* @param nextPageToken the next page token
*/
public void setNextPageToken(String nextPageToken) {
this.nextPageToken = nextPageToken;
}
/**
* Returns a list of models.
*
* @return a list of models
*/
public List<ModelItem> getModels() {
return models;
}
/**
     * Adds the model to the list.
     *
     * @param modelName the model name
     * @param version the model version
* @param modelUrl the model url
* @param status the model loading status
*/
public void addModel(String modelName, String version, String modelUrl, String status) {
models.add(new ModelItem(modelName, version, modelUrl, status));
}
    /** A class that holds a model's name, version, url, and status. */
public static final class ModelItem {
private String modelName;
private String version;
private String modelUrl;
private String status;
/** Constructs a new {@code ModelItem} instance. */
public ModelItem() {}
/**
         * Constructs a new {@code ModelItem} instance with model name, version, url, and status.
*
* @param modelName the model name
* @param version the model version
* @param modelUrl the model url
* @param status the model loading status
*/
public ModelItem(String modelName, String version, String modelUrl, String status) {
this.modelName = modelName;
this.version = version;
this.modelUrl = modelUrl;
this.status = status;
}
/**
* Returns the model name.
*
* @return the model name
*/
public String getModelName() {
return modelName;
}
/**
* Returns the model version.
*
* @return the model version
*/
public String getVersion() {
return version;
}
/**
* Returns the model url.
*
* @return the model url
*/
public String getModelUrl() {
return modelUrl;
}
/**
* Returns the model loading status.
*
* @return the model loading status
*/
public String getStatus() {
return status;
}
}
}
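/*
 * A response sketch, assuming the default Gson field naming used when this
 * object is serialized by NettyUtils.sendJsonResponse (hypothetical values):
 *
 *   {
 *     "nextPageToken": "100",
 *     "models": [
 *       {"modelName": "mlp", "version": "1.0",
 *        "modelUrl": "https://example.com/mlp.zip", "status": "READY"}
 *     ]
 *   }
 */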
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/list/ListPagination.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http.list;
import ai.djl.serving.util.NettyUtils;
import io.netty.handler.codec.http.QueryStringDecoder;
/** A pagination helper for items in the list responses. */
public final class ListPagination {
private int pageToken;
private int last;
/**
* Constructs a new {@link ListPagination}.
*
* @param decoder the query with the pagination data
* @param keysSize the number of items to paginate over
*/
public ListPagination(QueryStringDecoder decoder, int keysSize) {
int limit = NettyUtils.getIntParameter(decoder, "limit", 100);
pageToken = NettyUtils.getIntParameter(decoder, "next_page_token", 0);
if (limit > 100 || limit < 0) {
limit = 100;
}
if (pageToken < 0) {
pageToken = 0;
}
last = pageToken + limit;
if (last > keysSize) {
last = keysSize;
}
}
/**
* Returns the current page token.
*
* @return the current page token
*/
public int getPageToken() {
return pageToken;
}
/**
* Returns the last item in the pagination.
*
* @return the last item in the pagination
*/
public int getLast() {
return last;
}
}
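/*
 * A worked example of the clamping above: with 250 keys and no parameters,
 * limit defaults to 100 and pageToken to 0, so a caller walks the pages by
 * echoing each response's nextPageToken back as next_page_token:
 *
 *   GET /models                      -> items 0..99,    nextPageToken "100"
 *   GET /models?next_page_token=100  -> items 100..199, nextPageToken "200"
 *   GET /models?next_page_token=200  -> items 200..249  (no further token)
 */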
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/list/ListWorkflowsResponse.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.http.list;
import java.util.ArrayList;
import java.util.List;
/** A class that holds information about the currently registered workflows. */
public class ListWorkflowsResponse {
private String nextPageToken;
private List<WorkflowItem> workflows;
/** Constructs a new {@code ListWorkflowsResponse} instance. */
public ListWorkflowsResponse() {
workflows = new ArrayList<>();
}
/**
* Returns the next page token.
*
* @return the next page token
*/
public String getNextPageToken() {
return nextPageToken;
}
/**
* Sets the next page token.
*
* @param nextPageToken the next page token
*/
public void setNextPageToken(String nextPageToken) {
this.nextPageToken = nextPageToken;
}
/**
* Returns a list of workflows.
*
* @return a list of workflows
*/
public List<WorkflowItem> getWorkflows() {
return workflows;
}
/**
     * Adds the workflow to the list.
     *
     * @param workflowName the workflow name
     * @param version the workflow version
*/
public void addWorkflow(String workflowName, String version) {
workflows.add(new WorkflowItem(workflowName, version));
}
    /** A class that holds a workflow's name and version. */
public static final class WorkflowItem {
private String workflowName;
private String version;
/** Constructs a new {@code WorkflowItem} instance. */
public WorkflowItem() {}
/**
         * Constructs a new {@code WorkflowItem} instance with workflow name and version.
*
* @param workflowName the workflow name
* @param version the workflow version
*/
public WorkflowItem(String workflowName, String version) {
this.workflowName = workflowName;
this.version = version;
}
/**
* Returns the workflow name.
*
* @return the workflow name
*/
public String getWorkflowName() {
return workflowName;
}
/**
* Returns the workflow version.
*
* @return the workflow version
*/
public String getVersion() {
return version;
}
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/http/list/package-info.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes that handle REST API list responses. */
package ai.djl.serving.http.list;
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/models/Endpoint.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.models;
import ai.djl.serving.workflow.Workflow;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/** A class that represents a webservice endpoint. */
public class Endpoint {
private List<Workflow> workflows;
private Map<String, Integer> map;
private AtomicInteger position;
/** Constructs an {@code Endpoint} instance. */
public Endpoint() {
workflows = new ArrayList<>();
map = new ConcurrentHashMap<>();
position = new AtomicInteger(0);
}
/**
     * Adds a workflow to the endpoint.
*
* @param workflow the workflow to be added
* @return true if add success
*/
public synchronized boolean add(Workflow workflow) {
String version = workflow.getVersion();
if (version == null) {
if (workflows.isEmpty()) {
map.put("default", 0);
return workflows.add(workflow);
}
return false;
}
if (map.containsKey(version)) {
return false;
}
map.put(version, workflows.size());
return workflows.add(workflow);
}
/**
* Returns the {@link Workflow}s associated with the endpoint.
*
* @return the {@link Workflow}s associated with the endpoint
*/
public List<Workflow> getWorkflows() {
return workflows;
}
/**
* Removes a workflow version from the {@code Endpoint}.
*
* @param version the workflow version
     * @return the removed workflow, or {@code null} if the specified version doesn't exist
*/
public synchronized Workflow remove(String version) {
if (version == null) {
if (workflows.isEmpty()) {
return null;
}
Workflow workflow = workflows.remove(0);
reIndex();
return workflow;
}
Integer index = map.remove(version);
if (index == null) {
return null;
}
Workflow workflow = workflows.remove((int) index);
reIndex();
return workflow;
}
/**
* Returns the {@code Workflow} for the specified version.
*
* @param version the version of the workflow to retrieve
* @return the {@code Workflow} for the specified version
*/
public Workflow get(String version) {
Integer index = map.get(version);
if (index == null) {
return null;
}
return workflows.get(index);
}
/**
* Returns the next version of workflow to serve the inference request.
*
* @return the next version of workflow to serve the inference request
*/
public Workflow next() {
int size = workflows.size();
if (size == 1) {
return workflows.get(0);
}
int index = position.getAndUpdate(operand -> (operand + 1) % size);
return workflows.get(index);
}
private void reIndex() {
map.clear();
int size = workflows.size();
for (int i = 0; i < size; ++i) {
Workflow workflow = workflows.get(i);
String version = workflow.getVersion();
if (version != null) {
map.put(version, i);
}
}
}
/** Closes the {@code Endpoint}. */
public void close() {
for (Workflow workflow : workflows) {
workflow.close();
}
}
}
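/*
 * A round-robin sketch of next(): the position counter wraps modulo the list
 * size, so successive inference requests alternate across versions
 * (hypothetical workflows):
 *
 *   Endpoint endpoint = new Endpoint();
 *   endpoint.add(workflowV1); // version "1.0"
 *   endpoint.add(workflowV2); // version "2.0"
 *   endpoint.next();          // workflowV1
 *   endpoint.next();          // workflowV2
 *   endpoint.next();          // workflowV1 again
 */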
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/models/ModelManager.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.models;
import ai.djl.ModelException;
import ai.djl.metric.Dimension;
import ai.djl.metric.Metric;
import ai.djl.metric.Unit;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.http.BadRequestException;
import ai.djl.serving.http.DescribeWorkflowResponse;
import ai.djl.serving.http.StatusResponse;
import ai.djl.serving.plugins.DependencyManager;
import ai.djl.serving.util.MutableClassLoader;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.WorkLoadManager;
import ai.djl.serving.wlm.WorkerPool;
import ai.djl.serving.wlm.WorkerPoolConfig;
import ai.djl.serving.workflow.Workflow;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
/** A class that is in charge of managing models. */
public final class ModelManager {
private static final Logger logger = LoggerFactory.getLogger(ModelManager.class);
private static final Logger MODEL_METRIC = LoggerFactory.getLogger("model_metric");
private static ModelManager modelManager = new ModelManager();
private WorkLoadManager wlm;
private Map<String, Endpoint> endpoints;
private Set<String> startupWorkflows;
private ModelManager() {
wlm = new WorkLoadManager();
endpoints = new ConcurrentHashMap<>();
startupWorkflows = new HashSet<>();
}
/**
* Returns the singleton {@code ModelManager} instance.
*
* @return the singleton {@code ModelManager} instance
*/
public static ModelManager getInstance() {
return modelManager;
}
/**
* Registers and loads a {@link Workflow}.
*
* @param workflow the workflow to register
* @return a {@code CompletableFuture} instance
*/
public CompletableFuture<Void> registerWorkflow(Workflow workflow) {
Endpoint endpoint = endpoints.computeIfAbsent(workflow.getName(), k -> new Endpoint());
if (!endpoint.add(workflow)) {
// workflow already exists
throw new BadRequestException(409, "Workflow " + workflow + " is already registered.");
}
return CompletableFuture.supplyAsync(
() -> {
long begin = System.nanoTime();
Map<String, WorkerPoolConfig<Input, Output>> wpcs = workflow.getWpcMap();
for (Map.Entry<String, WorkerPoolConfig<Input, Output>> entry :
wpcs.entrySet()) {
String key = entry.getKey();
WorkerPoolConfig<Input, Output> workerPoolConfig = entry.getValue();
try {
// download model and configure per model settings
workerPoolConfig.initialize();
// Install engine if necessary
String engine = null;
if (workerPoolConfig instanceof ModelInfo) {
ModelInfo<Input, Output> model =
(ModelInfo<Input, Output>) workerPoolConfig;
engine = model.getEngineName();
DependencyManager dm = DependencyManager.getInstance();
dm.installEngine(engine);
Thread.currentThread()
.setContextClassLoader(MutableClassLoader.getInstance());
WorkerPool<Input, Output> wp = wlm.getWorkerPool(model);
if (wp != null) {
wpcs.put(key, wp.getWpc());
wp.increaseRef();
logger.info("Model {} is registered by other workflow", model);
continue;
}
}
wlm.registerWorkerPool(workerPoolConfig);
String[] devices = workerPoolConfig.getLoadOnDevices();
if (engine != null) {
logger.info(
"Loading model on {}:{}", engine, Arrays.toString(devices));
} else {
logger.info("Loading worker: {}", Arrays.toString(devices));
}
ExecutorService pool = null;
List<Future<?>> futures = new ArrayList<>();
if (workerPoolConfig.isParallelLoading()) {
pool = Executors.newFixedThreadPool(devices.length);
}
for (String deviceName : devices) {
if (pool != null) {
futures.add(
pool.submit(
() ->
initWorkers(
workerPoolConfig, deviceName)));
} else {
initWorkers(workerPoolConfig, deviceName);
}
}
if (pool != null) {
pool.shutdown();
for (Future<?> future : futures) {
try {
future.get();
} catch (ExecutionException e) {
throw new CompletionException(e.getCause()); // NOPMD
} catch (InterruptedException e) {
throw new AssertionError("Worker startup interrupted.", e);
}
}
}
} catch (IOException | ModelException e) {
throw new CompletionException(e);
}
}
workflow.prepare(wlm);
long duration = (System.nanoTime() - begin) / 1000;
Dimension dimension = new Dimension("Model", workflow.getName());
Metric metric =
new Metric("RegisterWorkflow", duration, Unit.MICROSECONDS, dimension);
MODEL_METRIC.info("{}", metric);
return null;
});
}
/**
* Unregisters a workflow by its name and version.
*
* @param workflowName the workflow name to be unregistered (may also be the same as a model
* name)
* @param version the model version
* @return {@code true} if unregister success
*/
public boolean unregisterWorkflow(String workflowName, String version) {
Endpoint endpoint = endpoints.get(workflowName);
if (endpoint == null) {
logger.warn("Model not found: {}", workflowName);
return false;
}
Set<WorkerPoolConfig<Input, Output>> candidateWpcsToUnregister = new HashSet<>();
if (version == null) {
// unregister all versions
for (Workflow workflow : endpoint.getWorkflows()) {
candidateWpcsToUnregister.addAll(workflow.getWpcs());
workflow.close();
}
startupWorkflows.remove(workflowName);
endpoint.getWorkflows().clear();
logger.info("Model {} unregistered.", workflowName);
} else {
Workflow workflow = endpoint.remove(version);
if (workflow == null) {
logger.warn("Workflow not found: {}:{}", workflowName, version);
return false;
}
candidateWpcsToUnregister.addAll(workflow.getWpcs());
workflow.close();
startupWorkflows.remove(workflowName);
logger.info("Model {}/{} unregistered.", workflowName, version);
}
if (endpoint.getWorkflows().isEmpty()) {
endpoints.remove(workflowName);
}
// Unregister candidate models if they are not used for a remaining endpoint
candidateWpcsToUnregister.removeAll(getWpcs());
for (WorkerPoolConfig<Input, Output> wpc : candidateWpcsToUnregister) {
wlm.unregisterWorkerPool(wpc);
}
return true;
}
/**
* Initializes the workers for a workerPoolConfig.
*
* @param wpc the workerPoolConfig to scale workers for
* @param deviceName the device for the workerPoolConfig
* @see WorkerPool#initWorkers(String)
*/
public void initWorkers(WorkerPoolConfig<Input, Output> wpc, String deviceName) {
Thread.currentThread().setContextClassLoader(MutableClassLoader.getInstance());
wlm.getWorkerPool(wpc).initWorkers(deviceName);
}
/**
* Scales the workers for a model.
*
* @param wpc the model to scale workers for
* @param deviceName the device for the model
* @param minWorkers the min workers, -1 for auto-scale
* @param maxWorkers the max workers, -1 for auto-scale
* @see WorkerPool#scaleWorkers(String, int, int)
*/
public void scaleWorkers(
WorkerPoolConfig<Input, Output> wpc,
String deviceName,
int minWorkers,
int maxWorkers) {
Thread.currentThread().setContextClassLoader(MutableClassLoader.getInstance());
wlm.getWorkerPool(wpc).scaleWorkers(deviceName, minWorkers, maxWorkers);
}
/**
* Returns the registry of all endpoints.
*
* @return the registry of all endpoints
*/
public Map<String, Endpoint> getEndpoints() {
return endpoints;
}
/**
* Returns all {@link WorkerPoolConfig}s in an endpoint.
*
* @return all {@link WorkerPoolConfig}s in an endpoint
*/
public Set<WorkerPoolConfig<Input, Output>> getWpcs() {
return getEndpoints().values().stream()
.flatMap(e -> e.getWorkflows().stream())
.flatMap(w -> w.getWpcs().stream())
.collect(Collectors.toSet());
}
/**
     * Returns a specific version of a workflow.
     *
     * @param workflowName the workflow name
     * @param version the workflow version
     * @param predict true to select a model in a load-balanced fashion
     * @return the workflow
*/
public Workflow getWorkflow(String workflowName, String version, boolean predict) {
Endpoint endpoint = endpoints.get(workflowName);
if (endpoint == null) {
return null;
}
if (version == null) {
if (endpoint.getWorkflows().isEmpty()) {
return null;
}
if (predict) {
return endpoint.next();
}
return endpoint.getWorkflows().get(0);
}
return endpoint.get(version);
}
/**
* Returns the {@link WorkLoadManager}.
*
* @return the {@link WorkLoadManager}
*/
public WorkLoadManager getWorkLoadManager() {
return wlm;
}
/**
* Returns a set of models or workflows that were loaded at startup.
*
* @return a set of models or workflows that were loaded at startup
*/
public Set<String> getStartupWorkflows() {
return startupWorkflows;
}
/**
* Returns the single startup workflow.
*
     * <p>Returns a value only if exactly one startup workflow was passed in. Used for integration
     * with SageMaker SME and single-model services.
*
* @return the workflow name
*/
public Optional<String> getSingleStartupWorkflow() {
Set<String> startModels = getStartupWorkflows();
if (startModels.size() == 1) {
return Optional.ofNullable(startModels.iterator().next());
}
return Optional.empty();
}
/**
* Runs an inference job by assigning the job to the next free worker.
*
* @param workflow the workflow to run
* @param input the input to the task
* @return the {@code CompletableFuture}
*/
public CompletableFuture<Output> runJob(Workflow workflow, Input input) {
return workflow.execute(wlm, input);
}
/**
* Returns a list of worker information for specified workflow.
*
* @param workflowName the workflow name to be queried
* @param version the model version to be queried
* @return model and workers information for specified workflow
* @throws ModelNotFoundException if specified workflow not found
*/
public DescribeWorkflowResponse[] describeWorkflow(String workflowName, String version)
throws ModelNotFoundException {
Endpoint endpoint = endpoints.get(workflowName);
if (endpoint == null) {
throw new ModelNotFoundException("Workflow not found: " + workflowName);
}
List<Workflow> list = null;
if (version == null) {
list = endpoint.getWorkflows();
} else {
Workflow wf = endpoint.get(version);
if (wf != null) {
list = Collections.singletonList(wf);
}
}
if (list == null || list.isEmpty()) {
StringBuilder sb = new StringBuilder("Workflow not found: ");
sb.append(workflowName);
if (version != null) {
sb.append('/').append(version);
}
throw new ModelNotFoundException("Workflow not found: " + sb);
}
DescribeWorkflowResponse[] array = new DescribeWorkflowResponse[list.size()];
int index = 0;
for (Workflow workflow : list) {
array[index++] = new DescribeWorkflowResponse(workflow);
}
return array;
}
/**
     * Collects the model server health status for all worker pools.
     *
     * @return a {@code CompletableFuture} that eventually completes with the status map
*/
public CompletableFuture<Map<String, Object>> workerStatus() {
return CompletableFuture.supplyAsync(
() -> {
boolean hasFailure = false;
boolean hasPending = false;
Map<String, StatusResponse> data = new LinkedHashMap<>(); // NOPMD
for (Endpoint endpoint : endpoints.values()) {
for (Workflow wf : endpoint.getWorkflows()) {
String workflowName = wf.getName();
for (WorkerPoolConfig<Input, Output> wpc : wf.getWpcs()) {
String modelName = wpc.getId();
if (!modelName.equals(workflowName)) {
modelName = workflowName + ':' + modelName; // NOPMD
}
WorkerPoolConfig.Status status = wpc.getStatus();
switch (status) {
case FAILED:
data.put(modelName, new StatusResponse(status.name()));
hasFailure = true;
break;
case PENDING:
data.put(modelName, new StatusResponse(status.name()));
hasPending = true;
break;
default:
if (wlm.getWorkerPool(wpc).isFullyScaled()) {
data.put(modelName, new StatusResponse("Healthy"));
} else {
data.put(modelName, new StatusResponse("Unhealthy"));
}
break;
}
}
}
}
Map<String, Object> modelInfos = new LinkedHashMap<>(); // NOPMD
modelInfos.put("hasFailure", hasFailure);
modelInfos.put("hasPending", hasPending);
modelInfos.put("data", data);
return modelInfos;
});
}
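// Usage sketch (hypothetical caller): inspect the aggregated health map produced
// above; "hasFailure", "hasPending" and "data" are the keys populated by
// workerStatus().
//
//   Map<String, Object> status = manager.workerStatus().join();
//   boolean healthy = !(Boolean) status.get("hasFailure");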
/**
* Clears everything in the {@link ModelManager}.
*
* <p>Can be run between tests.
*/
public void clear() {
wlm.close();
for (Endpoint endpoint : endpoints.values()) {
endpoint.close();
}
wlm = new WorkLoadManager();
endpoints = new ConcurrentHashMap<>();
startupWorkflows = new HashSet<>();
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/models/package-info.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains classes that manage model lifecycle. */
package ai.djl.serving.models;
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/plugins/ComponentRegistry.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import ai.djl.serving.plugins.PluginMetaData.Lifecycle;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* A registry for plugin components.
*
* @author erik.bamberg@web.de
*/
public class ComponentRegistry {
private static Class<?>[] pluginInterfaces = {RequestHandler.class};
private Map<Class<?>, Set<ComponentEntry>> componentRegistry;
/** Constructs a registry. */
public ComponentRegistry() {
componentRegistry = new ConcurrentHashMap<>();
}
/**
* Registers a new component and assigns the plug-in as the source of this component.
*
* @param plugin the plug-in this component is linked to
* @param component the component
*/
public void register(PluginMetaData plugin, RequestHandler<?> component) {
for (Class<?> interfaceClass : pluginInterfaces) {
if (interfaceClass.isAssignableFrom(component.getClass())) {
componentRegistry
.computeIfAbsent(interfaceClass, k -> new HashSet<>())
.add(new ComponentEntry(plugin, component));
}
}
}
/**
* Returns a set of plug-in components implementing the specific service interface.
*
* <p>Only active plug-ins are returned, which are fully initialised at this point.
*
* <p>{@code Set<RequestHandler>
* allActiveRequestHandler=findImplementations(RequestHandler.class)}
*
* @param <T> generic type of service interface
* @param pluginInterface the specific service interface
* @return a set of all plugin components implementing this service interface
*/
@SuppressWarnings("unchecked")
public <T> Set<T> findImplementations(Class<T> pluginInterface) {
return (Set<T>)
Collections.unmodifiableSet(
componentRegistry.getOrDefault(pluginInterface, new HashSet<>()).stream()
.filter(ComponentEntry::isPluginActive)
.map(ComponentEntry::getComponent)
.collect(Collectors.toSet()));
}
private static class ComponentEntry {
private PluginMetaData plugin;
private Object component;
public ComponentEntry(PluginMetaData plugin, Object component) {
super();
this.plugin = plugin;
this.component = component;
}
public boolean isPluginActive() {
return plugin.getState() == Lifecycle.ACTIVE;
}
public Object getComponent() {
return component;
}
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/plugins/DependencyManager.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineProvider;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooProvider;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.MutableClassLoader;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ServiceLoader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
/** {@code DependencyManager} is responsible for managing extra Maven dependencies. */
public final class DependencyManager {
private static final Logger logger = LoggerFactory.getLogger(DependencyManager.class);
private static final DependencyManager INSTANCE = new DependencyManager();
private static final String OSS_URL =
"https://oss.sonatype.org/service/local/repositories/snapshots/content/";
private DependencyManager() {}
/**
* Returns the singleton instance of {@code DependencyManager}.
*
* @return the singleton instance of {@code DependencyManager}
*/
public static DependencyManager getInstance() {
return INSTANCE;
}
/**
* Installs the engine dependencies if needed.
*
* @param engineName the engine name
* @throws IOException if failed to download the dependency
*/
public void installEngine(String engineName) throws IOException {
if (Engine.hasEngine(engineName)) {
return;
}
String djlVersion = Engine.getDjlVersion();
switch (engineName) {
case "MXNet":
installDependency("ai.djl.mxnet:mxnet-engine:" + djlVersion);
installDependency("ai.djl.mxnet:mxnet-model-zoo:" + djlVersion);
break;
case "XGBoost":
installDependency("ai.djl.ml.xgboost:xgboost:" + djlVersion);
// TODO: Avoid hard-coded version
installDependency("commons-logging:commons-logging:1.2");
break;
case "Llama":
installDependency("ai.djl.llama:llama:" + djlVersion);
break;
default:
break;
}
refreshProviders();
}
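// Usage sketch: lazily install an engine's dependencies before first use; the
// engine name must be one of the cases handled above (e.g. "MXNet").
//
//   DependencyManager.getInstance().installEngine("MXNet");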
/**
* Installs the maven dependency.
*
* @param dependency the maven dependency
* @throws IOException if failed to download the dependency
*/
public synchronized void installDependency(String dependency) throws IOException {
String[] tokens = dependency.split(":");
if (tokens.length < 3) {
throw new IllegalArgumentException("Invalid dependency: " + dependency);
}
String serverHome = ConfigManager.getModelServerHome();
Path depDir = Paths.get(serverHome, "deps");
Files.createDirectories(depDir);
logger.info("Loading dependency: {}", dependency);
String groupId = tokens[0].replace('.', '/');
String artifactId = tokens[1];
String version = tokens[2];
String name;
if (tokens.length == 3) {
name = artifactId + '-' + version + ".jar";
} else {
name = artifactId + '-' + version + '-' + tokens[3] + ".jar";
}
Path file = depDir.resolve(name);
if (Files.isRegularFile(file)) {
logger.info("Found existing dependency: {}", name);
} else {
String link;
if (version.endsWith("-SNAPSHOT")) {
link = getSnapshotUrl(groupId, artifactId, version) + ".jar";
} else {
String maven = "https://search.maven.org/remotecontent?filepath=";
link = maven + groupId + '/' + artifactId + '/' + version + '/' + name;
}
logger.info("Downloading dependency: {}.", link);
Path tmp = depDir.resolve(name + ".tmp");
try (InputStream is = Utils.openUrl(link)) {
Files.copy(is, tmp);
Utils.moveQuietly(tmp, file);
} finally {
Files.deleteIfExists(tmp);
}
}
MutableClassLoader mcl = MutableClassLoader.getInstance();
mcl.addURL(file.toUri().toURL());
}
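// Usage sketch: install a Maven artifact at runtime using
// groupId:artifactId:version[:classifier] coordinates (the version below is
// illustrative).
//
//   DependencyManager.getInstance().installDependency("ai.djl.mxnet:mxnet-engine:0.28.0");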
/** Initializes dependencies for the model server. */
public void initialize() {
ModelZoo.setModelZooResolver(this::resolveModelZoo);
refreshProviders();
}
/** Loads engine providers and model zoo providers from the "deps" folder. */
void refreshProviders() {
// refresh EngineProvider
MutableClassLoader mcl = MutableClassLoader.getInstance();
for (EngineProvider provider : ServiceLoader.load(EngineProvider.class, mcl)) {
if (!Engine.hasEngine(provider.getEngineName())) {
Engine.registerEngine(provider);
}
}
// refresh ZooProvider
for (ZooProvider provider : ServiceLoader.load(ZooProvider.class, mcl)) {
ModelZoo.registerModelZoo(provider);
}
}
private static String getSnapshotUrl(String groupId, String artifactId, String version)
throws IOException {
String groupPath = groupId.replace('.', '/');
String url = OSS_URL + groupPath + '/' + artifactId + '/' + version + '/';
try (InputStream is = Utils.openUrl(url + "maven-metadata.xml")) {
Document doc = parseXml(is);
NodeList nl = doc.getElementsByTagName("snapshot");
Element element = (Element) nl.item(0);
String timestamp = getElementValue(element, "timestamp");
String buildNumber = getElementValue(element, "buildNumber");
String v = version.substring(0, version.length() - 9);
return url + artifactId + '-' + v + '-' + timestamp + '-' + buildNumber;
}
}
private static Document parseXml(InputStream is) throws IOException {
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
dbf.setXIncludeAware(false);
dbf.setExpandEntityReferences(false);
DocumentBuilder db = dbf.newDocumentBuilder();
return db.parse(is);
} catch (SAXException | ParserConfigurationException e) {
throw new AssertionError("Failed to parse maven metadata", e);
}
}
private ModelZoo resolveModelZoo(String groupId) {
try {
if ("ai.djl.mxnet".equals(groupId)) {
installEngine("MXNet");
} else if ("ai.djl.huggingface.gguf".equals(groupId)) {
installEngine("Llama");
} else {
logger.warn("Unknown model zoo: {}", groupId);
}
} catch (IOException e) {
logger.warn("Failed to install dependencies for model zoo:{}", groupId);
}
MutableClassLoader mcl = MutableClassLoader.getInstance();
for (ZooProvider provider : ServiceLoader.load(ZooProvider.class, mcl)) {
ModelZoo zoo = provider.getModelZoo();
if (groupId.equals(zoo.getGroupId())) {
return provider.getModelZoo();
}
}
return null;
}
private String getOrtVersion(String djlVersion) throws IOException {
String pom;
if (djlVersion.endsWith("-SNAPSHOT")) {
String groupId = "ai.djl.onnxruntime";
pom = getSnapshotUrl(groupId, "onnxruntime-engine", djlVersion) + ".pom";
} else {
pom =
"https://search.maven.org/remotecontent?filepath=ai/djl/onnxruntime/onnxruntime-engine/"
+ djlVersion
+ "/onnxruntime-engine-"
+ djlVersion
+ ".pom";
}
try (InputStream is = Utils.openUrl(pom)) {
Document doc = parseXml(is);
NodeList nl = doc.getElementsByTagName("dependency");
int len = nl.getLength();
for (int i = 0; i < len; ++i) {
Element element = (Element) nl.item(i);
String group = getElementValue(element, "groupId");
if ("com.microsoft.onnxruntime".equals(group)) {
return getElementValue(element, "version");
}
}
}
throw new AssertionError("Failed to find onnxruntime version.");
}
private static String getElementValue(Element element, String name) {
NodeList nl = element.getElementsByTagName(name);
Element node = (Element) nl.item(0);
return node.getChildNodes().item(0).getTextContent();
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/plugins/FolderScanPluginManager.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import ai.djl.serving.plugins.PluginMetaData.Lifecycle;
import ai.djl.serving.util.ConfigManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* The {@link PluginManager} is responsible for loading and managing plugins from the file system.
*
* <p>The plugin folder configuration is received from the {@link ConfigManager} and usually
* defaults to {workpath}/plugins. The plugins use Java's SPI and have to implement interfaces from
* serving-api.
*
* @author erik.bamberg@web.de
*/
public class FolderScanPluginManager implements PluginManager {
private static final Logger logger = LoggerFactory.getLogger(FolderScanPluginManager.class);
private ConfigManager configManager;
private Map<String, PluginMetaData> pluginRegistry;
private ComponentRegistry componentRegistry;
/**
* Constructs a {@code PluginManager} instance.
*
* @param configManager an instance of the {@code ConfigManager} used to look up configuration
*     such as the plugin folder
*/
public FolderScanPluginManager(ConfigManager configManager) {
this.configManager = configManager;
this.componentRegistry = new ComponentRegistry();
}
/**
* Loads all plugins from the plugin folder and register them.
*
* @param failOnInit throw exception if failed to initialize the plugin
* @throws IOException when an error occurs during an IO operation
*/
public void loadPlugins(boolean failOnInit) throws IOException {
logger.info("scanning for plugins...");
URL[] pluginUrls = listPluginJars();
ClassLoader ucl =
AccessController.doPrivileged(
(PrivilegedAction<ClassLoader>) () -> new URLClassLoader(pluginUrls));
// phase 1: collect plugin information
pluginRegistry =
Collections.list(ucl.getResources("META-INF/plugin.definition")).parallelStream()
.map(PropertyFilePluginMetaDataReader::new)
.map(PropertyFilePluginMetaDataReader::read)
.distinct()
.collect(Collectors.toMap(PluginMetaData::getName, i -> i));
// phase 2: initialize components
for (PluginMetaData plugin : pluginRegistry.values()) {
logger.info("Loading plugin: {}", plugin);
if (pluginRegistry.keySet().containsAll(plugin.getDependencies())) {
try {
for (String handlerClassName : plugin.getComponentNames()) {
initializeComponent(ucl, plugin, handlerClassName);
}
plugin.changeState(Lifecycle.INITIALIZED);
} catch (Throwable t) {
if (failOnInit) {
throw new IllegalArgumentException("Failed to initialize plugin", t);
}
plugin.changeState(
Lifecycle.FAILED,
"failed to initialize plugin; caused by " + t.getMessage());
logger.error("failed to initialize plugin {}", plugin.getName(), t);
}
} else {
plugin.changeState(Lifecycle.FAILED, "required dependencies not found");
}
}
// phase 3: set active
pluginRegistry.values().stream()
.filter(plugin -> plugin.getState() == Lifecycle.INITIALIZED)
.filter(this::checkAllRequiredPluginsInitialized)
.forEach(plugin -> plugin.changeState(Lifecycle.ACTIVE, "plugin ready"));
logger.info("{} plug-ins found and loaded.", pluginRegistry.size());
}
/**
* Checks if all plug-ins required by this plugin are Initialized.
*
* @param plugin to check dependencies for state="Initialized"
* @return true if all plugins required by this one are in state "Initialized"
*/
private boolean checkAllRequiredPluginsInitialized(PluginMetaData plugin) {
for (String required : plugin.getDependencies()) {
PluginMetaData reqPlugin = pluginRegistry.get(required);
if (reqPlugin != null && reqPlugin.getState() != Lifecycle.INITIALIZED) {
return false;
}
}
return true;
}
@SuppressWarnings("rawtypes")
protected void initializeComponent(
ClassLoader ucl, PluginMetaData plugin, String handlerClassName)
throws ReflectiveOperationException, IntrospectionException {
@SuppressWarnings("unchecked")
Class<? extends RequestHandler> handlerClass =
(Class<? extends RequestHandler>) ucl.loadClass(handlerClassName);
RequestHandler<?> handler = handlerClass.getConstructor(new Class<?>[] {}).newInstance();
injectDependenciesIntoComponent(handler);
componentRegistry.register(plugin, handler);
}
/**
* Initializes a component by calling known setters to inject managers and other dependent
* plug-ins into it.
*
* @param component the component to get initialized
* @throws IntrospectionException when initialization fails.
* @throws InvocationTargetException when initialization fails.
* @throws ReflectiveOperationException when initialization fails.
*/
protected void injectDependenciesIntoComponent(Object component)
throws IntrospectionException, ReflectiveOperationException, InvocationTargetException {
BeanInfo beanInfo = Introspector.getBeanInfo(component.getClass());
for (PropertyDescriptor property : beanInfo.getPropertyDescriptors()) {
// TODO introduce kind of ServiceRegistry and inject all known Managers and
// others
// plug-ins
if ("pluginManager".equals(property.getName())) {
Method method = property.getWriteMethod();
if (method != null) {
method.invoke(component, this);
} else {
logger.warn(
"no accessible setter for pluginManager found in plugin {}; skipping injection",
component.getClass().getName());
}
}
}
}
/**
* Returns a set of plug-in components implementing the specific service interface.
*
* <p>Only active plug-ins are returned, which are fully initialised at this point.
*
* <p>{@code Set<RequestHandler>
* allActiveRequestHandler=findImplementations(RequestHandler.class)}
*
* @param <T> generic type of service interface
* @param pluginInterface the specific service interface
* @return a set of all plugin components implementing this service interface
*/
@Override
public <T> Set<T> findImplementations(Class<T> pluginInterface) {
return componentRegistry.findImplementations(pluginInterface);
}
private URL[] listPluginJars() throws IOException {
List<Path> pluginsFolders = configManager.getPluginFolder();
List<URL> ret = new ArrayList<>();
for (Path dir : pluginsFolders) {
if (dir == null || !Files.isDirectory(dir)) {
logger.info("plug-in folder not exists:{}", dir);
continue;
}
logger.info("scanning in plug-in folder :{}", dir);
try (Stream<Path> stream = Files.walk(dir)) {
stream.forEach(
f -> {
String name = f.toFile().getName().toLowerCase(Locale.ROOT);
try {
if (Files.isRegularFile(f) && name.endsWith(".jar")) {
ret.add(f.toUri().toURL());
}
} catch (MalformedURLException e) {
logger.error("scan plugins folder failed", e);
}
});
}
}
return ret.toArray(new URL[0]);
}
/**
* Lists all plugins.
*
* @return list of all plugins.
*/
@Override
public Collection<PluginMetaData> listPlugins() {
return Collections.unmodifiableCollection(pluginRegistry.values());
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/plugins/PluginManager.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import java.util.Collection;
import java.util.Set;
/**
* The Plugin Manager is responsible for loading and managing plugins from the filesystem.
*
* <p>The plugin folder configuration is received from the ConfigManager and usually defaults to
* {workpath}/plugins. The plugins use Java's SPI and have to implement interfaces from
* serving-api.
*
* @author erik.bamberg@web.de
*/
public interface PluginManager {
/**
* Returns a set of plug-in components implementing the specific service interface.
*
* <p>Only active plug-ins are returned, which are fully initialised at this point.
*
* <p>{@code Set<RequestHandler>
* allActiveRequestHandler=findImplementations(RequestHandler.class)}
*
* @param <T> generic type of service interface
* @param pluginInterface the specific service interface
* @return a set of all plugin components implementing this service interface
*/
<T> Set<T> findImplementations(Class<T> pluginInterface);
/**
* Returns a collection of all plugins registered.
*
* @return collection of all registered plugins.
*/
Collection<PluginMetaData> listPlugins();
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/plugins/PluginMetaData.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* Represents a loaded Plug-in.
*
* <p>A plug-in contains meta data, handler and resource mapping information, and references to
* the plug-in components.
*
* @author erik.bamberg@web.de
*/
public class PluginMetaData {
private static final Logger logger = LoggerFactory.getLogger(PluginMetaData.class);
enum Lifecycle {
INITIALIZING,
INITIALIZED,
ACTIVE,
INACTIVE,
FAILED
}
private String name;
private URL url;
private List<String> componentNames;
private List<String> dependencies;
private Lifecycle state;
private String error;
/**
* Constructs a plug-in meta-info.
*
* @param name of the plug-in
* @param url where this plug-in is loaded from
* @param componentNames of all exported components of the plug-in
* @param dependencies the plug-ins required for this plug-in to run
*/
public PluginMetaData(
String name, URL url, List<String> componentNames, List<String> dependencies) {
this.name = name;
this.url = url;
this.componentNames = componentNames;
this.state = Lifecycle.INITIALIZING;
this.dependencies = dependencies;
}
/**
* Returns the name of the plug-in.
*
* @return name of the plug-in
*/
public String getName() {
return name;
}
/**
* Returns the class names of the registered-components.
*
* @return the class names of the registered-components
*/
public List<String> getComponentNames() {
return Collections.unmodifiableList(componentNames);
}
/**
* Returns the state-property.
*
* @return the state of this class.
*/
public Lifecycle getState() {
return state;
}
/**
* Sets the property state of the object.
*
* @param state the state to set
*/
public void changeState(Lifecycle state) {
this.state = state;
logger.info("plugin {} changed state to {}", name, state);
}
/**
* Sets the property state of the object and log message.
*
* @param state the state to set
* @param logMessage why this status is set
*/
public void changeState(Lifecycle state, String logMessage) {
this.state = state;
if (state == Lifecycle.FAILED) {
error = logMessage;
logger.warn("plugin {} changed state to {} reason: {}", name, state, logMessage);
} else {
logger.info("plugin {} changed state to {} reason: {}", name, state, logMessage);
}
}
/**
* Returns the url-property.
*
* @return the url of this class
*/
public URL getUrl() {
return url;
}
/**
* Returns a list of required plug-in dependencies.
*
* @return the names of the plug-ins required by this class
*/
public List<String> getDependencies() {
return dependencies;
}
/**
* Returns the error-property.
*
* @return the error of this class
*/
public String getError() {
return error;
}
/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof PluginMetaData)) {
return false;
}
PluginMetaData that = (PluginMetaData) o;
return name.equals(that.name);
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return Objects.hash(name);
}
/** {@inheritDoc} */
@Override
public String toString() {
return '{' + name + '/' + url + '}';
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/plugins/PluginMetaDataReader.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
/**
* Reads plugin-metadata from a URL and parses the content.
*
* <p>Implementations typically read a definition file like {@code plugin.definition} from the
* plugin jar file.
*
* @author erik.bamberg@web.de
*/
public interface PluginMetaDataReader {
/**
* Reads plugin-metadata from a URL.
*
* @return a parsed plugin metadata object or null if no metadata can be found
*/
PluginMetaData read();
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/plugins/PropertyFilePluginMetaDataReader.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.plugins;
import ai.djl.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
/**
* Reads plugin-metadata from a {@code plugin.definition} file, which has to be in property file
* format.
*
* <pre>
* name=static-file-plugin
* export=ai.djl.serving.plugins.staticfile.HttpStaticClasspathResourceHandler
* </pre>
*
* @author erik.bamberg@web.de
*/
public class PropertyFilePluginMetaDataReader implements PluginMetaDataReader {
private static final String PROPERTY_PLUGIN_NAME = "name";
private static final String PROPERTY_PLUGIN_EXPORT = "export";
private static final String PROPERTY_PLUGIN_REQUIRES = "requires";
private static final Logger logger =
LoggerFactory.getLogger(PropertyFilePluginMetaDataReader.class);
private Properties properties;
private URL url;
/**
* Constructs a {@code PropertyFilePluginMetaDataReader} instance to read meta-information from
* the URL.
*
* @param url to read the plug-in meta-data from
*/
public PropertyFilePluginMetaDataReader(URL url) {
this.url = url;
properties = new Properties();
try (InputStream is = Utils.openUrl(url)) {
properties.load(is);
} catch (IOException e) {
logger.error("io error while receiving plugin.definition file", e);
}
}
/** {@inheritDoc} */
@Override
public PluginMetaData read() {
String pluginName =
properties.getProperty(
PROPERTY_PLUGIN_NAME, "plugin_" + UUID.randomUUID().toString());
List<String> exportedComponents = getPropertyAsStringList(PROPERTY_PLUGIN_EXPORT);
List<String> requires = getPropertyAsStringList(PROPERTY_PLUGIN_REQUIRES);
logger.info("Plugin found: {}/{}", pluginName, url);
return new PluginMetaData(pluginName, url, exportedComponents, requires);
}
private List<String> getPropertyAsStringList(String property) {
String rhNames = properties.getProperty(property, "");
List<String> exportedComponents;
if (!rhNames.isEmpty()) {
exportedComponents = Arrays.asList(rhNames.split("\\s*,\\s*"));
} else {
exportedComponents = Collections.emptyList();
}
return exportedComponents;
}
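// Definition sketch (hypothetical plug-in) showing all three recognized keys;
// "export" lists component class names and "requires" names other plug-ins.
// The class and plug-in names below are illustrative.
//
//   name=console-plugin
//   export=com.example.ConsoleHandler,com.example.LogHandler
//   requires=static-file-plugin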
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/util/ConfigManager.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import ai.djl.serving.Arguments;
import ai.djl.serving.wlm.util.WlmConfigManager;
import ai.djl.util.NeuronUtils;
import ai.djl.util.Utils;
import ai.djl.util.cuda.CudaUtils;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.util.SelfSignedCertificate;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.ManagementFactory;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.security.KeyException;
import java.security.KeyFactory;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.function.Consumer;
/** A class that holds configuration information. */
public final class ConfigManager {
private static final String INFERENCE_ADDRESS = "inference_address";
private static final String MANAGEMENT_ADDRESS = "management_address";
private static final String LOAD_MODELS = "load_models";
private static final String WAIT_MODEL_LOADING = "wait_model_loading";
private static final String ALLOW_MULTI_STATUS = "allow_multi_status";
private static final String NUMBER_OF_NETTY_THREADS = "number_of_netty_threads";
private static final String JOB_QUEUE_SIZE = "job_queue_size";
private static final String MAX_IDLE_TIME = "max_idle_time";
private static final String BATCH_SIZE = "batch_size";
private static final String MAX_BATCH_DELAY = "max_batch_delay";
private static final String RESERVED_MEMORY_MB = "reserved_memory_mb";
private static final String CORS_ALLOWED_ORIGIN = "cors_allowed_origin";
private static final String CORS_ALLOWED_METHODS = "cors_allowed_methods";
private static final String CORS_ALLOWED_HEADERS = "cors_allowed_headers";
private static final String KEYSTORE = "keystore";
private static final String KEYSTORE_PASS = "keystore_pass";
private static final String KEYSTORE_TYPE = "keystore_type";
private static final String CERTIFICATE_FILE = "certificate_file";
private static final String PRIVATE_KEY_FILE = "private_key_file";
private static final String MAX_REQUEST_SIZE = "max_request_size";
private static final String MODEL_STORE = "model_store";
private static final String MODEL_URL_PATTERN = "model_url_pattern";
private static final String LOAD_ON_DEVICES = "load_on_devices";
private static final String PLUGIN_FOLDER = "plugin_folder";
private static final String CHUNKED_READ_TIMEOUT = "chunked_read_timeout";
// Configuration which are not documented or enabled through environment variables
private static final String USE_NATIVE_IO = "use_native_io";
private static final String IO_RATIO = "io_ratio";
private static final int DEF_MAX_REQUEST_SIZE = 64 * 1024 * 1024;
private static ConfigManager instance;
private Properties prop;
private ConfigManager(Arguments args) {
prop = new Properties();
Path file = args.getConfigFile();
if (file != null) {
try (InputStream stream = Files.newInputStream(file)) {
prop.load(stream);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to read configuration file", e);
}
prop.put("configFile", file.toString());
}
String modelStore = args.getModelStore();
if (modelStore != null) {
prop.setProperty(MODEL_STORE, modelStore);
}
String[] models = args.getModels();
if (models != null) {
prop.setProperty(LOAD_MODELS, String.join(",", models));
}
for (Map.Entry<String, String> env : Utils.getenv().entrySet()) {
String key = env.getKey();
if (key.startsWith("SERVING_")) {
prop.put(key.substring(8).toLowerCase(Locale.ROOT), env.getValue());
}
}
}
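// Configuration sketch (illustrative values): environment variables prefixed with
// SERVING_ are lower-cased and merged into the property set by the loop above,
// e.g. SERVING_JOB_QUEUE_SIZE=200 becomes job_queue_size=200.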
/**
* Initializes the global {@code ConfigManager} instance.
*
* @param args the command line arguments
*/
public static void init(Arguments args) {
instance = new ConfigManager(args);
// set default system properties
if (System.getProperty("ai.djl.pytorch.num_interop_threads") == null) {
System.setProperty("ai.djl.pytorch.num_interop_threads", "1");
}
if (System.getProperty("ai.djl.pytorch.num_threads") == null
&& Utils.getenv("OMP_NUM_THREADS") == null) {
System.setProperty("ai.djl.pytorch.num_threads", "1");
}
if (System.getProperty("ai.djl.onnxruntime.num_interop_threads") == null) {
System.setProperty("ai.djl.onnxruntime.num_interop_threads", "1");
}
if (System.getProperty("ai.djl.onnxruntime.num_threads") == null) {
System.setProperty("ai.djl.onnxruntime.num_threads", "1");
}
if (System.getProperty("log4j2.contextSelector") == null) {
// turn on async logging by default
System.setProperty(
"log4j2.contextSelector",
"org.apache.logging.log4j.core.async.AsyncLoggerContextSelector");
}
// Disable alternative engine for Python in djl-serving
if (System.getProperty("ai.djl.python.disable_alternative") == null) {
System.setProperty("ai.djl.python.disable_alternative", "true");
}
WlmConfigManager wlmc = WlmConfigManager.getInstance();
instance.withIntProperty(JOB_QUEUE_SIZE, wlmc::setJobQueueSize);
instance.withIntProperty(MAX_IDLE_TIME, wlmc::setMaxIdleSeconds);
instance.withIntProperty(BATCH_SIZE, wlmc::setBatchSize);
instance.withIntProperty(MAX_BATCH_DELAY, wlmc::setMaxBatchDelayMillis);
instance.withIntProperty(RESERVED_MEMORY_MB, wlmc::setReservedMemoryMb);
wlmc.setLoadOnDevices(instance.getLoadOnDevices());
}
/**
* Returns the singleton {@code ConfigManager} instance.
*
* @return the singleton {@code ConfigManager} instance
*/
public static ConfigManager getInstance() {
return instance;
}
/**
* Returns the model server's socket connector.
*
* @param type the type of connector
* @return the {@code Connector}
*/
public Connector getConnector(Connector.ConnectorType type) {
String binding;
if (type == Connector.ConnectorType.MANAGEMENT) {
binding = prop.getProperty(MANAGEMENT_ADDRESS, "http://127.0.0.1:8080");
} else {
binding = prop.getProperty(INFERENCE_ADDRESS, "http://127.0.0.1:8080");
}
return Connector.parse(binding, type);
}
/**
* Returns the configured netty threads.
*
* @return the configured netty threads
*/
public int getNettyThreads() {
return getIntProperty(NUMBER_OF_NETTY_THREADS, 0);
}
/**
* Returns the model server home directory.
*
* @return the model server home directory
*/
public static String getModelServerHome() {
String home = Utils.getenv("MODEL_SERVER_HOME");
if (home == null) {
home = System.getProperty("MODEL_SERVER_HOME");
if (home == null) {
home = getCanonicalPath(".");
return home;
}
}
Path dir = Paths.get(home);
if (!Files.isDirectory(dir)) {
throw new IllegalArgumentException("Model server home not exist: " + home);
}
home = getCanonicalPath(dir);
return home;
}
/**
* Returns whether the model server should wait for model initialization on startup.
*
* @return true if the model server should wait for model initialization on startup
*/
public boolean waitModelLoading() {
return Boolean.parseBoolean(prop.getProperty(WAIT_MODEL_LOADING, "true"));
}
/**
* Returns whether returning a MULTI-STATUS HTTP code is allowed.
*
* @return true if returning a MULTI-STATUS HTTP code is allowed
*/
public boolean allowsMultiStatus() {
return Boolean.parseBoolean(prop.getProperty(ALLOW_MULTI_STATUS));
}
/**
* Returns the model store location.
*
* @return the model store location
*/
public Path getModelStore() {
return getPathProperty(MODEL_STORE);
}
/**
* Returns the allowed model url pattern regex.
*
* @return the allowed model url pattern regex
*/
public String getModelUrlPattern() {
return prop.getProperty(MODEL_URL_PATTERN);
}
/**
* Returns the model URLs to be loaded at startup.
*
* @return the model URLs to be loaded at startup
*/
public String getLoadModels() {
return prop.getProperty(LOAD_MODELS);
}
/**
* Returns the devices the default model will be loaded on at startup.
*
* @return the devices the default model will be loaded on at startup
*/
public String getLoadOnDevices() {
return prop.getProperty(LOAD_ON_DEVICES, "*");
}
/**
* Returns the CORS allowed origin setting.
*
* @return the CORS allowed origin setting
*/
public String getCorsAllowedOrigin() {
return prop.getProperty(CORS_ALLOWED_ORIGIN);
}
/**
* Returns the CORS allowed method setting.
*
* @return the CORS allowed method setting
*/
public String getCorsAllowedMethods() {
return prop.getProperty(CORS_ALLOWED_METHODS);
}
/**
* Returns the CORS allowed headers setting.
*
* @return the CORS allowed headers setting
*/
public String getCorsAllowedHeaders() {
return prop.getProperty(CORS_ALLOWED_HEADERS);
}
/**
* Returns the folders where the model server searches for plugins.
*
* @return the configured plugin folders, including the default folder
* @throws IOException if failed to resolve plugin folder
*/
public List<Path> getPluginFolder() throws IOException {
List<Path> list = new ArrayList<>();
Path plugin = getPathProperty(PLUGIN_FOLDER, "plugins");
list.add(plugin);
String appHome = Utils.getenv("APP_HOME");
if (appHome != null) {
Path path = Paths.get(appHome, "plugins");
if (!Files.isSameFile(path, plugin)) {
list.add(path);
}
}
return list;
}
/**
* Returns a {@code SSLContext} instance.
*
* @return a {@code SSLContext} instance
* @throws IOException if failed to read certificate file
* @throws GeneralSecurityException if failed to initialize {@code SSLContext}
*/
public SslContext getSslContext() throws IOException, GeneralSecurityException {
List<String> supportedCiphers =
Arrays.asList(
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256");
PrivateKey privateKey;
X509Certificate[] chain;
Path keyStoreFile = getPathProperty(KEYSTORE);
Path privateKeyFile = getPathProperty(PRIVATE_KEY_FILE);
Path certificateFile = getPathProperty(CERTIFICATE_FILE);
if (keyStoreFile != null) {
char[] keystorePass = getProperty(KEYSTORE_PASS, "changeit").toCharArray();
String keystoreType = getProperty(KEYSTORE_TYPE, "PKCS12");
KeyStore keyStore = KeyStore.getInstance(keystoreType);
try (InputStream is = Files.newInputStream(keyStoreFile)) {
keyStore.load(is, keystorePass);
}
Enumeration<String> en = keyStore.aliases();
String keyAlias = null;
while (en.hasMoreElements()) {
String alias = en.nextElement();
if (keyStore.isKeyEntry(alias)) {
keyAlias = alias;
break;
}
}
if (keyAlias == null) {
throw new KeyException("No key entry found in keystore.");
}
privateKey = (PrivateKey) keyStore.getKey(keyAlias, keystorePass);
Certificate[] certs = keyStore.getCertificateChain(keyAlias);
chain = new X509Certificate[certs.length];
for (int i = 0; i < certs.length; ++i) {
chain[i] = (X509Certificate) certs[i];
}
} else if (privateKeyFile != null && certificateFile != null) {
privateKey = loadPrivateKey(privateKeyFile);
chain = loadCertificateChain(certificateFile);
} else {
SelfSignedCertificate ssc = new SelfSignedCertificate();
privateKey = ssc.key();
chain = new X509Certificate[] {ssc.cert()};
}
return SslContextBuilder.forServer(privateKey, chain)
.protocols("TLSv1.2")
.ciphers(supportedCiphers)
.build();
}
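// Configuration sketch (illustrative paths and password): TLS can be enabled
// either through a keystore
//
//   keystore=conf/keystore.p12
//   keystore_pass=changeit
//   keystore_type=PKCS12
//
// or through PEM files; with neither set, a self-signed certificate is generated.
//
//   private_key_file=conf/server.key
//   certificate_file=conf/server.pem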
/**
* Returns the ChunkedBytesSupplier read timeout in seconds.
*
* @return the ChunkedBytesSupplier read timeout in seconds
*/
public int getChunkedReadTimeout() {
return getIntProperty(CHUNKED_READ_TIMEOUT, 60);
}
/**
* Returns the value with the specified key in this configuration.
*
* @param key the key
* @param def a default value
* @return the value with the specified key in this configuration
*/
public String getProperty(String key, String def) {
return prop.getProperty(key, def);
}
/**
* Prints out this configuration.
*
* @return a string representation of this configuration
*/
public String dumpConfigurations() {
WlmConfigManager wlmc = WlmConfigManager.getInstance();
Runtime runtime = Runtime.getRuntime();
StringBuilder sb = new StringBuilder(2048);
sb.append("\nModel server home: ")
.append(getModelServerHome())
.append("\nCurrent directory: ")
.append(getCanonicalPath("."))
.append("\nTemp directory: ")
.append(System.getProperty("java.io.tmpdir"))
.append("\nCommand line: ")
.append(String.join(" ", ManagementFactory.getRuntimeMXBean().getInputArguments()))
.append("\nNumber of CPUs: ")
.append(runtime.availableProcessors());
if (CudaUtils.hasCuda()) {
sb.append("\nCUDA version: ")
.append(CudaUtils.getCudaVersionString())
.append(" / ")
.append(CudaUtils.getComputeCapability(0))
.append("\nNumber of GPUs: ")
.append(CudaUtils.getGpuCount());
} else if (NeuronUtils.hasNeuron()) {
sb.append("\nNumber of Neuron cores: ").append(NeuronUtils.getNeuronCores());
}
sb.append("\nMax heap size: ")
.append((runtime.maxMemory() / 1024 / 1024))
.append("\nConfig file: ")
.append(prop.getProperty("configFile", "N/A"))
.append("\nInference address: ")
.append(getConnector(Connector.ConnectorType.INFERENCE))
.append("\nManagement address: ")
.append(getConnector(Connector.ConnectorType.MANAGEMENT))
.append("\nDefault job_queue_size: ")
.append(wlmc.getJobQueueSize())
.append("\nDefault batch_size: ")
.append(wlmc.getBatchSize())
.append("\nDefault max_batch_delay: ")
.append(wlmc.getMaxBatchDelayMillis())
.append("\nDefault max_idle_time: ")
.append(wlmc.getMaxIdleSeconds())
.append("\nModel Store: ")
.append((getModelStore() == null ? "N/A" : getModelStore()))
.append("\nInitial Models: ")
.append((getLoadModels() == null ? "N/A" : getLoadModels()))
.append("\nNetty threads: ")
.append(getNettyThreads())
.append("\nMaximum Request Size: ")
.append(prop.getProperty(MAX_REQUEST_SIZE, String.valueOf(getMaxRequestSize())))
.append("\nEnvironment variables:");
for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
String key = entry.getKey();
// Do not log HF_TOKEN value
if ("HF_TOKEN".equals(key)) {
sb.append("\n ").append(key).append(": ***");
} else if (key.startsWith("SERVING")
|| key.startsWith("PYTHON")
|| key.startsWith("DJL_")
|| key.startsWith("HF_")
|| key.startsWith("OPTION_")
|| key.contains("SAGEMAKER")
|| "TENSOR_PARALLEL_DEGREE".equals(key)
|| "OMP_NUM_THREADS".equals(key)
|| "CUDA_VISIBLE_DEVICES".equals(key)) {
sb.append("\n ").append(key).append(": ").append(entry.getValue());
}
}
return sb.toString();
}
/**
* Returns whether netty native IO is used.
*
* @return {@code true} if netty native IO is used
*/
public boolean useNativeIo() {
return Boolean.parseBoolean(prop.getProperty(USE_NATIVE_IO, "true"));
}
/**
* Returns the native IO ratio.
*
* @return the native IO ratio
*/
public int getIoRatio() {
return getIntProperty(IO_RATIO, 50);
}
/**
* Returns the maximum allowed request size in bytes.
*
* @return the maximum allowed request size in bytes
*/
public int getMaxRequestSize() {
return getIntProperty(MAX_REQUEST_SIZE, DEF_MAX_REQUEST_SIZE);
}
private int getIntProperty(String key, int def) {
String value = prop.getProperty(key);
if (value == null) {
return def;
}
return Integer.parseInt(value);
}
private void withIntProperty(String key, Consumer<Integer> f) {
if (prop.containsKey(key)) {
f.accept(Integer.parseInt(prop.getProperty(key)));
}
}
private Path getPathProperty(String key) {
return getPathProperty(key, null);
}
private Path getPathProperty(String key, String defaultValue) {
String property = prop.getProperty(key, defaultValue);
if (property == null) {
return null;
}
Path path = Paths.get(property);
if (!path.isAbsolute()) {
path = Paths.get(getModelServerHome()).resolve(path);
}
return path;
}
private static String getCanonicalPath(Path file) {
try {
return file.toRealPath().toString();
} catch (IOException e) {
return file.toAbsolutePath().toString();
}
}
private static String getCanonicalPath(String path) {
if (path == null) {
return null;
}
return getCanonicalPath(Paths.get(path));
}
private PrivateKey loadPrivateKey(Path keyFile) throws IOException, GeneralSecurityException {
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
Files.copy(keyFile, os);
String content = os.toString(StandardCharsets.UTF_8);
content = content.replaceAll("-----(BEGIN|END)( RSA)? PRIVATE KEY-----\\s*", "");
byte[] buf = Base64.getMimeDecoder().decode(content);
try {
PKCS8EncodedKeySpec privKeySpec = new PKCS8EncodedKeySpec(buf);
return keyFactory.generatePrivate(privKeySpec);
} catch (InvalidKeySpecException e) {
// old private key is OpenSSL format private key
buf = OpenSslKey.convertPrivateKey(buf);
PKCS8EncodedKeySpec privKeySpec = new PKCS8EncodedKeySpec(buf);
return keyFactory.generatePrivate(privKeySpec);
}
}
}
private X509Certificate[] loadCertificateChain(Path keyFile)
throws IOException, GeneralSecurityException {
CertificateFactory cf = CertificateFactory.getInstance("X.509");
try (InputStream is = Files.newInputStream(keyFile)) {
Collection<? extends Certificate> certs = cf.generateCertificates(is);
int i = 0;
X509Certificate[] chain = new X509Certificate[certs.size()];
for (Certificate cert : certs) {
chain[i++] = (X509Certificate) cert;
}
return chain;
}
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/util/Connector.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import ai.djl.util.Utils;
import io.netty.channel.Channel;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.ServerChannel;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollDomainSocketChannel;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollServerDomainSocketChannel;
import io.netty.channel.epoll.EpollServerSocketChannel;
import io.netty.channel.epoll.EpollSocketChannel;
import io.netty.channel.kqueue.KQueue;
import io.netty.channel.kqueue.KQueueDomainSocketChannel;
import io.netty.channel.kqueue.KQueueEventLoopGroup;
import io.netty.channel.kqueue.KQueueServerDomainSocketChannel;
import io.netty.channel.kqueue.KQueueServerSocketChannel;
import io.netty.channel.kqueue.KQueueSocketChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.channel.unix.DomainSocketAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** A class that represents the model server's socket listener. */
public final class Connector {
private static final Pattern ADDRESS_PATTERN =
Pattern.compile(
"((https|http)://([^:^/]+)(:([0-9]+))?)|(unix:(/.*))",
Pattern.CASE_INSENSITIVE);
private static boolean useNativeIo = ConfigManager.getInstance().useNativeIo();
private boolean uds;
private String socketPath;
private String bindIp;
private int port;
private boolean ssl;
private ConnectorType type;
private Connector(
int port,
boolean uds,
String bindIp,
String socketPath,
boolean ssl,
ConnectorType type) {
this.port = port;
this.uds = uds;
this.bindIp = bindIp;
this.socketPath = socketPath;
this.ssl = ssl;
this.type = type;
}
/**
* Creates a {@code Connector} instance based on a binding string.
*
* @param binding the binding string
* @param connectorType the type of the connector
* @return a {@code Connector} instance
*/
public static Connector parse(String binding, ConnectorType connectorType) {
Matcher matcher = ADDRESS_PATTERN.matcher(binding);
if (!matcher.matches()) {
throw new IllegalArgumentException("Invalid binding address: " + binding);
}
boolean uds = matcher.group(7) != null;
if (uds) {
if (!useNativeIo) {
throw new IllegalArgumentException(
"unix domain socket requires use_native_io set to true.");
}
String path = matcher.group(7);
return new Connector(-1, true, "", path, false, connectorType);
}
String protocol = matcher.group(2);
String host = matcher.group(3);
String listeningPort = matcher.group(5);
boolean ssl = "https".equalsIgnoreCase(protocol);
int port;
if (listeningPort == null) {
port = ssl ? 443 : 80;
} else {
port = Integer.parseInt(listeningPort);
}
if (port >= 65535) {
throw new IllegalArgumentException("Invalid port number: " + binding);
}
return new Connector(port, false, host, String.valueOf(port), ssl, connectorType);
}
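// Usage sketch (illustrative bindings): parse HTTP and unix-domain-socket
// addresses; the UDS form requires use_native_io=true as enforced above.
//
//   Connector http = Connector.parse("https://127.0.0.1:8443", ConnectorType.INFERENCE);
//   Connector uds = Connector.parse("unix:/tmp/djl.sock", ConnectorType.MANAGEMENT);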
/**
* Returns the socket type.
*
* @return the socket type
*/
public String getSocketType() {
return uds ? "unix" : "tcp";
}
/**
* Returns whether the connector is using a unix domain socket.
*
* @return {@code true} if the connector is using a unix domain socket
*/
public boolean isUds() {
return uds;
}
/**
* Returns whether the connector requires SSL.
*
* @return {@code true} if the connector requires SSL
*/
public boolean isSsl() {
return ssl;
}
/**
* Returns the unix domain socket path.
*
* @return the unix domain socket path
*/
public String getSocketPath() {
return socketPath;
}
/**
* Returns the TCP socket listening address.
*
* @return the TCP socket listening address
*/
public SocketAddress getSocketAddress() {
return uds ? new DomainSocketAddress(socketPath) : new InetSocketAddress(bindIp, port);
}
/**
* Returns the type of the connector.
*
* @return the type of the connector
*/
public ConnectorType getType() {
return type;
}
/**
* Creates a new netty {@code EventLoopGroup}.
*
* @param threads the number of threads
* @return a new netty {@code EventLoopGroup}
*/
public static EventLoopGroup newEventLoopGroup(int threads) {
if (useNativeIo && Epoll.isAvailable()) {
return new EpollEventLoopGroup(threads);
} else if (useNativeIo && KQueue.isAvailable()) {
return new KQueueEventLoopGroup(threads);
}
NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(threads);
eventLoopGroup.setIoRatio(ConfigManager.getInstance().getIoRatio());
return eventLoopGroup;
}
/**
* Returns the server channel class.
*
* @return the server channel class
*/
public Class<? extends ServerChannel> getServerChannel() {
if (useNativeIo && Epoll.isAvailable()) {
return uds ? EpollServerDomainSocketChannel.class : EpollServerSocketChannel.class;
} else if (useNativeIo && KQueue.isAvailable()) {
return uds ? KQueueServerDomainSocketChannel.class : KQueueServerSocketChannel.class;
}
return NioServerSocketChannel.class;
}
/**
* Returns the client channel class.
*
* @return the client channel class
*/
public Class<? extends Channel> getClientChannel() {
if (useNativeIo && Epoll.isAvailable()) {
return uds ? EpollDomainSocketChannel.class : EpollSocketChannel.class;
} else if (useNativeIo && KQueue.isAvailable()) {
return uds ? KQueueDomainSocketChannel.class : KQueueSocketChannel.class;
}
return NioSocketChannel.class;
}
/** Cleans up the leftover resources. */
public void clean() {
if (uds) {
Utils.deleteQuietly(Paths.get(socketPath));
}
}
/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Connector connector = (Connector) o;
return uds == connector.uds
&& port == connector.port
&& socketPath.equals(connector.socketPath)
&& bindIp.equals(connector.bindIp);
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return Objects.hash(uds, socketPath, bindIp, port);
}
/** {@inheritDoc} */
@Override
public String toString() {
if (uds) {
return "unix:" + socketPath;
} else if (ssl) {
return "https://" + bindIp + ':' + port;
}
return "http://" + bindIp + ':' + port;
}
/** An enum that represents the type of connector. */
public enum ConnectorType {
INFERENCE,
MANAGEMENT,
BOTH
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/util/MimeUtils.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.util.AsciiString;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** A utility class that handles MIME types. */
public final class MimeUtils {
private static final Map<String, AsciiString> MIME_TYPE_MAP = new ConcurrentHashMap<>();
static {
MIME_TYPE_MAP.put("htm", HttpHeaderValues.TEXT_HTML);
MIME_TYPE_MAP.put("html", HttpHeaderValues.TEXT_HTML);
MIME_TYPE_MAP.put("js", AsciiString.of("application/javascript"));
MIME_TYPE_MAP.put("xml", HttpHeaderValues.APPLICATION_XML);
MIME_TYPE_MAP.put("css", HttpHeaderValues.TEXT_CSS);
MIME_TYPE_MAP.put("txt", HttpHeaderValues.TEXT_PLAIN);
MIME_TYPE_MAP.put("text", HttpHeaderValues.TEXT_PLAIN);
MIME_TYPE_MAP.put("log", HttpHeaderValues.TEXT_PLAIN);
MIME_TYPE_MAP.put("csv", AsciiString.of("text/comma-separated-values"));
MIME_TYPE_MAP.put("rtf", AsciiString.of("text/rtf"));
MIME_TYPE_MAP.put("sh", AsciiString.of("text/x-sh"));
MIME_TYPE_MAP.put("tex", AsciiString.of("application/x-tex"));
MIME_TYPE_MAP.put("texi", AsciiString.of("application/x-texinfo"));
MIME_TYPE_MAP.put("texinfo", AsciiString.of("application/x-texinfo"));
MIME_TYPE_MAP.put("t", AsciiString.of("application/x-troff"));
MIME_TYPE_MAP.put("tr", AsciiString.of("application/x-troff"));
MIME_TYPE_MAP.put("roff", AsciiString.of("application/x-troff"));
MIME_TYPE_MAP.put("gif", AsciiString.of("image/gif"));
MIME_TYPE_MAP.put("png", AsciiString.of("image/x-png"));
MIME_TYPE_MAP.put("ief", AsciiString.of("image/ief"));
MIME_TYPE_MAP.put("jpeg", AsciiString.of("image/jpeg"));
MIME_TYPE_MAP.put("jpg", AsciiString.of("image/jpeg"));
MIME_TYPE_MAP.put("jpe", AsciiString.of("image/jpeg"));
MIME_TYPE_MAP.put("tiff", AsciiString.of("image/tiff"));
MIME_TYPE_MAP.put("tif", AsciiString.of("image/tiff"));
MIME_TYPE_MAP.put("xwd", AsciiString.of("image/x-xwindowdump"));
MIME_TYPE_MAP.put("pict", AsciiString.of("image/x-pict"));
MIME_TYPE_MAP.put("bmp", AsciiString.of("image/x-ms-bmp"));
MIME_TYPE_MAP.put("pcd", AsciiString.of("image/x-photo-cd"));
MIME_TYPE_MAP.put("dwg", AsciiString.of("image/vnd.dwg"));
MIME_TYPE_MAP.put("dxf", AsciiString.of("image/vnd.dxf"));
MIME_TYPE_MAP.put("svf", AsciiString.of("image/vnd.svf"));
MIME_TYPE_MAP.put("au", AsciiString.of("autio/basic"));
MIME_TYPE_MAP.put("snd", AsciiString.of("autio/basic"));
MIME_TYPE_MAP.put("mid", AsciiString.of("autio/midi"));
MIME_TYPE_MAP.put("midi", AsciiString.of("autio/midi"));
MIME_TYPE_MAP.put("aif", AsciiString.of("autio/x-aiff"));
MIME_TYPE_MAP.put("aiff", AsciiString.of("autio/x-aiff"));
MIME_TYPE_MAP.put("aifc", AsciiString.of("autio/x-aiff"));
MIME_TYPE_MAP.put("wav", AsciiString.of("autio/x-wav"));
MIME_TYPE_MAP.put("mpa", AsciiString.of("autio/x-mpeg"));
MIME_TYPE_MAP.put("abs", AsciiString.of("autio/x-mpeg"));
MIME_TYPE_MAP.put("mpega", AsciiString.of("autio/x-mpeg"));
MIME_TYPE_MAP.put("mp2a", AsciiString.of("autio/x-mpeg-2"));
MIME_TYPE_MAP.put("mpa2", AsciiString.of("autio/x-mpeg-2"));
MIME_TYPE_MAP.put("ra", AsciiString.of("application/x-pn-realaudio"));
MIME_TYPE_MAP.put("ram", AsciiString.of("application/x-pn-realaudio"));
MIME_TYPE_MAP.put("mpeg", AsciiString.of("video/mpeg"));
MIME_TYPE_MAP.put("mpg", AsciiString.of("video/mpeg"));
MIME_TYPE_MAP.put("mpe", AsciiString.of("video/mpeg"));
MIME_TYPE_MAP.put("mpv2", AsciiString.of("video/mpeg-2"));
MIME_TYPE_MAP.put("mp2v", AsciiString.of("video/mpeg-2"));
MIME_TYPE_MAP.put("qt", AsciiString.of("video/quicktime"));
MIME_TYPE_MAP.put("mov", AsciiString.of("video/quicktime"));
MIME_TYPE_MAP.put("avi", AsciiString.of("video/x-msvideo"));
MIME_TYPE_MAP.put("ai", AsciiString.of("application/postscript"));
MIME_TYPE_MAP.put("eps", AsciiString.of("application/postscript"));
MIME_TYPE_MAP.put("ps", AsciiString.of("application/postscript"));
MIME_TYPE_MAP.put("pdf", AsciiString.of("application/pdf"));
MIME_TYPE_MAP.put("gtar", AsciiString.of("application/x-gtar"));
MIME_TYPE_MAP.put("tar", AsciiString.of("application/x-tar"));
MIME_TYPE_MAP.put("bcpio", AsciiString.of("application/x-bcpio"));
MIME_TYPE_MAP.put("cpio", AsciiString.of("application/x-cpio"));
MIME_TYPE_MAP.put("zip", AsciiString.of("application/zip"));
MIME_TYPE_MAP.put("rar", AsciiString.of("application/rar"));
}
private MimeUtils() {}
/**
     * Returns the content type associated with the file.
*
* @param fileType file extension
* @return the content type
*/
public static AsciiString getContentType(String fileType) {
AsciiString contentType = MIME_TYPE_MAP.get(fileType.toLowerCase(Locale.ROOT));
if (contentType == null) {
return HttpHeaderValues.APPLICATION_OCTET_STREAM;
}
return contentType;
}
}
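
// A minimal usage sketch (illustrative only): lookups are case-insensitive, and any
// unknown extension falls back to application/octet-stream.
class MimeUtilsDemo {

    public static void main(String[] args) {
        System.out.println(MimeUtils.getContentType("PNG")); // image/x-png
        System.out.println(MimeUtils.getContentType("dat")); // application/octet-stream
    }
}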
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/util/MutableClassLoader.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.stream.Stream;
/** A {@code URLClassLoader} that can add new classes at runtime. */
public class MutableClassLoader extends URLClassLoader {
private static final Logger logger = LoggerFactory.getLogger(MutableClassLoader.class);
private static final MutableClassLoader INSTANCE =
AccessController.doPrivileged(
(PrivilegedAction<MutableClassLoader>) MutableClassLoader::new);
/**
* Constructs a new URLClassLoader for the given URLs. The URLs will be searched in the order
* specified for classes and resources after first searching in the specified parent class
* loader. Any URL that ends with a '/' is assumed to refer to a directory. Otherwise, the URL
* is assumed to refer to a JAR file which will be downloaded and opened as needed.
*
* <p>If there is a security manager, this method first calls the security manager's {@code
* checkCreateClassLoader} method to ensure creation of a class loader is allowed.
*
* @throws SecurityException if a security manager exists and its {@code checkCreateClassLoader}
* method doesn't allow creation of a class loader.
* @throws NullPointerException if {@code urls} is {@code null}.
* @see SecurityManager#checkCreateClassLoader
*/
@SuppressWarnings("this-escape")
public MutableClassLoader() {
super(new URL[0]);
String serverHome = ConfigManager.getModelServerHome();
Path depDir = Paths.get(serverHome, "deps");
if (Files.isDirectory(depDir)) {
try (Stream<Path> stream = Files.list(depDir)) {
stream.forEach(
p -> {
if (p.toString().endsWith(".jar")) {
try {
addURL(p.toUri().toURL());
} catch (MalformedURLException e) {
logger.warn("Invalid file system path: " + p, e);
}
}
});
} catch (IOException e) {
logger.warn("Failed to load dependencies from deps folder.", e);
}
}
}
/**
     * Returns the singleton instance of {@code MutableClassLoader}.
     *
     * @return the singleton instance of {@code MutableClassLoader}
*/
public static MutableClassLoader getInstance() {
return INSTANCE;
}
/** {@inheritDoc} */
@Override
public final void addURL(URL url) {
super.addURL(url);
}
}
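
// A hedged sketch of adding a jar to the running server's class path at runtime.
// The jar path and class name below are hypothetical placeholders; normally any
// jar placed in the "deps" folder is picked up automatically at startup.
class MutableClassLoaderDemo {

    public static void main(String[] args) throws Exception {
        MutableClassLoader mcl = MutableClassLoader.getInstance();
        mcl.addURL(Paths.get("/tmp/my-plugin.jar").toUri().toURL()); // hypothetical jar
        Class<?> clazz = Class.forName("com.example.MyPlugin", true, mcl); // hypothetical class
        System.out.println(clazz.getName());
    }
}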
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/util/NettyUtils.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import ai.djl.ModelException;
import ai.djl.engine.EngineException;
import ai.djl.modality.Input;
import ai.djl.repository.FilenameUtils;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.http.BadRequestException;
import ai.djl.serving.http.ErrorResponse;
import ai.djl.serving.http.InternalServerException;
import ai.djl.serving.http.MethodNotAllowedException;
import ai.djl.serving.http.ResourceNotFoundException;
import ai.djl.serving.http.ServiceUnavailableException;
import ai.djl.serving.http.Session;
import ai.djl.serving.wlm.util.WlmException;
import ai.djl.serving.wlm.util.WlmOutOfMemoryException;
import ai.djl.util.JsonSerializable;
import ai.djl.util.JsonUtils;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.handler.codec.http.multipart.Attribute;
import io.netty.handler.codec.http.multipart.FileUpload;
import io.netty.handler.codec.http.multipart.InterfaceHttpData;
import io.netty.util.AsciiString;
import io.netty.util.AttributeKey;
import io.netty.util.CharsetUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.SocketAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
/** A utility class that handles Netty requests and responses. */
public final class NettyUtils {
private static final Logger logger = LoggerFactory.getLogger(NettyUtils.class);
private static final Logger ACCESS_LOG = LoggerFactory.getLogger("ACCESS_LOG");
private static final String REQUEST_ID = "x-request-id";
private static final AttributeKey<Session> SESSION_KEY = AttributeKey.valueOf("session");
private static final String HTTP_DATE_FORMAT = "EEE, dd MMM yyyy HH:mm:ss zzz";
private static final String HTTP_DATE_GMT_TIMEZONE = "GMT";
private static final int HTTP_CACHE_SECONDS = 86400;
private NettyUtils() {}
/**
     * Updates the session when an HTTP request is received.
*
* @param channel the connection channel
* @param request the HTTP request
*/
public static void requestReceived(Channel channel, HttpRequest request) {
SocketAddress address = channel.remoteAddress();
String remoteIp;
if (address == null) {
            // This can be null on UDS, or in certain cases on Windows
remoteIp = "0.0.0.0";
} else {
remoteIp = address.toString();
}
channel.attr(SESSION_KEY).set(new Session(remoteIp, request));
}
/**
* Returns the request session.
*
* @param channel the channel associated with the request
* @return the request session
*/
public static Session getSession(Channel channel) {
return channel.attr(SESSION_KEY).get();
}
/**
* Returns the request ID for the specified channel.
*
* @param channel the connection channel
* @return the request ID for the specified channel
*/
public static String getRequestId(Channel channel) {
Session accessLog = channel.attr(SESSION_KEY).get();
if (accessLog != null) {
return accessLog.getRequestId();
}
return null;
}
/**
* Sends the json object to client.
*
* @param ctx the connection context
* @param obj the object to be sent
*/
public static void sendJsonResponse(ChannelHandlerContext ctx, Object obj) {
sendJsonResponse(ctx, obj, HttpResponseStatus.OK);
}
/**
     * Sends the json object to client with the specified status.
*
* @param ctx the connection context
* @param obj the object to be sent
* @param status the HTTP status
*/
public static void sendJsonResponse(
ChannelHandlerContext ctx, Object obj, HttpResponseStatus status) {
String content;
if (obj instanceof JsonSerializable) {
content = ((JsonSerializable) obj).toJson();
} else {
content = JsonUtils.GSON_PRETTY.toJson(obj);
}
sendJsonResponse(ctx, content, status);
}
/**
* Sends the json string to client.
*
* @param ctx the connection context
* @param json the json string
*/
public static void sendJsonResponse(ChannelHandlerContext ctx, String json) {
sendJsonResponse(ctx, json, HttpResponseStatus.OK);
}
/**
     * Sends the json string to client with the specified status.
     *
     * @param ctx the connection context
     * @param json the json string
* @param status the HTTP status
*/
public static void sendJsonResponse(
ChannelHandlerContext ctx, String json, HttpResponseStatus status) {
FullHttpResponse resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status);
resp.headers().set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_JSON);
ByteBuf content = resp.content();
content.writeCharSequence(json, CharsetUtil.UTF_8);
content.writeByte('\n');
sendHttpResponse(ctx, resp, true);
}
/**
* Sends the file to client.
*
* @param ctx the connection context
* @param path the file to download
     * @param keepAlive whether to keep the connection open
*/
public static void sendFile(ChannelHandlerContext ctx, Path path, boolean keepAlive) {
File file = path.toFile();
String name = file.getName();
long lastModified = file.lastModified();
try (InputStream is = Files.newInputStream(path)) {
sendFile(ctx, is, name, lastModified, keepAlive);
} catch (IOException e) {
throw new InternalServerException("Failed read file: " + name, e);
}
}
/**
* Sends the file to client.
*
* @param ctx the connection context
* @param is the {@code InputStream} to read file from
* @param name the file name
     * @param lastModified the time that the file was last modified
     * @param keepAlive whether to keep the connection open
     * @throws IOException if reading the file fails
*/
public static void sendFile(
ChannelHandlerContext ctx,
InputStream is,
String name,
long lastModified,
boolean keepAlive)
throws IOException {
AsciiString contentType = MimeUtils.getContentType(FilenameUtils.getFileExtension(name));
SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.ROOT);
dateFormatter.setTimeZone(TimeZone.getTimeZone(HTTP_DATE_GMT_TIMEZONE));
Calendar time = Calendar.getInstance();
time.add(Calendar.SECOND, HTTP_CACHE_SECONDS);
byte[] buf = is.readAllBytes();
FullHttpResponse resp =
new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
resp.headers()
.set(HttpHeaderNames.CONTENT_TYPE, contentType)
.set(HttpHeaderNames.CONTENT_LENGTH, buf.length)
.set(HttpHeaderNames.DATE, dateFormatter.format(time.getTime()))
.set(HttpHeaderNames.EXPIRES, dateFormatter.format(time.getTime()))
.set(HttpHeaderNames.CACHE_CONTROL, "private, max-age=" + HTTP_CACHE_SECONDS)
.set(HttpHeaderNames.LAST_MODIFIED, dateFormatter.format(new Date(lastModified)));
if (!HttpHeaderValues.TEXT_HTML.contentEqualsIgnoreCase(contentType)) {
String contentDisposition = "attachment;fileName=" + name + ";fileName*=UTF-8''" + name;
resp.headers().set(HttpHeaderNames.CONTENT_DISPOSITION, contentDisposition);
}
resp.content().writeBytes(buf);
sendHttpResponse(ctx, resp, keepAlive, false);
}
/**
* Sends error to client with exception.
*
* @param ctx the connection context
     * @param t the exception to be sent
*/
public static void sendError(ChannelHandlerContext ctx, Throwable t) {
if (t instanceof ResourceNotFoundException || t instanceof ModelNotFoundException) {
logger.debug("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.NOT_FOUND, t);
} else if (t instanceof BadRequestException) {
logger.debug("", t);
BadRequestException e = (BadRequestException) t;
HttpResponseStatus status = HttpResponseStatus.valueOf(e.getCode(), e.getMessage());
NettyUtils.sendError(ctx, status, t);
        } else if (t instanceof EngineException) {
            if ("OOM".equals(t.getMessage())) {
                logger.warn("CUDA out of memory", t);
                NettyUtils.sendError(ctx, HttpResponseStatus.INSUFFICIENT_STORAGE, t);
                return; // avoid sending a second response for the same request
            }
            logger.error("", t);
            NettyUtils.sendError(ctx, HttpResponseStatus.INTERNAL_SERVER_ERROR, t);
} else if (t instanceof WlmOutOfMemoryException) {
logger.warn("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.INSUFFICIENT_STORAGE, t);
} else if (t instanceof ModelException) {
logger.debug("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.BAD_REQUEST, t);
} else if (t instanceof MethodNotAllowedException) {
logger.debug("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.METHOD_NOT_ALLOWED, t);
} else if (t instanceof ServiceUnavailableException || t instanceof WlmException) {
logger.warn("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.SERVICE_UNAVAILABLE, t);
} else {
logger.error("", t);
NettyUtils.sendError(ctx, HttpResponseStatus.INTERNAL_SERVER_ERROR, t);
}
}
/**
* Sends error to client with HTTP status and exception.
*
* @param ctx the connection context
* @param status the HTTP status
     * @param t the exception to be sent
*/
public static void sendError(
ChannelHandlerContext ctx, HttpResponseStatus status, Throwable t) {
String type = t.getClass().getSimpleName();
Throwable cause = t.getCause();
while (cause != null) {
t = cause;
cause = cause.getCause();
}
ErrorResponse error = new ErrorResponse(status.code(), type, t.getMessage());
sendJsonResponse(ctx, error, status);
}
/**
     * Sends the HTTP response to the client.
     *
     * @param ctx ChannelHandlerContext
     * @param resp HttpResponse to send
     * @param keepAlive whether to keep the connection open
*/
public static void sendHttpResponse(
ChannelHandlerContext ctx, HttpResponse resp, boolean keepAlive) {
sendHttpResponse(ctx, resp, keepAlive, true, true);
}
/**
     * Sends the HTTP response to the client.
     *
     * @param ctx ChannelHandlerContext
     * @param resp HttpResponse to send
     * @param keepAlive whether to keep the connection open
     * @param closeOnError whether to close the connection on error
*/
public static void sendHttpResponse(
ChannelHandlerContext ctx, HttpResponse resp, boolean keepAlive, boolean closeOnError) {
sendHttpResponse(ctx, resp, keepAlive, true, closeOnError);
}
private static void sendHttpResponse(
ChannelHandlerContext ctx,
HttpResponse resp,
boolean keepAlive,
boolean noCache,
boolean closeOnError) {
// Send the response and close the connection if necessary.
Channel channel = ctx.channel();
Session session = channel.attr(SESSION_KEY).getAndSet(null);
HttpHeaders headers = resp.headers();
ConfigManager configManager = ConfigManager.getInstance();
HttpResponseStatus status = resp.status();
int code = status.code();
if (code != 200) {
logger.debug("HTTP {}", status);
}
if (session != null) {
// session might be recycled if channel is closed already.
session.setCode(code);
headers.set(REQUEST_ID, session.getRequestId());
ACCESS_LOG.info(session.toString());
} else {
ACCESS_LOG.info("HTTP {}", status);
}
String allowedOrigin = configManager.getCorsAllowedOrigin();
String allowedMethods = configManager.getCorsAllowedMethods();
String allowedHeaders = configManager.getCorsAllowedHeaders();
if (allowedOrigin != null
&& !allowedOrigin.isEmpty()
&& !headers.contains(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN)) {
headers.set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN, allowedOrigin);
}
if (allowedMethods != null
&& !allowedMethods.isEmpty()
&& !headers.contains(HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS)) {
headers.set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS, allowedMethods);
}
if (allowedHeaders != null
&& !allowedHeaders.isEmpty()
&& !headers.contains(HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS)) {
headers.set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS, allowedHeaders);
}
if (noCache) {
// Add cache-control headers to avoid browser cache response
headers.set("Pragma", "no-cache");
headers.set("Cache-Control", "no-cache; no-store, must-revalidate, private");
headers.set("Expires", "Thu, 01 Jan 1970 00:00:00 UTC");
}
if (resp instanceof FullHttpResponse) {
ByteBuf content = ((FullHttpResponse) resp).content();
HttpUtil.setContentLength(resp, content.readableBytes());
} else {
HttpUtil.setTransferEncodingChunked(resp, true);
}
if (!keepAlive || (code >= 400 && closeOnError)) {
headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
if (channel.isActive()) {
ChannelFuture f = channel.writeAndFlush(resp);
f.addListener(ChannelFutureListener.CLOSE);
} else {
logger.warn("Channel is closed by peer.");
}
} else {
headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
if (channel.isActive()) {
channel.writeAndFlush(resp);
} else {
logger.warn("Channel is closed by peer.");
}
}
}
/**
* Closes the specified channel after all queued write requests are flushed.
*
* @param ch the channel to be closed
*/
public static void closeOnFlush(Channel ch) {
if (ch.isActive()) {
ch.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
}
}
/**
* Returns the bytes for the specified {@code ByteBuf}.
*
* @param buf the {@code ByteBuf} to read
* @return the bytes for the specified {@code ByteBuf}
*/
public static byte[] getBytes(ByteBuf buf) {
if (buf.hasArray()) {
return buf.array();
}
byte[] ret = new byte[buf.readableBytes()];
int readerIndex = buf.readerIndex();
buf.getBytes(readerIndex, ret);
return ret;
}
/**
* Reads the parameter's value for the key from the uri.
*
* @param decoder the {@code QueryStringDecoder} parsed from uri
* @param key the parameter key
* @return the parameter's value
*/
public static String getRequiredParameter(QueryStringDecoder decoder, String key) {
List<String> param = decoder.parameters().get(key);
if (param != null && !param.isEmpty()) {
String ret = param.get(0);
if (!ret.isEmpty()) {
return ret;
}
}
throw new BadRequestException("The parameter " + key + " is required");
}
/**
* Reads the parameter's value for the key from the uri.
*
* @param decoder the {@code QueryStringDecoder} parsed from uri
* @param key the parameter key
* @param def the default value
* @return the parameter's value
*/
public static String getParameter(QueryStringDecoder decoder, String key, String def) {
List<String> param = decoder.parameters().get(key);
if (param != null && !param.isEmpty()) {
String ret = param.get(0);
if (ret.isEmpty()) {
return def;
}
return ret;
}
return def;
}
/**
* Read the parameter's integer value for the key from the uri.
*
* @param decoder the {@code QueryStringDecoder} parsed from uri
* @param key the parameter key
* @param def the default value
* @return the parameter's integer value
* @throws NumberFormatException exception is thrown when the parameter-value is not numeric.
*/
public static int getIntParameter(QueryStringDecoder decoder, String key, int def) {
String value = getParameter(decoder, key, null);
if (value == null || value.isEmpty()) {
return def;
}
return Integer.parseInt(value);
}
/**
     * Parses form data and adds it to the {@link Input} object.
*
* @param data the form data
* @param input the {@link Input} object to be added to
*/
public static void addFormData(InterfaceHttpData data, Input input) {
if (data == null) {
return;
}
try {
String name = data.getName();
switch (data.getHttpDataType()) {
case Attribute:
Attribute attribute = (Attribute) data;
input.add(name, attribute.getValue().getBytes(StandardCharsets.UTF_8));
break;
case FileUpload:
FileUpload fileUpload = (FileUpload) data;
input.add(name, getBytes(fileUpload.getByteBuf()));
break;
default:
throw new IllegalArgumentException(
"Except form field, but got " + data.getHttpDataType());
}
} catch (IOException e) {
throw new AssertionError(e);
}
}
}
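
// A minimal sketch of getBytes(ByteBuf): a direct (non-array-backed) buffer is
// copied into a fresh array without moving the reader index. Illustrative only.
class NettyUtilsGetBytesDemo {

    public static void main(String[] args) {
        ByteBuf buf = Unpooled.directBuffer();
        buf.writeBytes("hello".getBytes(StandardCharsets.UTF_8));
        byte[] bytes = NettyUtils.getBytes(buf);
        System.out.println(new String(bytes, StandardCharsets.UTF_8)); // hello
        System.out.println(buf.readableBytes()); // still 5, nothing was consumed
        buf.release();
    }
}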
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/util/OpenSslKey.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
/** A utility class converting an OpenSSL private key to a PKCS8 private key. */
public final class OpenSslKey {
private static final int[] RSA_ENCRYPTION = {1, 2, 840, 113549, 1, 1, 1};
private static final byte[] NULL_BYTES = {0x05, 0x00};
private OpenSslKey() {}
/**
     * Converts an OpenSSL private key to a PKCS8 private key.
*
* @param keySpec OpenSSL key spec
* @return PKCS8 encoded private key
*/
public static byte[] convertPrivateKey(byte[] keySpec) {
if (keySpec == null) {
return null; // NOPMD
}
byte[] bytes = new byte[keySpec.length];
System.arraycopy(keySpec, 0, bytes, 0, keySpec.length);
byte[] octetBytes = encodeOctetString(bytes);
byte[] oidBytes = encodeOID(RSA_ENCRYPTION);
byte[] verBytes = {0x02, 0x01, 0x00};
byte[][] seqBytes = new byte[4][];
seqBytes[0] = oidBytes;
seqBytes[1] = NULL_BYTES;
seqBytes[2] = null;
byte[] oidSeqBytes = encodeSequence(seqBytes);
seqBytes[0] = verBytes;
seqBytes[1] = oidSeqBytes;
seqBytes[2] = octetBytes;
seqBytes[3] = null;
return encodeSequence(seqBytes);
}
private static byte[] encodeOID(int[] oid) {
if (oid == null) {
return null; // NOPMD
}
int oLen = 1;
for (int i = 2; i < oid.length; i++) {
oLen += getOIDCompLength(oid[i]);
}
int len = oLen + getLengthOfLengthField(oLen) + 1;
byte[] bytes = new byte[len];
bytes[0] = 0x06; // ASN Object ID
int offset = writeLengthField(bytes, oLen);
bytes[offset++] = (byte) (40 * oid[0] + oid[1]);
for (int i = 2; i < oid.length; i++) {
offset = writeOIDComp(oid[i], bytes, offset);
}
return bytes;
}
private static byte[] encodeOctetString(byte[] bytes) {
if (bytes == null) {
return null; // NOPMD
}
        int oLen = bytes.length;
int len = oLen + getLengthOfLengthField(oLen) + 1;
byte[] newBytes = new byte[len];
newBytes[0] = 0x04;
int offset = writeLengthField(newBytes, oLen);
if (len - oLen != offset) {
return null; // NOPMD
}
System.arraycopy(bytes, 0, newBytes, offset, oLen);
return newBytes;
}
private static byte[] encodeSequence(byte[][] byteArrays) {
if (byteArrays == null) {
return null; // NOPMD
}
int oLen = 0;
for (byte[] b : byteArrays) {
if (b == null) {
break;
}
oLen += b.length;
}
int len = oLen + getLengthOfLengthField(oLen) + 1;
byte[] bytes = new byte[len];
bytes[0] = 0x10 | 0x20; // ASN sequence & constructed
int offset = writeLengthField(bytes, oLen);
if (len - oLen != offset) {
return null; // NOPMD
}
for (byte[] b : byteArrays) {
if (b == null) {
break;
}
System.arraycopy(b, 0, bytes, offset, b.length);
offset += b.length;
}
return bytes;
}
private static int writeLengthField(byte[] bytes, int len) {
        if (len <= 127) { // DER short form: lengths up to 127 fit in one byte
bytes[1] = (byte) len;
return 2;
}
int lenOfLenField = getLengthOfLengthField(len);
bytes[1] = (byte) ((lenOfLenField - 1) | 0x80); // record length of the length field
for (int i = lenOfLenField; i >= 2; i--) { // write the length
bytes[i] = (byte) (len >> ((lenOfLenField - i) * 8));
}
return lenOfLenField + 1;
}
private static int getLengthOfLengthField(int len) {
if (len <= 127) { // highest bit is zero, one byte is enough
return 1;
} else if (len <= 0xFF) { // highest bit is 1, two bytes in the form {0x81, 0xab}
return 2;
} else if (len <= 0xFFFF) { // three bytes in the form {0x82, 0xab, 0xcd}
return 3;
} else if (len <= 0xFFFFFF) { // four bytes in the form {0x83, 0xab, 0xcd, 0xef}
return 4;
        } else { // five bytes in the form {0x84, 0xab, 0xcd, 0xef, 0x12}
return 5;
}
}
private static int getOIDCompLength(int comp) {
if (comp <= 0x7F) {
return 1;
} else if (comp <= 0x3FFF) {
return 2;
} else if (comp <= 0x1FFFFF) {
return 3;
} else if (comp <= 0xFFFFFFF) {
return 4;
} else {
return 5;
}
}
private static int writeOIDComp(int comp, byte[] bytes, int offset) {
int len = getOIDCompLength(comp);
int off = offset;
for (int i = len - 1; i > 0; i--) {
bytes[off++] = (byte) ((comp >>> i * 7) | 0x80);
}
bytes[off++] = (byte) (comp & 0x7F);
return off;
}
}
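
// A small sketch inspecting the outer DER header produced by convertPrivateKey.
// The input is placeholder bytes, not a real RSA key, so only the framing is shown.
class OpenSslKeyDemo {

    public static void main(String[] args) {
        byte[] fake = new byte[200]; // placeholder key material
        byte[] pkcs8 = OpenSslKey.convertPrivateKey(fake);
        // 0x30 = ASN.1 SEQUENCE (constructed); 0x81 = long form, one length byte follows
        System.out.printf("tag=0x%02x lenField=0x%02x%n", pkcs8[0], pkcs8[1]);
    }
}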
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/util/ServerGroups.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.util;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.ChannelGroupFuture;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.util.concurrent.GlobalEventExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
/** A class that holds all opened {@code ChannelGroup} and {@code EventLoopGroup}. */
public class ServerGroups {
static final Logger logger = LoggerFactory.getLogger(ServerGroups.class);
private ChannelGroup allChannels;
private EventLoopGroup serverGroup;
private EventLoopGroup childGroup;
private ConfigManager configManager;
/**
* Constructs a new {@code ServerGroups} instance.
*
* @param configManager the configuration
*/
public ServerGroups(ConfigManager configManager) {
this.configManager = configManager;
reset();
}
/** Resets the {@code ServerGroups}. */
public final void reset() {
allChannels = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
serverGroup = Connector.newEventLoopGroup(2);
childGroup = Connector.newEventLoopGroup(configManager.getNettyThreads());
}
/**
     * Shuts down all opened channels and event loops.
     *
     * @param graceful {@code true} to gracefully clean up the resources
*/
public void shutdown(boolean graceful) {
closeAllChannels(graceful);
List<EventLoopGroup> allEventLoopGroups = new ArrayList<>();
allEventLoopGroups.add(serverGroup);
allEventLoopGroups.add(childGroup);
for (EventLoopGroup group : allEventLoopGroups) {
if (graceful) {
group.shutdownGracefully();
} else {
group.shutdownGracefully(0, 0, TimeUnit.SECONDS);
}
}
if (graceful) {
for (EventLoopGroup group : allEventLoopGroups) {
try {
group.awaitTermination(60, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
}
/**
* Returns the server event loop group.
*
* @return the server event loop group
*/
public EventLoopGroup getServerGroup() {
return serverGroup;
}
/**
* Returns the client event loop group.
*
* @return the client event loop group
*/
public EventLoopGroup getChildGroup() {
return childGroup;
}
/**
* Registers a channel to be tracked.
*
* @param channel a channel to be tracked
*/
public void registerChannel(Channel channel) {
allChannels.add(channel);
}
private void closeAllChannels(boolean graceful) {
ChannelGroupFuture future = allChannels.close();
// if this is a graceful shutdown, log any channel closing failures. if this isn't a
// graceful shutdown, ignore them.
if (graceful) {
try {
future.await(10, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (!future.isSuccess()) {
for (ChannelFuture cf : future) {
if (!cf.isSuccess()) {
logger.info("Unable to close channel: " + cf.channel(), cf.cause());
}
}
}
}
}
}
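
// A hedged lifecycle sketch; ConfigManager normally must be initialized by the
// server bootstrap before getInstance() is usable, so this is illustrative only.
class ServerGroupsDemo {

    public static void main(String[] args) {
        ServerGroups groups = new ServerGroups(ConfigManager.getInstance());
        // ... bind servers with groups.getServerGroup() / groups.getChildGroup(),
        // registering each accepted channel via groups.registerChannel(channel) ...
        groups.shutdown(true); // graceful: close channels, then await event loops
    }
}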
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/BadWorkflowException.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow;
/** Thrown when a workflow is invalid or could not be loaded. */
public class BadWorkflowException extends Exception {
static final long serialVersionUID = 1L;
/**
     * Constructs a {@code BadWorkflowException} with the specified detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
*/
public BadWorkflowException(String message) {
super(message);
}
/**
     * Constructs a {@code BadWorkflowException} with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i> automatically
* incorporated into this exception's detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
* @param cause The cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A null value is permitted, and indicates that the cause is nonexistent or
* unknown.)
*/
public BadWorkflowException(String message, Throwable cause) {
super(message, cause);
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/Workflow.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.serving.wlm.WorkLoadManager;
import ai.djl.serving.wlm.WorkerPoolConfig;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import ai.djl.serving.workflow.WorkflowExpression.Item.ItemType;
import ai.djl.serving.workflow.function.AdapterWorkflowFunction;
import ai.djl.serving.workflow.function.EnsembleMerge;
import ai.djl.serving.workflow.function.FunctionsApply;
import ai.djl.serving.workflow.function.IdentityWF;
import ai.djl.serving.workflow.function.WlmWorkflowFunction;
import ai.djl.serving.workflow.function.WorkflowFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;
import java.util.stream.Collectors;
/** A flow of executing {@link ai.djl.Model}s and custom functions. */
public class Workflow implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(Workflow.class);
public static final String IN = "in";
public static final String OUT = "out";
private static final Map<String, Supplier<WorkflowFunction>> BUILT_INS =
new ConcurrentHashMap<>();
static {
BUILT_INS.put(IdentityWF.NAME, IdentityWF::new);
BUILT_INS.put(EnsembleMerge.NAME, EnsembleMerge::new);
BUILT_INS.put(FunctionsApply.NAME, FunctionsApply::new);
BUILT_INS.put(AdapterWorkflowFunction.NAME, AdapterWorkflowFunction::new);
}
String name;
String version;
Map<String, WorkerPoolConfig<Input, Output>> wpcs;
Map<String, WorkflowExpression> expressions;
Map<String, WorkflowFunction> funcs;
Map<String, Map<String, Object>> configs;
private boolean prepared;
/**
* Constructs a workflow containing only a single workerPoolConfig.
*
* @param wpc the workerPoolConfig for the workflow
*/
public Workflow(WorkerPoolConfig<Input, Output> wpc) {
String modelName = "model";
this.name = wpc.getId();
this.version = wpc.getVersion();
wpcs = Collections.singletonMap(modelName, wpc);
expressions =
Collections.singletonMap(
OUT, new WorkflowExpression(new Item(modelName), new Item(IN)));
funcs = new ConcurrentHashMap<>();
configs = Collections.emptyMap();
}
/**
* Constructs a workflow.
*
* @param name workflow name
* @param version workflow version
     * @param wpcs a map of executable names (how each wpc is referred to in the {@link
     *     WorkflowExpression}s) to the wpc
* @param expressions a map of names to refer to an expression to the expression
* @param configs the configuration objects
* @param funcs the custom functions used in the workflow
*/
public Workflow(
String name,
String version,
Map<String, WorkerPoolConfig<Input, Output>> wpcs,
Map<String, WorkflowExpression> expressions,
Map<String, Map<String, Object>> configs,
Map<String, WorkflowFunction> funcs) {
this.name = name;
this.version = version;
this.wpcs = wpcs;
this.expressions = expressions;
this.funcs = funcs;
this.configs = configs;
}
/**
* Returns the {@link WorkerPoolConfig}s used in the workflow.
*
* @return the wpcs used in the workflow
*/
public Collection<WorkerPoolConfig<Input, Output>> getWpcs() {
return wpcs.values();
}
/**
* Returns the wpc map in the workflow.
*
* @return the wpc map in the workflow
*/
public Map<String, WorkerPoolConfig<Input, Output>> getWpcMap() {
return wpcs;
}
/**
* Prepares this workflow for use.
*
* <p>This is idempotent and can be safely re-called if this is already prepared. It should
* re-call to ensure preparedness.
*
* @param wlm the wlm to prepare with
*/
public void prepare(WorkLoadManager wlm) {
if (prepared) {
return;
}
// Populate local builtin funcs from global BUILT_INS
// TODO Avoid creating unused built-ins
for (Map.Entry<String, Supplier<WorkflowFunction>> builtIn : BUILT_INS.entrySet()) {
funcs.computeIfAbsent(builtIn.getKey(), n -> builtIn.getValue().get());
}
// Prepare WorkflowFunctions
for (WorkflowFunction f : funcs.values()) {
f.prepare(wlm, configs);
}
prepared = true;
}
/**
* Executes a workflow with an input.
*
* @param wlm the wlm to run the workflow with
* @param input the input
* @return a future of the result of the execution
*/
public CompletableFuture<Output> execute(WorkLoadManager wlm, Input input) {
logger.trace("Beginning execution of workflow: {}", name);
WorkflowExecutor ex = new WorkflowExecutor(wlm, input);
return ex.execute(OUT)
.thenApply(
i -> {
logger.trace("Ending execution of workflow: {}", name);
if (i.getItemType() != ItemType.INPUT) {
throw new IllegalArgumentException(
"The workflow did not return an output. Instead it returned"
+ " an "
+ i.getItemType());
}
return (Output) i.getInput();
});
}
/**
* Returns the workflow name.
*
* @return the workflow name
*/
public String getName() {
return name;
}
/**
* Returns the workflow version.
*
* @return the workflow version
*/
public String getVersion() {
return version;
}
/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof Workflow)) {
return false;
}
Workflow p = (Workflow) o;
return name.equals(p.getName()) && version.equals(p.getVersion());
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return Objects.hash(name, getVersion());
}
/** {@inheritDoc} */
@Override
public String toString() {
if (version != null) {
return name + ':' + version;
}
return name;
}
/** {@inheritDoc} */
@Override
public void close() {
for (WorkerPoolConfig<Input, Output> wpc : getWpcs()) {
wpc.close();
}
for (WorkflowFunction f : funcs.values()) {
f.close();
}
}
/** An executor is a session for a running {@link Workflow}. */
public final class WorkflowExecutor {
private WorkLoadManager wlm;
private Map<String, CompletableFuture<Item>> vars;
private Set<String> targetStack;
private WorkflowExecutor(WorkLoadManager wlm, Input input) {
this.wlm = wlm;
// Construct variable map to contain each expression and the input
vars = new ConcurrentHashMap<>(expressions.size() + 1);
vars.put(IN, CompletableFuture.completedFuture(new Item(input)));
targetStack = new HashSet<>();
}
/**
* Returns the {@link WorkLoadManager} used by the {@link WorkflowExecutor}.
*
* @return the {@link WorkLoadManager} used by the {@link WorkflowExecutor}
*/
public WorkLoadManager getWlm() {
return wlm;
}
/**
         * Uses the executor to compute a local value or target.
*
* <p>These values can be found as the keys in the "workflow" object.
*
* @param target the target to compute
* @return a future that contains the target value
*/
public CompletableFuture<Item> execute(String target) {
if (vars.containsKey(target)) {
return vars.get(target);
}
// Use targetStack, the set of targets in the "call stack" to detect cycles
if (targetStack.contains(target)) {
// If a target is executed but already in the stack, there must be a cycle
throw new IllegalStateException(
"Your workflow contains a cycle with target: " + target);
}
targetStack.add(target);
WorkflowExpression expr = expressions.get(target);
if (expr == null) {
throw new IllegalArgumentException(
"Expected to find variable but it is not defined: " + target);
}
CompletableFuture<Item> result = executeExpression(expr);
vars.put(target, result);
return result.whenComplete(
(o, e) -> {
if (e != null) {
throw new WorkflowExecutionException(
"Failed to compute workflow target: " + target, e);
}
targetStack.remove(target);
logger.trace("Workflow computed target {} with value:\n{}", target, o);
});
}
/**
* Computes the result of a {@link WorkflowExpression}.
*
* @param expr the expression to compute
* @return the computed value
*/
public CompletableFuture<Item> executeExpression(WorkflowExpression expr) {
WorkflowFunction workflowFunction = getExecutable(expr.getExecutableName());
List<WorkflowArgument> args =
expr.getExecutableArgs().stream()
.map(arg -> new WorkflowArgument(this, arg))
.collect(Collectors.toList());
return workflowFunction.run(this, args);
}
/**
* Returns the executable (model, function, or built-in) with a given name.
*
* @param arg the workflow argument containing the name
* @return the function to execute the found executable
*/
public WorkflowFunction getExecutable(WorkflowArgument arg) {
return getExecutable(arg.getItem().getString());
}
/**
* Returns the executable (model, function, or built-in) with a given name.
*
* @param name the executable name
* @return the function to execute the found executable
*/
public WorkflowFunction getExecutable(String name) {
WorkerPoolConfig<Input, Output> wpc = wpcs.get(name);
if (wpc != null) {
return new WlmWorkflowFunction(wpc);
}
if (funcs.containsKey(name)) {
return funcs.get(name);
}
throw new IllegalArgumentException("Could not find find model or function: " + name);
}
/**
* Returns the configuration with the given name.
*
* @param name the configuration name
* @return the configuration
*/
public Map<String, Object> getConfig(WorkflowArgument name) {
return getConfig(name.getItem().getString());
}
/**
* Returns the configuration with the given name.
*
* @param name the configuration name
* @return the configuration
*/
public Map<String, Object> getConfig(String name) {
return configs.get(name);
}
}
/** An argument that is passed to a {@link WorkflowFunction}. */
public static class WorkflowArgument {
private WorkflowExecutor executor;
private Item item;
/**
* Constructs a {@link WorkflowArgument}.
*
* @param executor the executor associated with the argument
* @param item the argument item
*/
public WorkflowArgument(WorkflowExecutor executor, Item item) {
this.executor = executor;
this.item = item;
}
/**
* Returns the item (either {@link String} or {@link WorkflowExpression}).
*
* @return the item (either {@link String} or {@link WorkflowExpression})
*/
public Item getItem() {
return item;
}
/**
* Evaluates the argument as a target reference (if string) or function call (if
* expression).
*
* @return the result of evaluating the argument
*/
public CompletableFuture<Item> evaluate() {
switch (item.getItemType()) {
case STRING:
return executor.execute(item.getString());
case EXPRESSION:
return executor.executeExpression(item.getExpression());
case INPUT:
return CompletableFuture.completedFuture(item);
default:
throw new IllegalStateException(
"Found unexpected item type in workflow evaluate");
}
}
}
}
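
// A minimal sketch of the expression tree behind the single-wpc constructor above,
// i.e. out = model(in). Purely illustrative; no models are loaded or executed here.
class WorkflowExpressionTreeDemo {

    public static void main(String[] args) {
        WorkflowExpression expr =
                new WorkflowExpression(new Item("model"), new Item(Workflow.IN));
        System.out.println(expr.getExecutableName()); // model
        System.out.println(expr.getExecutableArgs().size()); // 1 argument: "in"
    }
}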
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/WorkflowDefinition.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.repository.FilenameUtils;
import ai.djl.serving.util.MutableClassLoader;
import ai.djl.serving.wlm.ModelInfo;
import ai.djl.serving.wlm.WorkerPoolConfig;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import ai.djl.serving.workflow.function.WorkflowFunction;
import ai.djl.util.ClassLoaderUtils;
import ai.djl.util.JsonUtils;
import ai.djl.util.Utils;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.annotations.SerializedName;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* This class is for parsing the JSON or YAML definition for a {@link Workflow}.
*
* <p>It can then be converted into a {@link Workflow} using {@link #toWorkflow()}.
*/
public class WorkflowDefinition {
String name;
String version;
String baseUri;
Map<String, ModelInfo<Input, Output>> models;
@SerializedName("workflow")
Map<String, WorkflowExpression> expressions;
@SerializedName("functions")
Map<String, String> funcs;
@SerializedName("configs")
Map<String, Map<String, Object>> configs;
public static final Gson GSON =
JsonUtils.builder()
.registerTypeAdapter(ModelInfo.class, new ModelDefinitionDeserializer())
.registerTypeAdapter(WorkflowExpression.class, new ExpressionDeserializer())
.registerTypeAdapter(Item.class, new ExpressionItemDeserializer())
.create();
/**
* Parses a new {@link WorkflowDefinition} from a file path.
*
* @param path the path to parse the file from
* @return the parsed {@link WorkflowDefinition}
* @throws IOException if it fails to load the file for parsing
*/
public static WorkflowDefinition parse(Path path) throws IOException {
return parse(null, path.toUri());
}
/**
* Parses a new {@link WorkflowDefinition} from an input stream.
*
* @param name the workflow name (null for no name)
* @param uri the uri of the file
* @return the parsed {@link WorkflowDefinition}
* @throws IOException if read from uri failed
*/
public static WorkflowDefinition parse(String name, URI uri) throws IOException {
return parse(name, uri, new ConcurrentHashMap<>());
}
static WorkflowDefinition parse(String name, URI uri, Map<String, String> templateReplacements)
throws IOException {
String type = FilenameUtils.getFileExtension(Objects.requireNonNull(uri.toString()));
// Default model_dir template replacement
if (templateReplacements == null) {
templateReplacements = new ConcurrentHashMap<>();
}
templateReplacements.put("model_dir", getWorkflowDir(uri.toString()));
try (InputStream is = Utils.openUrl(uri.toURL());
Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8)) {
WorkflowDefinition wd = parse(type, reader, templateReplacements);
if (name != null) {
wd.name = name;
}
if (wd.baseUri == null) {
wd.baseUri = uri.toString();
}
return wd;
}
}
private static WorkflowDefinition parse(
String type, Reader input, Map<String, String> templateReplacements) {
if (templateReplacements != null) {
String updatedInput =
new BufferedReader(input)
.lines()
.map(
l -> {
for (Entry<String, String> replacement :
templateReplacements.entrySet()) {
l =
l.replace(
"{" + replacement.getKey() + "}",
replacement.getValue());
}
return l;
})
.collect(Collectors.joining("\n"));
input = new StringReader(updatedInput);
}
if ("yml".equalsIgnoreCase(type) || "yaml".equalsIgnoreCase(type)) {
try {
ClassLoader cl = ClassLoaderUtils.getContextClassLoader();
Class<?> clazz = Class.forName("org.yaml.snakeyaml.Yaml", true, cl);
Constructor<?> constructor = clazz.getConstructor();
Method method = clazz.getMethod("load", Reader.class);
Object obj = constructor.newInstance();
Object yaml = method.invoke(obj, input);
String asJson = GSON.toJson(yaml);
return GSON.fromJson(asJson, WorkflowDefinition.class);
} catch (ReflectiveOperationException e) {
throw new IllegalArgumentException(
"Yaml parsing is not supported. In order to support parsing Yaml files, the"
+ " dependency snakeyaml is required. Please add"
+ " 'org.yaml.snakeyaml.Yaml' to your classpath, pom.xml, or"
+ " build.gradle.",
e);
}
} else if ("json".equalsIgnoreCase(type)) {
return GSON.fromJson(input, WorkflowDefinition.class);
} else {
throw new IllegalArgumentException("Unexpected file type: " + type);
}
}
/**
* Returns the full workflow url if the url points to a workflow definition file.
*
* @param link the workflow url
* @return the workflow URL
*/
public static URI toWorkflowUri(String link) {
if (link.startsWith("http") && link.endsWith(".json")
|| link.endsWith(".yml")
|| link.endsWith(".yaml")) {
return URI.create(link);
}
URI uri = URI.create(link);
String scheme = uri.getScheme();
if (scheme != null && !"file".equals(scheme)) {
return null;
}
String uriPath = uri.getPath();
if (uriPath == null) {
uriPath = uri.getSchemeSpecificPart();
}
if (uriPath.startsWith("/") && System.getProperty("os.name").startsWith("Win")) {
uriPath = uriPath.substring(1);
}
Path path = Paths.get(uriPath);
if (!Files.exists(path)) {
return null;
}
if (uriPath.endsWith(".json") || uriPath.endsWith(".yml") || uriPath.endsWith(".yaml")) {
return path.toUri();
}
if (Files.isDirectory(path)) {
Path file = path.resolve("workflow.json");
if (Files.isRegularFile(file)) {
return file.toUri();
}
file = path.resolve("workflow.yml");
if (Files.isRegularFile(file)) {
return file.toUri();
}
file = path.resolve("workflow.yaml");
if (Files.isRegularFile(file)) {
return file.toUri();
}
}
return null;
}
/**
* Converts the {@link WorkflowDefinition} into a workflow.
*
* @return a new {@link Workflow} matching this definition
* @throws BadWorkflowException if the workflow could not be parsed successfully
*/
public Workflow toWorkflow() throws BadWorkflowException {
String workflowDir = getWorkflowDir(baseUri);
if (models != null) {
for (Entry<String, ModelInfo<Input, Output>> emd : models.entrySet()) {
ModelInfo<Input, Output> md = emd.getValue();
md.setId(emd.getKey());
}
}
Map<String, WorkflowFunction> loadedFunctions = new ConcurrentHashMap<>();
if (funcs != null) {
String uriPath = URI.create(workflowDir).getPath();
if (uriPath.startsWith("/") && System.getProperty("os.name").startsWith("Win")) {
uriPath = uriPath.substring(1);
}
Path path = Paths.get(uriPath).resolve("libs");
Path classDir = path.resolve("classes");
if (Files.exists(classDir)) {
ClassLoaderUtils.compileJavaClass(path);
}
ClassLoader mcl = MutableClassLoader.getInstance();
ClassLoader ccl = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(mcl);
for (Entry<String, String> f : funcs.entrySet()) {
WorkflowFunction func =
ClassLoaderUtils.findImplementation(
path, WorkflowFunction.class, f.getValue());
if (func == null) {
throw new BadWorkflowException("Could not load function " + f.getKey());
}
loadedFunctions.put(f.getKey(), func);
}
} finally {
Thread.currentThread().setContextClassLoader(ccl);
}
}
Map<String, WorkerPoolConfig<Input, Output>> wpcs = new ConcurrentHashMap<>(models);
wpcs.putAll(models);
return new Workflow(name, version, wpcs, expressions, configs, loadedFunctions);
}
private static String getWorkflowDir(String uri) {
int pos = uri.lastIndexOf('/');
return uri.substring(0, pos);
}
private static final class ModelDefinitionDeserializer
implements JsonDeserializer<ModelInfo<Input, Output>> {
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public ModelInfo<Input, Output> deserialize(
JsonElement json, Type typeOfT, JsonDeserializationContext context) {
if (json.isJsonObject()) {
ModelInfo<Input, Output> model = JsonUtils.GSON.fromJson(json, ModelInfo.class);
model.hasInputOutputClass(Input.class, Output.class);
return model;
} else if (json.isJsonPrimitive()) {
return new ModelInfo<>(json.getAsString());
}
throw new JsonParseException(
"Unexpected type of model definition: should be Criteria object or URI string");
}
}
private static final class ExpressionDeserializer
implements JsonDeserializer<WorkflowExpression> {
/** {@inheritDoc} */
@Override
public WorkflowExpression deserialize(
JsonElement json, Type typeOfT, JsonDeserializationContext context) {
JsonArray array = json.getAsJsonArray();
List<Item> args = new ArrayList<>(array.size());
for (JsonElement el : array) {
args.add(context.deserialize(el, Item.class));
}
return new WorkflowExpression(args);
}
}
private static final class ExpressionItemDeserializer implements JsonDeserializer<Item> {
/** {@inheritDoc} */
@Override
public Item deserialize(
JsonElement json, Type typeOfT, JsonDeserializationContext context) {
if (json.isJsonArray()) {
return new Item(
(WorkflowExpression) context.deserialize(json, WorkflowExpression.class));
} else if (json.isJsonPrimitive()) {
return new Item(json.getAsString());
} else {
throw new JsonParseException("Unexpected JSON element in expression item");
}
}
}
}
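
// A hedged sketch: write a minimal workflow.json to a temp directory and parse it.
// The model URI is a placeholder; calling toWorkflow() would try to resolve it.
class WorkflowDefinitionParseDemo {

    public static void main(String[] args) throws IOException {
        String json =
                "{\n"
                        + "  \"name\": \"demo\",\n"
                        + "  \"version\": \"0.1\",\n"
                        + "  \"models\": {\"m\": \"djl://ai.djl.zoo/mlp\"},\n"
                        + "  \"workflow\": {\"out\": [\"m\", \"in\"]}\n"
                        + "}";
        Path file = Files.createTempDirectory("wf").resolve("workflow.json");
        Files.write(file, json.getBytes(StandardCharsets.UTF_8));
        WorkflowDefinition def = WorkflowDefinition.parse(file);
        System.out.println(def.name + ':' + def.version); // demo:0.1
    }
}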
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/WorkflowExecutionException.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow;
/** Thrown when a workflow could not be executed. */
public class WorkflowExecutionException extends RuntimeException {
static final long serialVersionUID = 1L;
/**
     * Constructs a {@code WorkflowExecutionException} with the specified detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
*/
public WorkflowExecutionException(String message) {
super(message);
}
/**
     * Constructs a {@code WorkflowExecutionException} with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i> automatically
* incorporated into this exception's detail message.
*
* @param message The detail message (which is saved for later retrieval by the {@link
* #getMessage()} method)
* @param cause The cause (which is saved for later retrieval by the {@link #getCause()}
* method). (A null value is permitted, and indicates that the cause is nonexistent or
* unknown.)
*/
public WorkflowExecutionException(String message, Throwable cause) {
super(message, cause);
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/WorkflowExpression.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow;
import ai.djl.modality.Input;
import java.util.Arrays;
import java.util.List;
/** An expression defining a local value in a {@link Workflow}. */
public class WorkflowExpression {
private List<Item> args;
/**
* Constructs a {@link WorkflowExpression}.
*
* @param args the args to pass to the executable. Can refer to other expression value names to
* get the outputs of those expressions or the special name "in" to refer to the workflow
* input
*/
public WorkflowExpression(Item... args) {
this(Arrays.asList(args));
}
/**
* Constructs a {@link WorkflowExpression}.
*
* @param args the args to pass to the executable. Can refer to other expression value names to
* get the outputs of those expressions or the special name "in" to refer to the workflow
* input
*/
public WorkflowExpression(List<Item> args) {
this.args = args;
}
/**
* Returns the executable name (the first argument).
*
* @return the executable name or throws an exception if it is not an executable name
*/
public String getExecutableName() {
return args.get(0).getString();
}
/**
     * Returns the arguments assuming the expression is an executable (all but the first argument).
     *
     * @return the arguments assuming the expression is an executable (all but the first argument)
*/
public List<Item> getExecutableArgs() {
return args.subList(1, args.size());
}
/**
* Returns the expression args.
*
* @return the expression args
*/
public List<Item> getArgs() {
return args;
}
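    // Illustrative usage sketch (names assumed, not from the original source): the expression
    // ["myModel", "in"] applies the executable "myModel" to the workflow input.
    //
    //   WorkflowExpression expr = new WorkflowExpression(new Item("myModel"), new Item("in"));
    //   String name = expr.getExecutableName(); // "myModel"
    //   List<Item> fnArgs = expr.getExecutableArgs(); // [Item("in")]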
/**
* An item in the expression which contains either a string or another {@link
* WorkflowExpression}.
*/
public static class Item {
private String string;
private WorkflowExpression expression;
private Input input;
private ItemType itemType;
/**
* Constructs an {@link Item} containing a string.
*
* @param string the string
*/
public Item(String string) {
this.string = string;
itemType = ItemType.STRING;
}
/**
* Constructs an {@link Item} containing a {@link WorkflowExpression}.
*
* @param expression the expression
*/
public Item(WorkflowExpression expression) {
this.expression = expression;
itemType = ItemType.EXPRESSION;
}
/**
* Constructs an {@link Item} containing an {@link Input}.
*
* @param input the input
*/
public Item(Input input) {
this.input = input;
itemType = ItemType.INPUT;
}
/**
* Returns the string value or throws an exception if it does not contain a string.
*
* @return the string value or throws an exception if it does not contain a string
*/
public String getString() {
if (itemType != ItemType.STRING) {
throw new IllegalArgumentException(
"Expected a string workflow item, but found " + itemType);
}
return string;
}
/**
* Returns the expression value or throws an exception if it does not contain an expression.
*
* @return the expression value or throws an exception if it does not contain an expression
*/
public WorkflowExpression getExpression() {
if (itemType != ItemType.EXPRESSION) {
throw new IllegalArgumentException(
"Expected an expression workflow item, but found " + itemType);
}
return expression;
}
/**
* Returns the input value or throws an exception if it does not contain an input.
*
* @return the input value or throws an exception if it does not contain an input
*/
public Input getInput() {
if (itemType != ItemType.INPUT) {
throw new IllegalArgumentException(
"Expected an input workflow item, but found " + itemType);
}
return input;
}
/**
* Returns the type of item.
*
* @return the type of item
*/
public ItemType getItemType() {
return itemType;
}
/**
* Returns the expression value as a list.
*
* @return the expression value as a list.
*/
public List<Item> getList() {
if (itemType != ItemType.EXPRESSION) {
throw new IllegalArgumentException(
"Expected an expression/list workflow item, but found " + itemType);
}
return expression.getArgs();
}
/** The type of contents stored in the item. */
public enum ItemType {
STRING,
EXPRESSION,
INPUT
}
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/WorkflowTemplates.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow;
import ai.djl.util.ClassLoaderUtils;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** A class for managing and using {@link WorkflowDefinition} templates. */
public final class WorkflowTemplates {
private static final Map<String, URI> TEMPLATES = new ConcurrentHashMap<>();
private WorkflowTemplates() {}
/**
* Registers a new workflow template.
*
* @param name the template name
* @param template the template location
*/
public static void register(String name, URI template) {
TEMPLATES.put(name, template);
}
/**
* Constructs a {@link WorkflowDefinition} using a registered template.
*
* @param templateName the template name
* @param templateReplacements a map of replacements to be applied to the template
* @return the new {@link WorkflowDefinition} based off the template
* @throws IOException if it fails to load the template file for parsing
*/
public static WorkflowDefinition template(
String templateName, Map<String, String> templateReplacements) throws IOException {
URI uri = TEMPLATES.get(templateName);
if (uri == null) {
URL fromResource =
ClassLoaderUtils.getResource("workflowTemplates/" + templateName + ".json");
if (fromResource != null) {
try {
uri = fromResource.toURI();
} catch (URISyntaxException ignored) {
}
}
}
if (uri == null) {
throw new IllegalArgumentException(
"The workflow template " + templateName + " could not be found");
}
return WorkflowDefinition.parse(null, uri, templateReplacements);
}
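    // Illustrative usage sketch (the template name and replacement keys are assumptions):
    //
    //   WorkflowTemplates.register("myTemplate", URI.create("file:///templates/myTemplate.json"));
    //   WorkflowDefinition def =
    //           WorkflowTemplates.template("myTemplate", Map.of("modelUrl", "djl://ai.djl.zoo/mlp"));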
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/** Contains the {@link ai.djl.serving.workflow.Workflow} for managing a model execution flow. */
package ai.djl.serving.workflow;
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/function/AdapterWorkflowFunction.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow.function;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.serving.wlm.Adapter;
import ai.djl.serving.wlm.WorkLoadManager;
import ai.djl.serving.wlm.WorkerPool;
import ai.djl.serving.workflow.Workflow.WorkflowArgument;
import ai.djl.serving.workflow.Workflow.WorkflowExecutor;
import ai.djl.serving.workflow.WorkflowExpression;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
/**
* Workflow function "adapter" applies an adapted model to an input.
*
 * <p>To use this workflow function, you must pre-specify the adapters in the configs. In
* the configs, create an object "adapters" with keys as reference names and values as objects. The
* adapter reference objects should have the following properties:
*
* <ul>
 *   <li>model - the model name
 *   <li>src - the adapter source (the key of the reference object is used as the adapter name)
* <li>options (optional) - an object containing additional string options
* </ul>
*
 * <p>Calling this workflow function requires two arguments. The first is the adapter config
* reference name (determining the model and adapter to use). The second argument is the input.
*
* <p>To see an example of this workflow function, see the <a
* href="https://github.com/deepjavalibrary/djl-serving/tree/master/serving/src/test/resources/adapterWorkflows/w1/workflow.json">test
* example</a>.
*/
public class AdapterWorkflowFunction extends WorkflowFunction {
public static final String NAME = "adapter";
private WorkLoadManager wlm;
private Map<String, AdapterReference> adapters;
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public void prepare(WorkLoadManager wlm, Map<String, Map<String, Object>> configs) {
this.wlm = wlm;
this.adapters = new ConcurrentHashMap<>();
// Add adapters from configurations
if (configs != null && configs.containsKey("adapters")) {
for (Map.Entry<String, Object> entry : configs.get("adapters").entrySet()) {
Map<String, Object> config = (Map<String, Object>) entry.getValue();
String modelName = (String) config.get("model");
String adapterName = entry.getKey();
String src = (String) config.get("src");
Map<String, String> options = new ConcurrentHashMap<>();
if (config.containsKey("options") && config.get("options") instanceof Map) {
for (Map.Entry<String, Object> option :
((Map<String, Object>) config.get("options")).entrySet()) {
if (option.getValue() instanceof String) {
options.put(option.getKey(), (String) option.getValue());
}
}
}
WorkerPool<?, ?> wp = wlm.getWorkerPoolById(modelName);
Adapter adapter = Adapter.newInstance(wp.getWpc(), adapterName, src, options);
adapters.put(adapterName, new AdapterReference(modelName, adapter));
}
}
// Register adapters
for (AdapterReference adapter : adapters.values()) {
adapter.adapter.register(wlm.getWorkerPoolById(adapter.modelName));
}
}
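    // Illustrative config sketch matching the parsing above (names and paths assumed):
    //
    //   "configs": {
    //     "adapters": {
    //       "myAdapter": {
    //         "model": "myModel",
    //         "src": "file:///opt/adapters/myAdapter",
    //         "options": {"key": "value"}
    //       }
    //     }
    //   }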
/** {@inheritDoc} */
@Override
public void close() {
for (AdapterReference adapter : adapters.values()) {
WorkerPool<Input, Output> wp = wlm.getWorkerPoolById(adapter.modelName);
if (wp != null) {
Adapter.unregister(wp, adapter.adapter.getName());
}
}
}
/** {@inheritDoc} */
@Override
public CompletableFuture<Item> run(WorkflowExecutor executor, List<WorkflowArgument> args) {
if (args.size() != 2) {
throw new IllegalArgumentException(
"The adapter workflow function should have two args, but has " + args.size());
}
String adapterReference = args.get(0).getItem().getString();
if (!adapters.containsKey(adapterReference)) {
throw new IllegalArgumentException(
"The adapter function was called with unknown adapter " + adapterReference);
}
AdapterReference adapter = adapters.get(adapterReference);
return args.get(1)
.evaluate()
.thenComposeAsync(
evaluatedArg -> {
Input input = evaluatedArg.getInput();
input.add("adapter", adapter.adapter.getName());
return executor.executeExpression(
new WorkflowExpression(
new Item(adapter.modelName), new Item(input)));
});
}
private static final class AdapterReference {
private String modelName;
private Adapter adapter;
private AdapterReference(String modelName, Adapter adapter) {
this.modelName = modelName;
this.adapter = adapter;
}
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/function/EnsembleMerge.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow.function;
import ai.djl.modality.Output;
import ai.djl.ndarray.BytesSupplier;
import ai.djl.serving.workflow.Workflow.WorkflowArgument;
import ai.djl.serving.workflow.Workflow.WorkflowExecutor;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import ai.djl.translate.Ensembleable;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
/**
* Workflow function "ensemble" accepts a list of {@link Ensembleable} outputs and merges them using
* {@link Ensembleable#ensemble(List)}.
*/
public class EnsembleMerge extends WorkflowFunction {
public static final String NAME = "ensemble";
/** {@inheritDoc} */
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public CompletableFuture<Item> run(WorkflowExecutor executor, List<WorkflowArgument> args) {
if (args.size() != 1) {
throw new IllegalArgumentException(
"Expected one arguments, the list of items to ensemble");
}
return CompletableFuture.supplyAsync(
() -> {
List<Ensembleable> outputs =
args.get(0).evaluate().join().getList().stream()
.map(i -> (Ensembleable<?>) i.getInput().get(0))
.collect(Collectors.toList());
Ensembleable<?> ensembled = Ensembleable.ensemble(outputs);
Output output = new Output();
output.add((BytesSupplier) ensembled);
return new Item(output);
});
}
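    // Illustrative workflow expression sketch (model names assumed): the single argument should
    // evaluate to a list of Ensembleable outputs, e.g. the results of applying several models:
    //
    //   ["ensemble", ["functionsApply", ["modelA", "modelB"], "in"]]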
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/function/FunctionsApply.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow.function;
import ai.djl.serving.workflow.Workflow.WorkflowArgument;
import ai.djl.serving.workflow.Workflow.WorkflowExecutor;
import ai.djl.serving.workflow.WorkflowExpression;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
/**
* Workflow function "functionsApply" accepts a list of functions and an input and applies each
* function to the input.
*
* <p>It returns a list of the results of applying each function to the input.
*/
public class FunctionsApply extends WorkflowFunction {
public static final String NAME = "functionsApply";
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Item> run(WorkflowExecutor executor, List<WorkflowArgument> args) {
if (args.size() != 2) {
throw new IllegalArgumentException(
"Expected two arguments: the list of functions to run and the input, but found "
+ args.size());
}
List<Item> fns = args.get(0).getItem().getList();
Item input = args.get(1).getItem();
return CompletableFuture.supplyAsync(
() -> {
                    // Apply each function to the input
CompletableFuture<Item>[] futures =
fns.stream()
.map(
fn ->
executor.executeExpression(
new WorkflowExpression(fn, input)))
.toArray(CompletableFuture[]::new);
                    CompletableFuture.allOf(futures).join(); // wait for all functions to complete
List<Item> outputs =
Arrays.stream(futures)
.map(CompletableFuture::join)
.collect(Collectors.toList());
return new Item(new WorkflowExpression(outputs));
});
}
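    // Illustrative workflow expression sketch (model names assumed): apply modelA and modelB to
    // the workflow input and return the list of both results:
    //
    //   ["functionsApply", ["modelA", "modelB"], "in"]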
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/function/IdentityWF.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow.function;
import ai.djl.serving.workflow.Workflow;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/** Workflow function "id" accepts a single argument and returns the result of evaluating it. */
public class IdentityWF extends WorkflowFunction {
public static final String NAME = "id";
/** {@inheritDoc} */
@Override
public CompletableFuture<Item> run(
Workflow.WorkflowExecutor executor, List<Workflow.WorkflowArgument> args) {
if (args.size() != 1) {
throw new IllegalArgumentException("Expected one argument to id");
}
return evaluateArgs(args).thenApply(pa -> pa.get(0));
}
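    // Illustrative workflow expression sketch: ["id", "in"] simply evaluates to the workflow
    // input.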
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/function/WlmWorkflowFunction.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow.function;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.serving.wlm.Job;
import ai.djl.serving.wlm.WorkerPoolConfig;
import ai.djl.serving.workflow.Workflow;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
* An internal {@link WorkflowFunction} that is used to execute a {@link WorkerPoolConfig}
* (typically a model) through the {@link ai.djl.serving.wlm.WorkLoadManager} in the workflow.
*/
public class WlmWorkflowFunction extends WorkflowFunction {
WorkerPoolConfig<Input, Output> workerPoolConfig;
/**
* Constructs a {@link WlmWorkflowFunction} with a given workerPoolConfig.
*
* @param wpc the workerPoolConfig to run
*/
public WlmWorkflowFunction(WorkerPoolConfig<Input, Output> wpc) {
this.workerPoolConfig = wpc;
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public CompletableFuture<Item> run(
Workflow.WorkflowExecutor executor, List<Workflow.WorkflowArgument> args) {
if (args.size() != 1) {
throw new IllegalArgumentException(
"The model or worker type "
+ workerPoolConfig.getId()
+ " should have one arg, but has "
+ args.size());
}
return evaluateArgs(args)
.thenCompose(
processedArgs ->
executor.getWlm()
.runJob(
new Job<>(
workerPoolConfig,
processedArgs.get(0).getInput()))
.thenApply(Item::new));
}
}
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/function/WorkflowFunction.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.workflow.function;
import ai.djl.serving.wlm.WorkLoadManager;
import ai.djl.serving.workflow.Workflow;
import ai.djl.serving.workflow.WorkflowExpression.Item;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
/**
* A lambda function that can be run within a {@link Workflow}.
*
* @see #run(Workflow.WorkflowExecutor, List)
*/
public abstract class WorkflowFunction implements AutoCloseable {
/**
* The lambda function that is run.
*
* @param executor an executor that can be used to run expressions or models
* @param args the list of function arguments
     * @return a future containing the result
*/
public abstract CompletableFuture<Item> run(
Workflow.WorkflowExecutor executor, List<Workflow.WorkflowArgument> args);
/**
* A helper to evaluate all function arguments.
*
* @param args the arguments to evaluate
* @return a future with the list of evaluated arguments
*/
@SuppressWarnings("unchecked")
protected CompletableFuture<List<Item>> evaluateArgs(List<Workflow.WorkflowArgument> args) {
CompletableFuture<Item>[] processedArgs =
args.stream()
.map(Workflow.WorkflowArgument::evaluate)
.toArray(CompletableFuture[]::new);
return CompletableFuture.allOf(processedArgs)
.thenApply(
v ->
Arrays.stream(processedArgs)
.map(CompletableFuture::join)
.collect(Collectors.toList()));
}
/**
* Prepares this {@link WorkflowFunction}.
*
* @param wlm the wlm
* @param configs the workflow configs
*/
public void prepare(WorkLoadManager wlm, Map<String, Map<String, Object>> configs) {}
/** {@inheritDoc} */
@Override
public void close() {}
}
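// Minimal custom function sketch (an illustration, not part of the original source): evaluates
// its single argument and upper-cases the string value.
//
//   public class UpperCaseWF extends WorkflowFunction {
//       @Override
//       public CompletableFuture<Item> run(
//               Workflow.WorkflowExecutor executor, List<Workflow.WorkflowArgument> args) {
//           return evaluateArgs(args)
//                   .thenApply(
//                           items ->
//                                   new Item(
//                                           items.get(0)
//                                                   .getString()
//                                                   .toUpperCase(java.util.Locale.ROOT)));
//       }
//   }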
|
0
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow
|
java-sources/ai/djl/serving/serving/0.28.0/ai/djl/serving/workflow/function/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains the {@link ai.djl.serving.workflow.function.WorkflowFunction} and various built-in
* functions.
*/
package ai.djl.serving.workflow.function;
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/Adapter.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.inference.Predictor;
import ai.djl.serving.wlm.WorkerPoolConfig.ThreadConfig;
import ai.djl.serving.wlm.util.WorkerJob;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
* An adapter is a modification producing a variation of a model that can be used during prediction.
*/
public abstract class Adapter {
protected String name;
protected String src;
protected Map<String, String> options;
/**
* Constructs an {@link Adapter}.
*
* @param name the adapter name
* @param src the adapter source
* @param options additional adapter options
*/
protected Adapter(String name, String src, Map<String, String> options) {
this.name = name;
this.src = src;
this.options = options;
}
/**
* Constructs a new {@link Adapter}.
*
     * <p>After construction, you should call {@link #register(WorkerPool)}. Constructing the
     * adapter alone doesn't affect the worker pool itself.
*
* @param wpc the worker pool config for the new adapter
* @param name the adapter name
* @param src the adapter source
* @param options additional adapter options
* @return the new adapter
*/
public static Adapter newInstance(
WorkerPoolConfig<?, ?> wpc, String name, String src, Map<String, String> options) {
if (!(wpc instanceof ModelInfo)) {
String modelName = wpc.getId();
throw new IllegalArgumentException("The worker " + modelName + " is not a model");
}
// TODO Allow URL support
try {
URI uri = new URI(src);
String scheme = uri.getScheme();
if (scheme != null && !"file".equals(scheme)) {
throw new IllegalArgumentException("URL adapters are not currently supported");
}
} catch (URISyntaxException ignored) {
}
ModelInfo<?, ?> modelInfo = (ModelInfo<?, ?>) wpc;
// TODO Replace usage of class name with creating adapters by Engine.newPatch(name ,src)
if ("PyEngine".equals(modelInfo.getEngine().getClass().getSimpleName())) {
return new PyAdapter(name, src, options);
} else {
throw new IllegalArgumentException(
"Adapters are only currently supported for Python models");
}
}
/**
* Unregisters an adapter in a worker pool.
*
* <p>This unregisters it in the wpc for new threads and all existing threads.
*
* @param wp the worker pool to remove the adapter from
* @param adapterName the adapter name
* @param <I> the input type
* @param <O> the output type
*/
public static <I, O> void unregister(WorkerPool<I, O> wp, String adapterName) {
ModelInfo<I, O> wpc = (ModelInfo<I, O>) wp.getWpc();
Adapter adapter = wpc.unregisterAdapter(adapterName);
// TODO Support worker adapter scheduling rather than register/unregister on all workers
for (WorkerGroup<I, O> wg : wp.getWorkerGroups().values()) {
for (WorkerThread<I, O> t : wg.getWorkers()) {
t.addConfigJob(adapter.unregisterJob(wpc, t.getThreadType()));
}
}
}
/**
* Returns the adapter name.
*
* @return the adapter name
*/
public String getName() {
return name;
}
/**
* Returns the adapter src.
*
* @return the adapter src
*/
public String getSrc() {
return src;
}
/**
* Registers this adapter in a worker pool.
*
* <p>This registers it in the wpc for new threads and all existing threads.
*
* @param wp the worker pool to register this adapter in
* @param <I> the input type
* @param <O> the output type
*/
public <I, O> void register(WorkerPool<I, O> wp) {
ModelInfo<I, O> wpc = (ModelInfo<I, O>) wp.getWpc();
wpc.registerAdapter(this);
for (WorkerGroup<I, O> wg : wp.getWorkerGroups().values()) {
for (WorkerThread<I, O> t : wg.getWorkers()) {
t.addConfigJob(registerJob(wpc, t.getThreadType()));
}
}
}
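    // Illustrative usage sketch (the model name and adapter path are assumptions):
    //
    //   WorkerPool<?, ?> wp = wlm.getWorkerPoolById("myModel");
    //   Adapter adapter =
    //           Adapter.newInstance(wp.getWpc(), "myAdapter", "/opt/adapters/myAdapter", Map.of());
    //   adapter.register(wp);
    //   // ... later, remove it from the pool and all worker threads:
    //   Adapter.unregister(wp, "myAdapter");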
/**
* Creates a {@link WorkerJob} to register this adapter in a {@link WorkerThread}.
*
* @param wpc the worker pool of the thread
* @param threadConfig the thread config to register
* @param <I> the input type
* @param <O> the output type
* @return the registration job
*/
public <I, O> WorkerJob<I, O> registerJob(
WorkerPoolConfig<I, O> wpc, ThreadConfig<I, O> threadConfig) {
ModelInfo<I, O>.ModelThread t = (ModelInfo<I, O>.ModelThread) threadConfig;
Job<I, O> job =
new Job<>(
wpc,
null,
in -> {
registerPredictor(t.getPredictor());
return null;
});
return new WorkerJob<>(job, new CompletableFuture<>());
}
/**
* Creates a {@link WorkerJob} to unregister this adapter from a {@link WorkerThread}.
*
* @param wpc the worker pool of the thread
* @param threadConfig the thread config to unregister
* @param <I> the input type
* @param <O> the output type
* @return the unregistration job
*/
public <I, O> WorkerJob<I, O> unregisterJob(
WorkerPoolConfig<I, O> wpc, ThreadConfig<I, O> threadConfig) {
ModelInfo<I, O>.ModelThread t = (ModelInfo<I, O>.ModelThread) threadConfig;
Job<I, O> job =
new Job<>(
wpc,
null,
in -> {
unregisterPredictor(t.getPredictor());
return null;
});
return new WorkerJob<>(job, new CompletableFuture<>());
}
protected abstract void registerPredictor(Predictor<?, ?> predictor);
protected abstract void unregisterPredictor(Predictor<?, ?> predictor);
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/BatchAggregator.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.metric.Dimension;
import ai.djl.metric.Metric;
import ai.djl.metric.Unit;
import ai.djl.serving.wlm.util.WorkerJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
/**
 * Abstract class for all batch aggregators. A batch aggregator checks the working queue and
 * combines multiple jobs into one batch. Batching jobs enables optimizations in the underlying
 * engines.
*
* @author erik.bamberg@web.de
*/
abstract class BatchAggregator<I, O> {
private static final Logger MODEL_METRIC = LoggerFactory.getLogger("model_metric");
private Dimension dimension;
protected int batchSize;
protected long maxBatchDelayMicros;
protected List<WorkerJob<I, O>> wjs;
protected LinkedBlockingDeque<WorkerJob<I, O>> jobQueue;
/**
     * Constructs a new {@code BatchAggregator} instance.
*
* @param wpc the workerPoolConfig to use.
* @param jobQueue the job queue for polling data from.
*/
public BatchAggregator(
WorkerPoolConfig<I, O> wpc, LinkedBlockingDeque<WorkerJob<I, O>> jobQueue) {
this.dimension = new Dimension("Model", wpc.getId());
this.batchSize = wpc.getBatchSize();
this.maxBatchDelayMicros = wpc.getMaxBatchDelayMillis() * 1000L;
this.jobQueue = jobQueue;
wjs = new ArrayList<>();
}
/**
     * Polls the queue and returns a batch of jobs for the model.
     *
     * @return the list of jobs to pass to the model.
     * @throws InterruptedException if the thread is interrupted while waiting for new data in the
     *     queue.
*/
public List<Job<I, O>> getRequest() throws InterruptedException {
wjs = pollBatch();
List<Job<I, O>> list = new ArrayList<>(wjs.size());
for (WorkerJob<I, O> wj : wjs) {
Job<I, O> job = wj.getJob();
long queueTime = job.getWaitingMicroSeconds();
Metric metric = new Metric("QueueTime", queueTime, Unit.MICROSECONDS, dimension);
MODEL_METRIC.info("{}", metric);
list.add(job);
}
int size = list.size();
if (size > 1) {
MODEL_METRIC.info(
"{}", new Metric("DynamicBatchSize", size, Unit.COUNT_PER_ITEM, dimension));
}
return list;
}
    /** Sends the response to all waiting clients. */
public void sendResponse() {
for (WorkerJob<I, O> wj : wjs) {
wj.getFuture().complete(wj.getJob().getOutput());
long latency = wj.getJob().getWaitingMicroSeconds();
Metric metric = new Metric("RequestLatency", latency, Unit.MICROSECONDS, dimension);
MODEL_METRIC.info("{}", metric);
}
wjs.clear();
}
/**
* Completes the job with an error.
*
* @param error the exception
*/
public void sendError(Throwable error) {
for (WorkerJob<I, O> wj : wjs) {
wj.getFuture().completeExceptionally(error);
}
wjs.clear();
}
/**
* Fills in the list with a batch of jobs.
*
     * @return a list of jobs read by this batch iteration.
* @throws InterruptedException if interrupted
*/
protected abstract List<WorkerJob<I, O>> pollBatch() throws InterruptedException;
/**
     * Checks whether this {@code BatchAggregator} and its thread can be shut down or whether this
     * aggregator waits for more data.
     *
     * @return true if the thread can be shut down, for example when the max idle time is exceeded
     *     in a temporary batch aggregator.
*/
public abstract boolean isFinished();
protected void drainTo(List<WorkerJob<I, O>> list, long maxDelay) throws InterruptedException {
long begin = System.nanoTime();
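        // The caller has typically already taken the first job of the batch, so fill at most
        // batchSize - 1 more without waiting.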
jobQueue.drainTo(list, batchSize - 1);
int remain = batchSize - list.size();
for (int i = 0; i < remain; ++i) {
WorkerJob<I, O> wj = jobQueue.poll(maxDelay, TimeUnit.MICROSECONDS);
if (wj == null || wj.getJob() == null) {
break;
}
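            // Charge the time spent waiting against the remaining delay budget (nanos -> micros).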
long end = System.nanoTime();
maxDelay -= (end - begin) / 1000;
begin = end;
list.add(wj);
if (maxDelay <= 0) {
break;
}
}
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/Job.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.translate.TranslateException;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
/** A class that represents an inference job. */
public class Job<I, O> {
private WorkerPoolConfig<I, O> workerPoolConfig;
private I input;
private O output;
private long begin;
private JobFunction<I, O> runner;
/**
* Constructs a new {@code Job} instance.
*
* @param wpc the model to run the job
* @param input the input data
*/
public Job(WorkerPoolConfig<I, O> wpc, I input) {
this.workerPoolConfig = wpc;
this.input = input;
begin = System.nanoTime();
}
/**
* Constructs a new {@code Job} instance.
*
* @param wpc the model to run the job
* @param input the input data
* @param runner the function to run on worker
*/
public Job(WorkerPoolConfig<I, O> wpc, I input, JobFunction<I, O> runner) {
this(wpc, input);
this.runner = runner;
}
/**
* Runs a {@link JobFunction} on a batch of jobs and sets the result in their output.
*
* @param jobs the jobs to run and update
* @param f the function to run
* @param <I> the input type
* @param <O> the output type
* @throws TranslateException if the jobs fail to run
*/
public static <I, O> void runAll(List<Job<I, O>> jobs, JobFunction<I, O> f)
throws TranslateException {
List<O> out = f.apply(jobs.stream().map(Job::getInput).collect(Collectors.toList()));
if (out != null) {
for (int i = 0; i < out.size(); i++) {
jobs.get(i).setOutput(out.get(i));
}
}
}
/**
* Sets a {@link Job} output to a failure.
*
* @param job the job to set the output on
* @param code the failure code
* @param message the failure message
*/
public static void setFailOutput(Job<Input, Output> job, int code, String message) {
Output output = new Output();
output.setCode(code);
output.setMessage(message);
job.setOutput(output);
}
/**
* Returns the worker pool config that is associated with this job.
*
* @return the worker pool config that is associated with this job
*/
public WorkerPoolConfig<I, O> getWpc() {
return workerPoolConfig;
}
/**
* Returns the input data.
*
* @return the input data
*/
public I getInput() {
return input;
}
/**
* Returns the output data.
*
* @return the output data
*/
public O getOutput() {
return output;
}
/**
* Sets the output of the job.
*
* @param output the job output
*/
public void setOutput(O output) {
this.output = output;
}
/**
* Returns the wait time of this job.
*
     * @return the wait time of this job in microseconds
*/
public long getWaitingMicroSeconds() {
return (System.nanoTime() - begin) / 1000;
}
/**
* Returns the task to run for the job.
*
* @return the task to run for the job
*/
public Optional<JobFunction<I, O>> getRunner() {
return Optional.ofNullable(runner);
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/JobFunction.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.translate.TranslateException;
import java.util.List;
/**
* A function describing the action to take in a {@link Job}.
*
* @param <I> the job input type
* @param <O> the job output type
*/
@FunctionalInterface
public interface JobFunction<I, O> {
/**
* Applies this function.
*
* @param inputs the batch of inputs to run
* @return the batch of results
* @throws TranslateException if it fails to run
*/
List<O> apply(List<I> inputs) throws TranslateException;
}
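// Illustrative sketch (the Input/Output echo logic is an assumption): a JobFunction can be
// supplied as a lambda, e.g. copying each request body into the response.
//
//   JobFunction<Input, Output> echo =
//           inputs ->
//                   inputs.stream()
//                           .map(
//                                   in -> {
//                                       Output out = new Output();
//                                       out.add(in.getData());
//                                       return out;
//                                   })
//                           .collect(java.util.stream.Collectors.toList());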
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/LmiConfigRecommender.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.util.Ec2Utils;
import ai.djl.util.Utils;
import ai.djl.util.cuda.CudaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/** A utility class to auto configure LMI model properties. */
public final class LmiConfigRecommender {
private static final Logger logger = LoggerFactory.getLogger(LmiConfigRecommender.class);
// TODO: model list is up to date with vLLM 0.4.2
private static final Map<String, String> MODEL_TO_ROLLING_BATCH =
Map.ofEntries(
Map.entry("falcon", "lmi-dist"),
Map.entry("gpt-neox", "lmi-dist"),
Map.entry("t5", "lmi-dist"),
Map.entry("llama", "lmi-dist"),
Map.entry("mpt", "lmi-dist"),
Map.entry("gpt-bigcode", "lmi-dist"),
Map.entry("aquila", "lmi-dist"),
Map.entry("baichuan", "lmi-dist"),
Map.entry("bloom", "lmi-dist"),
Map.entry("chatglm", "lmi-dist"),
Map.entry("cohere", "lmi-dist"),
Map.entry("dbrx", "lmi-dist"),
Map.entry("deci", "lmi-dist"),
Map.entry("gemma", "lmi-dist"),
Map.entry("gpt2", "lmi-dist"),
Map.entry("gptj", "lmi-dist"),
Map.entry("internlm", "lmi-dist"),
Map.entry("internlm2", "lmi-dist"),
Map.entry("jais", "lmi-dist"),
Map.entry("mistral", "lmi-dist"),
Map.entry("mixtral", "lmi-dist"),
Map.entry("opt", "lmi-dist"),
Map.entry("phi", "lmi-dist"),
Map.entry("phi3", "lmi-dist"),
Map.entry("qwen", "lmi-dist"),
Map.entry("qwen2", "lmi-dist"),
Map.entry("qwen2_moe", "lmi-dist"),
Map.entry("stablelm", "lmi-dist"),
Map.entry("xverse", "lmi-dist"),
Map.entry("starcoder2", "lmi-dist"));
private static final Set<String> OPTIMIZED_TASK_ARCHITECTURES =
Set.of("ForCausalLM", "LMHeadModel", "ForConditionalGeneration");
private LmiConfigRecommender() {}
static void configure(
ModelInfo<?, ?> modelInfo,
Properties lmiProperties,
LmiUtils.HuggingFaceModelConfig modelConfig) {
String features = Utils.getEnvOrSystemProperty("SERVING_FEATURES");
setDynamicBatch(lmiProperties, modelConfig, modelInfo, features);
setRollingBatch(lmiProperties, modelConfig, features);
setMpiMode(lmiProperties, modelConfig, features);
setTensorParallelDegree(lmiProperties);
setRollingBatchSize(lmiProperties);
}
private static void setRollingBatch(
Properties lmiProperties,
LmiUtils.HuggingFaceModelConfig modelConfig,
String features) {
// If dynamic batch is enabled, we don't enable rolling batch.
if (Integer.parseInt(lmiProperties.getProperty("batch_size", "1")) > 1) {
lmiProperties.setProperty("option.rolling_batch", "disable");
return;
}
String rollingBatch = lmiProperties.getProperty("option.rolling_batch", "auto");
String modelType = modelConfig.getModelType();
if (!"auto".equals(rollingBatch)) {
return;
} else if (!isTextGenerationModel(modelConfig)) {
// Non text-generation use-cases are not compatible with rolling batch
rollingBatch = "disable";
} else if (isTnxEnabled(features)) {
rollingBatch = "tnx";
} else if (isLmiDistEnabled(features)
&& "lmi-dist".equals(MODEL_TO_ROLLING_BATCH.get(modelType))) {
rollingBatch = "lmi-dist";
} else if (isVllmEnabled(features)
&& "vllm".equals(MODEL_TO_ROLLING_BATCH.get(modelType))) {
rollingBatch = "vllm";
} else if (isTrtLlmEnabled(features)) {
rollingBatch = "trtllm";
} else if (Ec2Utils.isSageMaker()) {
rollingBatch = "scheduler";
} else {
rollingBatch = "disable";
}
lmiProperties.setProperty("option.rolling_batch", rollingBatch);
}
private static void setMpiMode(
Properties lmiProperties,
LmiUtils.HuggingFaceModelConfig modelConfig,
String features) {
String rollingBatch = lmiProperties.getProperty("option.rolling_batch");
if ("lmi-dist".equals(rollingBatch) || "trtllm".equals(rollingBatch)) {
lmiProperties.setProperty("option.mpi_mode", "true");
}
// TODO TrtLLM python backend: Change it once TrtLLM supports T5 with inflight batching.
if (isT5TrtLlm(modelConfig, features)) {
lmiProperties.setProperty("option.mpi_mode", "true");
}
}
private static void setTensorParallelDegree(Properties lmiProperties) {
if (lmiProperties.containsKey("option.tensor_parallel_degree")) {
return;
}
String tpDegree = Utils.getenv("TENSOR_PARALLEL_DEGREE", "max");
if ("max".equals(tpDegree)) {
tpDegree = String.valueOf(CudaUtils.getGpuCount());
}
lmiProperties.setProperty("option.tensor_parallel_degree", tpDegree);
}
private static void setDynamicBatch(
Properties lmiProperties,
LmiUtils.HuggingFaceModelConfig modelConfig,
ModelInfo<?, ?> modelInfo,
String features) {
// TODO TrtLLM python backend: Change it once TrtLLM supports T5 with inflight batching.
if (isT5TrtLlm(modelConfig, features)) {
// To do runtime compilation for TensorRT-LLM T5 model.
lmiProperties.setProperty("trtllm_python_backend", String.valueOf(true));
lmiProperties.setProperty("option.rolling_batch", "disable");
            // We set batch_size only when the customer did not provide it.
if (Integer.parseInt(lmiProperties.getProperty("batch_size", "0")) == 0) {
modelInfo.batchSize = 32;
lmiProperties.setProperty("batch_size", String.valueOf(32));
}
}
}
private static void setRollingBatchSize(Properties lmiProperties) {
if (lmiProperties.containsKey("option.max_rolling_batch_size")) {
return;
}
String rollingBatch = lmiProperties.getProperty("option.rolling_batch");
int rollingBatchSize = 32;
if ("vllm".equals(rollingBatch) || "lmi-dist".equals(rollingBatch)) {
rollingBatchSize = 256;
}
if ("trtllm".equals(rollingBatch)) {
// https://github.com/NVIDIA/TensorRT-LLM/blob/v0.9.0/tensorrt_llm/_common.py#L208-L215
// TODO: setting better default per 0.9.0 guidance 1024 * 16 = 16384
if (!lmiProperties.containsKey("option.max_num_tokens")) {
lmiProperties.setProperty("option.max_num_tokens", "16384");
}
rollingBatchSize = 256;
}
lmiProperties.setProperty(
"option.max_rolling_batch_size", String.valueOf(rollingBatchSize));
}
private static boolean isVllmEnabled(String features) {
return features != null && features.contains("vllm");
}
private static boolean isLmiDistEnabled(String features) {
return features != null && features.contains("lmi-dist");
}
private static boolean isTrtLlmEnabled(String features) {
return features != null && features.contains("trtllm");
}
private static boolean isTnxEnabled(String features) {
return features != null && features.contains("tnx");
}
private static boolean isT5TrtLlm(
LmiUtils.HuggingFaceModelConfig modelConfig, String features) {
return isTrtLlmEnabled(features) && "t5".equals(modelConfig.getModelType());
}
private static boolean isTextGenerationModel(LmiUtils.HuggingFaceModelConfig modelConfig) {
for (String arch : modelConfig.getArchitectures()) {
boolean isTextGenerationModel =
OPTIMIZED_TASK_ARCHITECTURES.stream().anyMatch(arch::endsWith);
if (isTextGenerationModel) {
return true;
}
}
logger.warn(
"The model task architecture {} is not supported for optimized inference. LMI will"
+ " attempt to load the model using HuggingFace Accelerate. Optimized inference"
+ " performance is only available for the following task architectures: {}",
modelConfig.getArchitectures(),
OPTIMIZED_TASK_ARCHITECTURES);
return false;
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/LmiUtils.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.Device;
import ai.djl.ModelException;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.util.JsonUtils;
import ai.djl.util.Utils;
import ai.djl.util.cuda.CudaUtils;
import com.google.gson.JsonSyntaxException;
import com.google.gson.annotations.SerializedName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
/** A utility class to detect optimal engine for LMI model. */
public final class LmiUtils {
private static final Logger logger = LoggerFactory.getLogger(LmiUtils.class);
private LmiUtils() {}
static void configureLmiModel(ModelInfo<?, ?> modelInfo) throws ModelException {
HuggingFaceModelConfig modelConfig = getHuggingFaceModelConfig(modelInfo);
Properties prop = modelInfo.getProperties();
if (modelConfig == null) {
// Precompiled models may not have config, set mpi mode when trtllm
// TODO: Include TRT/Neuron compiled models in this configure flow
String features = Utils.getEnvOrSystemProperty("SERVING_FEATURES");
if (features != null && features.contains("trtllm")) {
prop.setProperty("option.mpi_mode", "true");
}
return;
}
LmiConfigRecommender.configure(modelInfo, prop, modelConfig);
logger.info(
"Detected mpi_mode: {}, rolling_batch: {}, tensor_parallel_degree {}, for"
+ " modelType: {}",
prop.getProperty("option.mpi_mode"),
prop.getProperty("option.rolling_batch"),
prop.getProperty("option.tensor_parallel_degree"),
modelConfig.getModelType());
}
static boolean isTrtLlmRollingBatch(Properties properties) {
String rollingBatch = properties.getProperty("option.rolling_batch");
if ("trtllm".equals(rollingBatch)) {
return true;
}
if (rollingBatch == null || "auto".equals(rollingBatch)) {
// FIXME: find a better way to set default rolling batch for trtllm
String features = Utils.getEnvOrSystemProperty("SERVING_FEATURES");
return features != null && features.contains("trtllm");
}
return false;
}
static boolean needConvert(ModelInfo<?, ?> info) {
Properties properties = info.getProperties();
return isTrtLlmRollingBatch(properties) || properties.containsKey("trtllm_python_backend");
}
static void convertTrtLLM(ModelInfo<?, ?> info) throws IOException {
Path trtRepo;
String modelId = null;
if (info.downloadDir != null) {
trtRepo = info.downloadDir;
} else {
trtRepo = info.modelDir;
modelId = info.prop.getProperty("option.model_id");
if (modelId != null && Files.isDirectory(Paths.get(modelId))) {
trtRepo = Paths.get(modelId);
}
}
if (modelId == null) {
modelId = trtRepo.toString();
}
String tpDegree = info.prop.getProperty("option.tensor_parallel_degree");
if (tpDegree == null) {
tpDegree = Utils.getenv("TENSOR_PARALLEL_DEGREE", "max");
}
if ("max".equals(tpDegree)) {
tpDegree = String.valueOf(CudaUtils.getGpuCount());
}
// TODO TrtLLM python backend: Change it once TrtLLM supports T5 with inflight batching.
if (info.prop.containsKey("trtllm_python_backend")) {
// Inflight batching support is not available for certain models like t5.
// Python backend models have different model repo format compared to C++ backend.
// And whether it is valid or not is checked in tensorrt_llm_toolkit. So it is not
// necessary to check here.
if (!isValidTrtLlmPythonModelRepo(trtRepo)) {
info.downloadDir = buildTrtLlmArtifacts(info.modelDir, modelId, tpDegree);
}
} else {
info.prop.put("option.rolling_batch", "trtllm");
if (!isValidTrtLlmModelRepo(trtRepo)) {
info.downloadDir = buildTrtLlmArtifacts(info.modelDir, modelId, tpDegree);
}
}
}
static void convertOnnxModel(ModelInfo<?, ?> info) throws IOException {
String prefix = info.prop.getProperty("option.modelName", info.modelDir.toFile().getName());
if (Files.isRegularFile(info.modelDir.resolve(prefix + ".onnx"))
|| Files.isRegularFile(info.modelDir.resolve("model.onnx"))) {
return;
}
Path repo;
String modelId = null;
if (info.downloadDir != null) {
repo = info.downloadDir;
} else {
repo = info.modelDir;
modelId = info.prop.getProperty("option.model_id");
if (modelId != null && Files.isDirectory(Paths.get(modelId))) {
repo = Paths.get(modelId);
}
}
if (modelId == null) {
modelId = repo.toString();
}
info.modelUrl = convertOnnx(modelId).toUri().toURL().toString();
}
private static Path convertOnnx(String modelId) throws IOException {
logger.info("Converting model to onnx artifacts");
String hash = Utils.hash(modelId);
String download = Utils.getenv("SERVING_DOWNLOAD_DIR", null);
Path parent = download == null ? Utils.getCacheDir() : Paths.get(download);
Path repoDir = parent.resolve("onnx").resolve(hash);
if (Files.exists(repoDir)) {
logger.info("Onnx artifacts already converted: {}", repoDir);
return repoDir;
}
String[] cmd = {
"djl-convert",
"--output-dir",
repoDir.toAbsolutePath().toString(),
"--output-format",
"OnnxRuntime",
"-m",
modelId,
"--optimize",
CudaUtils.hasCuda() ? "O4" : "O2",
"--device",
CudaUtils.hasCuda() ? "cuda" : "cpu"
};
boolean success = false;
try {
Process exec = new ProcessBuilder(cmd).redirectErrorStream(true).start();
try (BufferedReader reader =
new BufferedReader(
new InputStreamReader(exec.getInputStream(), StandardCharsets.UTF_8))) {
String line;
while ((line = reader.readLine()) != null) {
logger.debug("convert: {}", line);
}
}
int exitCode = exec.waitFor();
if (0 != exitCode) {
throw new EngineException("Model conversion process failed!");
}
success = true;
logger.info("Onnx artifacts built successfully");
return repoDir;
} catch (InterruptedException e) {
throw new IOException("Failed to build Onnx artifacts", e);
} finally {
if (!success) {
Utils.deleteQuietly(repoDir);
}
}
}
/**
* Returns the Huggingface config.json file URI.
*
* @param modelInfo the model object
* @param modelId the model id
* @return the Huggingface config.json file URI
*/
public static URI generateHuggingFaceConfigUri(ModelInfo<?, ?> modelInfo, String modelId) {
Path modelDir = modelInfo.modelDir;
if (Files.isRegularFile(modelDir.resolve("config.json"))) {
return modelDir.resolve("config.json").toUri();
} else if (Files.isRegularFile(modelDir.resolve("model_index.json"))) {
return modelDir.resolve("model_index.json").toUri();
}
if (modelId == null || modelId.startsWith("djl://")) {
            // djl:// models should already be covered by the local file in modelDir case above
return null;
} else if (modelId.startsWith("s3://")) {
            // HF_MODEL_ID=s3:// should not reach here; this is the OPTION_MODEL_ID case.
Path downloadDir = modelInfo.downloadDir;
if (Files.isRegularFile(downloadDir.resolve("config.json"))) {
return downloadDir.resolve("config.json").toUri();
} else if (Files.isRegularFile(downloadDir.resolve("model_index.json"))) {
return downloadDir.resolve("model_index.json").toUri();
}
return null;
} else {
modelInfo.prop.setProperty("option.model_id", modelId);
Path dir = Paths.get(modelId);
if (Files.isDirectory(dir)) {
Path configFile = dir.resolve("config.json");
if (Files.isRegularFile(configFile)) {
return configFile.toUri();
}
                // stable diffusion models have a different file name for the config
configFile = dir.resolve("model_index.json");
if (Files.isRegularFile(configFile)) {
return configFile.toUri();
}
return null;
}
return getHuggingFaceHubConfigUri(modelId);
}
}
private static URI getHuggingFaceHubConfigUri(String modelId) {
String[] possibleConfigFiles = {"config.json", "model_index.json"};
String hubToken = Utils.getEnvOrSystemProperty("HF_TOKEN");
Map<String, String> headers = new ConcurrentHashMap<>();
headers.put("User-Agent", "DJL/" + Engine.getDjlVersion());
if (hubToken != null) {
headers.put("Authorization", "Bearer " + hubToken);
}
for (String configFile : possibleConfigFiles) {
URI configUri =
URI.create("https://huggingface.co/" + modelId + "/raw/main/" + configFile);
try (InputStream is = Utils.openUrl(configUri.toURL(), headers)) {
is.transferTo(OutputStream.nullOutputStream());
return configUri;
} catch (IOException e) {
logger.warn("Hub config file {} does not exist for model {}.", configFile, modelId);
}
}
return null;
}
private static HuggingFaceModelConfig getHuggingFaceModelConfig(ModelInfo<?, ?> modelInfo)
throws ModelException {
String modelId = modelInfo.prop.getProperty("option.model_id");
URI modelConfigUri = generateHuggingFaceConfigUri(modelInfo, modelId);
if (modelConfigUri == null) {
return null;
}
Map<String, String> headers = new ConcurrentHashMap<>();
headers.put("User-Agent", "DJL/" + Engine.getDjlVersion());
String hubToken = Utils.getEnvOrSystemProperty("HF_TOKEN");
if (hubToken != null) {
headers.put("Authorization", "Bearer " + hubToken);
}
try (InputStream is = Utils.openUrl(modelConfigUri.toURL(), headers)) {
return JsonUtils.GSON.fromJson(Utils.toString(is), HuggingFaceModelConfig.class);
} catch (IOException | JsonSyntaxException e) {
throw new ModelNotFoundException("Invalid huggingface model id: " + modelId, e);
}
}
private static Path buildTrtLlmArtifacts(Path modelDir, String modelId, String tpDegree)
throws IOException {
logger.info("Converting model to TensorRT-LLM artifacts");
String hash = Utils.hash(modelId + tpDegree);
String download = Utils.getenv("SERVING_DOWNLOAD_DIR", null);
Path parent = download == null ? Utils.getCacheDir() : Paths.get(download);
Path trtLlmRepoDir = parent.resolve("trtllm").resolve(hash);
if (Files.exists(trtLlmRepoDir)) {
logger.info("TensorRT-LLM artifacts already converted: {}", trtLlmRepoDir);
return trtLlmRepoDir;
}
String[] cmd = {
"python",
"/opt/djl/partition/trt_llm_partition.py",
"--properties_dir",
modelDir.toAbsolutePath().toString(),
"--trt_llm_model_repo",
trtLlmRepoDir.toString(),
"--tensor_parallel_degree",
tpDegree,
"--model_path",
modelId
};
boolean success = false;
try {
Process exec = new ProcessBuilder(cmd).redirectErrorStream(true).start();
try (BufferedReader reader =
new BufferedReader(
new InputStreamReader(exec.getInputStream(), StandardCharsets.UTF_8))) {
String line;
while ((line = reader.readLine()) != null) {
logger.info("convert_py: {}", line);
}
}
int exitCode = exec.waitFor();
if (0 != exitCode) {
throw new EngineException("Model conversion process failed!");
}
success = true;
logger.info("TensorRT-LLM artifacts built successfully");
return trtLlmRepoDir;
} catch (InterruptedException e) {
throw new IOException("Failed to build TensorRT-LLM artifacts", e);
} finally {
if (!success) {
Utils.deleteQuietly(trtLlmRepoDir);
}
}
}
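    // Example (illustrative; "my-org/my-model" is hypothetical and the cache location
    // depends on SERVING_DOWNLOAD_DIR and the DJL cache dir): converting model id
    // "my-org/my-model" with tensor_parallel_degree "2" writes the artifacts to
    // <cache>/trtllm/<hash("my-org/my-model" + "2")> and reuses them on later runs.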
// TODO: migrate this to CUDAUtils in next version
static String getAWSGpuMachineType() {
String computeCapability = CudaUtils.getComputeCapability(0);
        // Get GPU memory in GB
double totalMemory =
CudaUtils.getGpuMemory(Device.gpu()).getMax() / 1024.0 / 1024.0 / 1024.0;
if ("7.5".equals(computeCapability)) {
return "g4";
} else if ("8.0".equals(computeCapability)) {
if (totalMemory > 45.0) {
return "p4de";
}
return "p4d";
} else if ("8.6".equals(computeCapability)) {
return "g5";
} else if ("8.9".equals(computeCapability)) {
if (totalMemory > 25.0) {
return "g6e";
}
return "g6";
} else if ("9.0".equals(computeCapability)) {
return "p5";
} else {
logger.warn("Could not identify GPU arch {}", computeCapability);
return null;
}
}
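    // Example (illustrative): compute capability "8.0" with 40 GB of GPU memory maps
    // to "p4d", while the same capability with 80 GB (> 45 GB) maps to "p4de".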
static boolean isValidTrtLlmPythonModelRepo(Path modelPath) throws IOException {
AtomicBoolean isValid = new AtomicBoolean();
try (Stream<Path> walk = Files.list(modelPath)) {
walk.filter(Files::isDirectory)
.filter(
p -> {
String directoryName = p.getFileName().toString();
return directoryName.contains("encoder")
|| directoryName.contains("decoder");
})
.forEach(
p -> {
logger.info(String.valueOf(p));
Path configFile = p.resolve("config.json");
if (Files.isRegularFile(configFile)) {
logger.info("Found trtllm python model: {}", p);
isValid.set(true);
}
});
}
return isValid.get();
}
static boolean isValidTrtLlmModelRepo(Path modelPath) throws IOException {
// TODO: match model name
AtomicBoolean isValid = new AtomicBoolean();
try (Stream<Path> walk = Files.list(modelPath)) {
walk.filter(Files::isDirectory)
.forEach(
p -> {
Path confFile = p.resolve("config.pbtxt");
// TODO: add stricter check for tokenizer
Path tokenizer = p.resolve("tokenizer_config.json");
if (Files.isRegularFile(confFile)
&& Files.isRegularFile(tokenizer)) {
logger.info("Found triton model: {}", p);
isValid.set(true);
}
});
}
return isValid.get();
}
    // This represents the config of Hugging Face NLP models as well as the config of
    // diffusers models. The configs differ, but for now we can leverage a single class
    // since we don't need much information from them.
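    // Example (illustrative): a minimal NLP config.json such as
    //   {"model_type": "llama", "architectures": ["LlamaForCausalLM"]}
    // yields getModelType() == "llama", while a diffusers model_index.json that only
    // carries "_diffusers_version" yields getModelType() == "stable-diffusion".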
static final class HuggingFaceModelConfig {
@SerializedName("model_type")
private String modelType;
@SerializedName("architectures")
private List<String> configArchitectures;
@SerializedName("auto_map")
private Map<String, String> autoMap;
@SerializedName("_diffusers_version")
private String diffusersVersion;
private Set<String> allArchitectures;
public String getModelType() {
if (modelType == null) {
return diffusersVersion == null ? null : "stable-diffusion";
}
return modelType;
}
public Set<String> getArchitectures() {
if (allArchitectures == null) {
determineAllArchitectures();
}
return allArchitectures;
}
private void determineAllArchitectures() {
allArchitectures = new HashSet<>();
if (configArchitectures != null) {
allArchitectures.addAll(configArchitectures);
}
if (autoMap != null) {
allArchitectures.addAll(autoMap.keySet());
}
}
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/ModelInfo.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.ModelException;
import ai.djl.engine.Engine;
import ai.djl.engine.EngineException;
import ai.djl.inference.Predictor;
import ai.djl.metric.Dimension;
import ai.djl.metric.Metric;
import ai.djl.metric.Metrics;
import ai.djl.metric.Unit;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.Artifact;
import ai.djl.repository.FilenameUtils;
import ai.djl.repository.MRL;
import ai.djl.repository.Repository;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.serving.wlm.util.EventManager;
import ai.djl.serving.wlm.util.WlmConfigManager;
import ai.djl.serving.wlm.util.WlmOutOfMemoryException;
import ai.djl.translate.TranslateException;
import ai.djl.util.NeuronUtils;
import ai.djl.util.Utils;
import ai.djl.util.cuda.CudaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.MemoryUsage;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Scanner;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
/** A class that represents a loaded model and its metadata. */
public final class ModelInfo<I, O> extends WorkerPoolConfig<I, O> {
private static final Logger logger = LoggerFactory.getLogger(ModelInfo.class);
private static final Logger MODEL_METRIC = LoggerFactory.getLogger("model_metric");
private static final Pattern PATTERN = Pattern.compile("MemAvailable:\\s+(\\d+) kB");
private static final Pattern SNAKE_CASE = Pattern.compile("[A-Z_]+");
private String engineName;
private String loadOnDevices;
// the following fields can be loaded from workflow json file
private Map<String, String> filters;
private Map<String, Object> arguments;
private Map<String, String> options;
private String application;
private String modelName;
private String translatorFactory;
private String translator;
private boolean dynamicAdapters;
transient Path modelDir;
private transient String artifactName;
transient Path downloadDir;
transient Properties prop;
private transient Status status;
private transient Class<I> inputClass;
private transient Class<O> outputClass;
private transient Criteria<I, O> criteria;
private transient Map<Device, ZooModel<I, O>> models;
private transient Map<String, Adapter> adapters;
private transient Engine engine;
private transient boolean initialize;
private transient EventManager eventManager;
private transient Dimension dimension;
private ModelInfo() {
eventManager = EventManager.getInstance();
dimension = new Dimension("Model", "model");
}
/**
* Constructs a new {@code ModelInfo} instance.
*
* @param modelUrl the model Url
*/
@SuppressWarnings("unchecked")
public ModelInfo(String modelUrl) {
this.id = modelUrl;
this.modelUrl = modelUrl;
this.inputClass = (Class<I>) Input.class;
this.outputClass = (Class<O>) Output.class;
adapters = new ConcurrentHashMap<>();
eventManager = EventManager.getInstance();
dimension = new Dimension("Model", id);
}
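    // Example (illustrative sketch, hypothetical URL): the single-argument constructor
    // uses the URL as both the id and the model URL, with Input/Output as the types:
    //
    //   ModelInfo<Input, Output> info = new ModelInfo<>("https://example.com/resnet18.zip");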
/**
* Constructs a {@link ModelInfo} based on a {@link Criteria}.
*
* @param id the id for the created {@link ModelInfo}
* @param modelUrl the model Url
* @param criteria the model criteria
*/
public ModelInfo(String id, String modelUrl, Criteria<I, O> criteria) {
this.id = id;
this.modelUrl = modelUrl;
this.criteria = criteria;
inputClass = criteria.getInputClass();
outputClass = criteria.getOutputClass();
adapters = new ConcurrentHashMap<>();
eventManager = EventManager.getInstance();
dimension = new Dimension("Model", id);
}
/**
* Constructs a new {@code ModelInfo} instance.
*
* @param id the ID of the model that will be used by workflow
* @param modelUrl the model url
* @param version the version of the model
* @param engineName the engine to load the model
* @param loadOnDevices the devices to load the model on
* @param inputClass the model input class
* @param outputClass the model output class
* @param queueSize the maximum request queue size
* @param maxIdleSeconds the initial maximum idle time for workers
* @param maxBatchDelayMillis the initial maximum delay when scaling up before giving up
* @param batchSize the batch size for this model
* @param minWorkers the minimum number of workers
* @param maxWorkers the maximum number of workers
*/
public ModelInfo(
String id,
String modelUrl,
String version,
String engineName,
String loadOnDevices,
Class<I> inputClass,
Class<O> outputClass,
int queueSize,
int maxIdleSeconds,
int maxBatchDelayMillis,
int batchSize,
int minWorkers,
int maxWorkers) {
this.id = id;
this.modelUrl = modelUrl;
this.version = version;
this.engineName = engineName;
this.loadOnDevices = loadOnDevices;
this.inputClass = inputClass;
this.outputClass = outputClass;
this.maxBatchDelayMillis = maxBatchDelayMillis;
this.maxIdleSeconds = maxIdleSeconds; // default max idle time 60s
this.queueSize = queueSize;
this.batchSize = batchSize;
this.minWorkers = Math.min(minWorkers, maxWorkers);
this.maxWorkers = maxWorkers;
adapters = new ConcurrentHashMap<>();
eventManager = EventManager.getInstance();
dimension = new Dimension("Model", id);
}
/**
* Returns the properties of the model.
*
* @return the properties of the model
*/
public Properties getProperties() {
return prop;
}
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public void load(Device device) throws ModelException, IOException {
if (getModels().containsKey(device)) {
return;
}
try {
// Download the model again if the model files are deleted
initialize();
checkAvailableMemory(device);
} catch (IOException e) {
throw new ModelNotFoundException(e);
}
eventManager.onModelLoading(this, device);
long begin = System.nanoTime();
try {
Criteria.Builder<I, O> builder;
if (criteria != null) {
builder = criteria.toBuilder().optEngine(engineName);
} else {
builder =
Criteria.builder()
.setTypes(inputClass, outputClass)
.optModelUrls(modelUrl)
.optModelName(modelName)
.optEngine(engineName)
.optFilters(filters)
.optArguments(arguments)
.optOptions(options);
if (application != null) {
builder.optArgument("application", application);
}
if (translator != null) {
builder.optArgument("translator", translator);
}
if (translatorFactory != null) {
builder.optArgument("translatorFactory", translatorFactory);
}
if (batchSize > 1) {
builder.optArgument("batchifier", "stack");
}
}
logger.info("Loading model {} {} on {}", id, uid, device);
if ("nc".equals(device.getDeviceType()) && "PyTorch".equals(engineName)) {
                // assume Neuron only supports PyTorch
logger.info("{}: Bypass NC core allocation", uid);
} else {
builder.optDevice(device);
}
if (downloadDir != null) {
// override model_id
builder.optOption("model_id", downloadDir.toAbsolutePath().toString());
}
ZooModel<I, O> m = builder.build().loadModel();
m.setProperty("metric_dimension", id);
long duration = (System.nanoTime() - begin) / 1000;
Metric metric = new Metric("LoadModel", duration, Unit.MICROSECONDS, dimension);
MODEL_METRIC.info("{}", metric);
eventManager.onModelLoaded(this);
if (engine == null) {
engine = m.getNDManager().getEngine();
}
if (models.isEmpty()) {
// Check for adapters on first load
List<Path> possibleAdapterDirs = new ArrayList<>(2);
possibleAdapterDirs.add(modelDir);
if (downloadDir != null && !modelDir.equals(downloadDir)) {
possibleAdapterDirs.add(downloadDir);
}
for (Path parentDir : possibleAdapterDirs) {
if (Files.isDirectory(parentDir.resolve("adapters"))) {
Files.list(parentDir.resolve("adapters"))
.forEach(
adapterDir -> {
eventManager.onAdapterLoading(this, adapterDir);
long start = System.nanoTime();
String adapterName =
adapterDir.getFileName().toString();
Adapter adapter =
Adapter.newInstance(
this,
adapterName,
adapterDir.toAbsolutePath().toString(),
Collections.emptyMap());
registerAdapter(adapter);
long d = (System.nanoTime() - start) / 1000;
Metric me =
new Metric(
"LoadAdapter",
d,
Unit.MICROSECONDS,
dimension);
MODEL_METRIC.info("{}", me);
eventManager.onAdapterLoaded(this, adapter);
});
}
}
}
models.put(device, m);
status = Status.READY;
} finally {
if (status == null) {
status = Status.FAILED;
}
}
}
/**
* Returns all loaded models.
*
* @return all loaded models
*/
public Map<Device, ZooModel<I, O>> getModels() {
if (models == null) {
models = new ConcurrentHashMap<>();
}
return models;
}
/**
* Returns the loaded {@link ZooModel} for a device.
*
* @param device the device to return the model on
* @return the loaded {@link ZooModel}
*/
public ZooModel<I, O> getModel(Device device) {
if (getModels().get(device) == null) {
throw new IllegalStateException("Model \"" + id + "\" has not been loaded yet.");
}
return getModels().get(device);
}
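    // Example (illustrative sketch, hypothetical local path): load on CPU, then fetch
    // the loaded model for the same device:
    //
    //   ModelInfo<Input, Output> info = new ModelInfo<>("file:///opt/ml/model");
    //   info.load(Device.cpu());
    //   ZooModel<Input, Output> model = info.getModel(Device.cpu());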
/** {@inheritDoc} */
@Override
public ThreadConfig<I, O> newThread(Device device) {
return new ModelThread(device);
}
/**
* Returns the engine.
*
* @return the engine
*/
public Engine getEngine() {
return engine;
}
/**
* Returns the engine name.
*
* @return the engine name
*/
public String getEngineName() {
return engineName;
}
/** {@inheritDoc} */
@Override
public Status getStatus() {
if (status == null) {
return Status.PENDING;
} else if (status == Status.FAILED) {
return Status.FAILED;
}
for (Model m : getModels().values()) {
int failures = m.intProperty("failed", 0);
if (failures > 0) {
int def = Integer.parseInt(Utils.getenv("SERVING_RETRY_THRESHOLD", "10"));
int threshold = m.intProperty("retry_threshold", def);
if (failures > threshold) {
logger.info(
"{}: exceed retry threshold: {}, mark model as failed.",
uid,
threshold);
return Status.FAILED;
}
}
}
return status;
}
/**
* Returns the model input class.
*
* @return the model input class
*/
public Class<I> getInputClass() {
return inputClass;
}
/**
* Returns the model output class.
*
* @return the model output class
*/
public Class<O> getOutputClass() {
return outputClass;
}
/**
* Clarifies the input and output class when not specified.
*
* <p>Warning: This is intended for internal use with reflection.
*
* @param inputClass the model input class
* @param outputClass the model output class
*/
public void hasInputOutputClass(Class<I> inputClass, Class<O> outputClass) {
if (this.inputClass != null || this.outputClass != null) {
throw new IllegalStateException(
"hasInputOutputClass can only be used when input or output are not yet set");
}
this.inputClass = inputClass;
this.outputClass = outputClass;
}
/** {@inheritDoc} */
@Override
public int getMinWorkers(Device device) {
if (minWorkers != null && minWorkers >= 0) {
return minWorkers;
}
return getWorkersMinMaxProperty(getModel(device), device, "minWorkers", 1);
}
/** {@inheritDoc} */
@Override
public int getMaxWorkers(Device device) {
if (maxWorkers != null && maxWorkers >= 0) {
return maxWorkers;
}
WlmConfigManager configManager = WlmConfigManager.getInstance();
if (configManager.isDebug()) {
return 1;
}
// get from model's property
Model model = getModel(device);
int maxProp = getWorkersMinMaxProperty(model, device, "maxWorkers", -1);
if (maxProp > 0) {
return maxProp;
}
NDManager manager = model.getNDManager();
if ("nc".equals(device.getDeviceType())) {
if ("Python".equals(manager.getEngine().getEngineName())) {
return 1;
}
return 2; // default to max 2 workers for inferentia
}
if (Device.Type.GPU.equals(device.getDeviceType())) {
String eng = manager.getEngine().getEngineName();
if ("MXNet".equals(eng) || "Python".equals(eng)) {
// FIXME: MXNet GPU Model doesn't support multi-threading
return 1;
}
return 2;
}
int cpuCores = Runtime.getRuntime().availableProcessors();
int ompThreads = Integer.parseInt(Utils.getenv("OMP_NUM_THREADS", "-1"));
if (ompThreads > 0) {
if (ompThreads > cpuCores) {
ompThreads = cpuCores;
}
return cpuCores / ompThreads;
}
return 2;
}
private int getWorkersMinMaxProperty(Model model, Device device, String key, int def) {
String workers = model.getProperty(device.getDeviceType() + '.' + key);
if (workers != null) {
return Integer.parseInt(workers);
}
workers = model.getProperty(key);
if (workers != null) {
return Integer.parseInt(workers);
}
return def;
}
/** {@inheritDoc} */
@Override
public void initialize() throws IOException, ModelException {
if (initialize) {
return;
}
if (adapters == null) {
adapters = new ConcurrentHashMap<>();
}
eventManager.onModelDownloading(this);
long begin = System.nanoTime();
downloadModel();
loadServingProperties();
downloadS3();
eventManager.onModelDownloaded(this, downloadDir);
configPerModelSettings();
downloadDraftModel();
long duration = (System.nanoTime() - begin) / 1000;
Metric metric = new Metric("DownloadModel", duration, Unit.MICROSECONDS, dimension);
MODEL_METRIC.info("{}", metric);
if (LmiUtils.needConvert(this)) {
eventManager.onModelConverting(this, "trtllm");
begin = System.nanoTime();
LmiUtils.convertTrtLLM(this);
duration = (System.nanoTime() - begin) / 1000;
metric = new Metric("ConvertTrtllm", duration, Unit.MICROSECONDS, dimension);
MODEL_METRIC.info("{}", metric);
eventManager.onModelConverted(this, "trtllm");
} else if ("OnnxRuntime".equals(getEngineName())) {
eventManager.onModelConverting(this, "onnx");
begin = System.nanoTime();
LmiUtils.convertOnnxModel(this);
duration = (System.nanoTime() - begin) / 1000;
metric = new Metric("ConvertOnnx", duration, Unit.MICROSECONDS, dimension);
MODEL_METRIC.info("{}", metric);
eventManager.onModelConverted(this, "onnx");
}
        // overridden prop keys are not written to serving.properties,
        // so we have to set them explicitly in the Criteria
if (options == null) {
options = new ConcurrentHashMap<>();
}
if (arguments == null) {
arguments = new ConcurrentHashMap<>();
// apply maxWorkers env for MPI mode
String maxWorkers = Utils.getenv("SERVING_MAX_WORKERS");
String minWorkers = Utils.getenv("SERVING_MIN_WORKERS");
if (maxWorkers != null) {
arguments.putIfAbsent("maxWorkers", maxWorkers);
}
if (minWorkers != null) {
arguments.putIfAbsent("minWorkers", minWorkers);
}
}
for (String key : prop.stringPropertyNames()) {
if (key.startsWith("option.")) {
options.put(key.substring(7), prop.getProperty(key));
} else {
arguments.put(key, prop.getProperty(key));
}
}
initialize = true;
}
/**
* Adds an adapter to this {@link ModelInfo}.
*
* @param adapter the adapter to add
*/
public void registerAdapter(Adapter adapter) {
synchronized (this) {
if (adapters.containsKey(adapter.getName())) {
throw new IllegalArgumentException(
"The adapter "
+ adapter.getName()
+ " already exists. If you want to replace it, please unregistering"
+ " before registering a new adapter with the same name.");
}
adapters.put(adapter.getName(), adapter);
}
}
/**
* Removes an adapter from this {@link ModelInfo}.
*
* @param name the adapter to remove
* @return the removed adapter
*/
public Adapter unregisterAdapter(String name) {
synchronized (this) {
// TODO: Remove from current workers
if (!adapters.containsKey(name)) {
throw new IllegalArgumentException(
"The adapter "
+ name
+ " was not found and therefore can't be unregistered");
}
return adapters.remove(name);
}
}
/**
* Returns the adapters for this model.
*
* @return the adapters for this model
*/
public Map<String, Adapter> getAdapters() {
return adapters;
}
/**
* Returns an adapter on this {@link ModelInfo}.
*
* @param name the adapter name to get
* @return the adapter
*/
public Adapter getAdapter(String name) {
return adapters.get(name);
}
/** {@inheritDoc} */
@Override
public void close() {
if (!getModels().isEmpty() && !Boolean.getBoolean("ai.djl.serving.keep_cache")) {
logger.info("Unloading model: {}", this);
if (downloadDir != null) {
Utils.deleteQuietly(downloadDir);
}
Path path = null;
for (Model m : models.values()) {
m.close();
path = m.getModelPath();
}
models.clear();
Path cacheDir = Utils.getCacheDir().toAbsolutePath();
if (Objects.requireNonNull(path).startsWith(cacheDir)) {
Utils.deleteQuietly(path);
}
}
}
/**
     * Infers the model name from the model URL in case the model name is not provided.
*
* @param url the model URL
* @return the model name
*/
public static String inferModelNameFromUrl(String url) {
URI uri = URI.create(url);
String path = uri.getPath();
if (path == null) {
path = uri.getSchemeSpecificPart();
}
boolean isDirectory = path.endsWith("/");
if (isDirectory) {
path = path.substring(0, path.length() - 1);
}
int pos = path.lastIndexOf('/');
String modelName;
if (pos >= 0) {
modelName = path.substring(pos + 1);
} else {
modelName = path;
}
if (!isDirectory) {
modelName = FilenameUtils.getNamePart(modelName);
}
modelName = modelName.replaceAll("(\\W|^_)", "_");
return modelName;
}
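    // Examples (illustrative): "https://example.com/resnet18.zip" -> "resnet18"
    // (the file extension is stripped), "s3://bucket/models/my-model/" -> "my_model"
    // (non-word characters are replaced with '_').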
/** {@inheritDoc} */
@Override
public Device withDefaultDevice(String deviceName) {
return Device.fromName(deviceName, Engine.getEngine(engineName));
}
private String inferEngine() throws ModelException {
String eng = prop.getProperty("engine");
if (eng != null) {
return eng;
}
String prefix = prop.getProperty("option.modelName", artifactName);
if (Files.isRegularFile(modelDir.resolve("metadata.yaml"))) {
eng = SageMakerUtils.inferSageMakerEngine(this);
if (eng != null) {
return eng;
}
}
String modelId = prop.getProperty("option.model_id");
if (modelId != null && modelId.startsWith("djl://")) {
Repository repo = Repository.newInstance("tmp", modelId);
MRL mrl = repo.getResources().get(0);
String groupId = mrl.getGroupId();
ModelZoo zoo = ModelZoo.getModelZoo(groupId);
return zoo.getSupportedEngines().iterator().next();
} else if (Files.isRegularFile(modelDir.resolve(prefix + ".pt"))
|| Files.isRegularFile(modelDir.resolve("model.pt"))) {
return "PyTorch";
} else if (Files.isRegularFile(modelDir.resolve("config.pbtxt"))) {
return "TritonServer";
} else if (Files.isRegularFile(modelDir.resolve("saved_model.pb"))) {
return "TensorFlow";
} else if (Files.isRegularFile(modelDir.resolve(prefix + "-symbol.json"))) {
return "MXNet";
} else if (Files.isRegularFile(modelDir.resolve(prefix + ".onnx"))
|| Files.isRegularFile(modelDir.resolve("model.onnx"))) {
return "OnnxRuntime";
} else if (Files.isRegularFile(modelDir.resolve(prefix + ".trt"))
|| Files.isRegularFile(modelDir.resolve(prefix + ".uff"))) {
return "TensorRT";
} else if (Files.isRegularFile(modelDir.resolve(prefix + ".json"))
|| Files.isRegularFile(modelDir.resolve(prefix + ".xgb"))
|| Files.isRegularFile(modelDir.resolve("model.xgb"))) {
return "XGBoost";
} else if (Files.isRegularFile(modelDir.resolve(prefix + ".gguf"))) {
return "Llama";
} else if (isPythonModel(prefix)) {
// TODO: How to differentiate Rust model from Python
return "Python";
} else {
try {
if (Utils.getCurrentEpoch(modelDir, prefix) >= 0) {
                    // Assume this is a DJL model
return Engine.getDefaultEngineName();
}
} catch (IOException e) {
logger.warn("{}: Failed search parameter files in folder: {}", uid, modelDir, e);
}
}
throw new ModelNotFoundException("Failed to detect engine of the model: " + modelDir);
}
private boolean isTorchServeModel() {
if (Files.isDirectory(modelDir.resolve("MAR-INF"))) {
logger.info("Found legacy torchserve model, use Python engine.");
return true;
}
return false;
}
private boolean isPythonModel(String prefix) {
return Files.isRegularFile(modelDir.resolve("model.py"))
|| Files.isRegularFile(modelDir.resolve(prefix + ".py"))
|| prop.getProperty("option.model_id") != null
|| Files.isRegularFile(modelDir.resolve("config.json"))
|| isTorchServeModel();
}
private void downloadModel() throws ModelException, IOException {
if (modelUrl.startsWith("s3://")) {
modelDir = downloadS3ToDownloadDir(modelUrl);
modelUrl = modelDir.toUri().toURL().toString();
return;
}
Repository repository = Repository.newInstance("modelStore", modelUrl);
List<MRL> mrls = repository.getResources();
if (mrls.isEmpty()) {
throw new ModelNotFoundException("Invalid model url: " + modelUrl);
}
Artifact artifact = mrls.get(0).getDefaultArtifact();
repository.prepare(artifact);
modelDir = Utils.getNestedModelDir(repository.getResourceDirectory(artifact));
artifactName = artifact.getName();
}
private void loadServingProperties() {
if (prop == null) {
Path file = modelDir.resolve("serving.properties");
prop = new Properties();
if (Files.isRegularFile(file)) {
try (InputStream is = Files.newInputStream(file)) {
prop.load(is);
} catch (IOException e) {
logger.warn("{}: Failed read serving.properties file", uid, e);
}
}
// load default settings from env
for (Map.Entry<String, String> entry : Utils.getenv().entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
if (value == null || value.isEmpty()) {
continue;
}
if (key.startsWith("OPTION_")) {
key = key.substring(7);
if (SNAKE_CASE.matcher(key).matches()) {
key = key.toLowerCase(Locale.ROOT);
}
if ("entrypoint".equals(key)) {
key = "entryPoint";
} else if ("engine".equals(key)) {
prop.putIfAbsent("engine", value);
continue;
}
prop.putIfAbsent("option." + key, value);
} else if (key.startsWith("ARGS_")) {
key = key.substring(5);
if (SNAKE_CASE.matcher(key).matches()) {
key = key.toLowerCase(Locale.ROOT);
}
arguments.putIfAbsent(key, value);
}
}
}
}
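    // Example (illustrative): the environment mapping above turns
    // OPTION_TENSOR_PARALLEL_DEGREE=4 into option.tensor_parallel_degree=4 and
    // OPTION_ENTRYPOINT=model.py into option.entryPoint=model.py, while
    // OPTION_ENGINE only sets the plain "engine" property.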
private void configPerModelSettings() throws ModelException {
// per model settings can only be configured once
WlmConfigManager wlmc = WlmConfigManager.getInstance();
if (queueSize <= 0) {
queueSize = intValue(prop, "job_queue_size", wlmc.getJobQueueSize());
}
if (batchSize <= 0) {
batchSize = intValue(prop, "max_dynamic_batch_size", wlmc.getBatchSize());
batchSize = intValue(prop, "batch_size", batchSize);
if (prop.containsKey("max_dynamic_batch_size")) {
prop.setProperty("batch_size", String.valueOf(batchSize));
}
}
if (maxBatchDelayMillis <= 0) {
maxBatchDelayMillis = intValue(prop, "max_batch_delay", wlmc.getMaxBatchDelayMillis());
}
if (maxIdleSeconds <= 0) {
maxIdleSeconds = intValue(prop, "max_idle_time", wlmc.getMaxIdleSeconds());
}
if (loadOnDevices == null) {
loadOnDevices = prop.getProperty("load_on_devices", wlmc.getLoadOnDevices());
}
if (engineName == null) {
engineName = inferEngine();
}
LmiUtils.configureLmiModel(this);
StringBuilder sb = new StringBuilder();
for (Map.Entry<Object, Object> entry : prop.entrySet()) {
String key = entry.getKey().toString();
if (!"job_queue_size".equals(key)
&& !"batch_size".equals(key)
&& !"max_dynamic_batch_size".equals(key)
&& !"max_idle_time".equals(key)
&& !"max_batch_delay".equals(key)
&& !"load_on_devices".equals(key)
&& !"engine".equals(key)
&& !"option.entryPoint".equals(key)) {
sb.append("\n ").append(key).append(": ").append(entry.getValue());
}
}
if ("MPI".equals(engineName)) {
prop.put("option.mpi_mode", "true");
}
logger.info(
"{}: Apply per model settings:\n"
+ " job_queue_size: {}\n"
+ " max_dynamic_batch_size: {}\n"
+ " max_batch_delay: {}\n"
+ " max_idle_time: {}\n"
+ " load_on_devices: {}\n"
+ " engine: {}\n"
+ " mpi_mode: {}\n"
+ " option.entryPoint: {}{}",
uid,
queueSize,
batchSize,
maxBatchDelayMillis,
maxIdleSeconds,
loadOnDevices,
engineName,
prop.get("option.mpi_mode"),
prop.get("option.entryPoint"),
sb);
eventManager.onModelConfigured(this);
}
void checkAvailableMemory(Device device) throws IOException {
if (Boolean.getBoolean("skip_oom_check")) {
return;
}
long requiredMemory = intValue(prop, "required_memory_mb", 0) * 1024L * 1024;
WlmConfigManager wlmc = WlmConfigManager.getInstance();
int defMemory = wlmc.getReservedMemoryMb();
long reservedMemory = intValue(prop, "reserved_memory_mb", defMemory) * 1024L * 1024;
String tpDegreeStr = Utils.getenv("TENSOR_PARALLEL_DEGREE", "0");
tpDegreeStr = prop.getProperty("option.tensor_parallel_degree", tpDegreeStr);
int tpDegree;
if ("max".equals(tpDegreeStr)) {
Engine eng = Engine.getEngine(engineName);
if (eng.getGpuCount() > 0) {
tpDegree = eng.getGpuCount();
} else {
tpDegree = NeuronUtils.getNeuronCores();
}
} else {
tpDegree = Integer.parseInt(tpDegreeStr);
}
if (requiredMemory <= 0
&& tpDegree < 1
&& "true".equals(Utils.getenv("SAGEMAKER_MULTI_MODEL"))) {
// TODO:
// 1. handle LMI use case in future
// 2. if huggingface model_id is specified, the model is downloaded
// in the python process, current file size based estimation doesn't work
logger.warn("{}: No reserved_memory_mb defined, estimating memory usage ...", uid);
try (Stream<Path> walk = Files.walk(modelDir)) {
requiredMemory = walk.mapToLong(ModelInfo::getFileSize).sum();
}
if (downloadDir != null) {
try (Stream<Path> walk = Files.walk(downloadDir)) {
requiredMemory += walk.mapToLong(ModelInfo::getFileSize).sum();
}
}
// estimate the memory to be 1.2x of file size
requiredMemory = requiredMemory * 12 / 10;
}
        // Assume the model requires the same amount of CPU memory when loaded on GPU
long free = getAvailableCpuMemory();
logger.info(
"{}: Available CPU memory: {} MB, required: {} MB, reserved: {} MB",
uid,
free / 1024 / 1024,
requiredMemory / 1024 / 1024,
reservedMemory / 1024 / 1024);
if (free - requiredMemory < reservedMemory) {
throw new WlmOutOfMemoryException("No enough memory to load the model.");
}
if (device.isGpu()) {
MemoryUsage usage;
try {
usage = CudaUtils.getGpuMemory(device);
} catch (IllegalArgumentException | EngineException e) {
logger.warn("Failed to get GPU memory", e);
throw new WlmOutOfMemoryException("No enough memory to load the model."); // NOPMD
}
free = usage.getMax() - usage.getCommitted();
long gpuMem = intValue(prop, "gpu.reserved_memory_mb", -1) * 1024L * 1024;
if (gpuMem > 0) {
reservedMemory = gpuMem;
}
gpuMem = intValue(prop, "gpu.required_memory_mb", -1) * 1024L * 1024;
if (gpuMem > 0) {
requiredMemory = gpuMem;
}
logger.info(
"{}: Available GPU memory: {} MB, required: {} MB, reserved: {} MB",
uid,
free / 1024 / 1024,
requiredMemory / 1024 / 1024,
reservedMemory / 1024 / 1024);
if (free - requiredMemory < reservedMemory) {
throw new WlmOutOfMemoryException("No enough memory to load the model.");
}
}
}
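    // Example of the estimation above (illustrative): when the file-size estimation
    // applies, 10 GB of model files yield an estimate of 10 GB * 1.2 = 12 GB, so the
    // load is rejected unless at least 12 GB plus the reserved memory is available.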
/** {@inheritDoc} */
@Override
public String[] getLoadOnDevices() {
Engine eng = Engine.getEngine(engineName);
if ("*".equals(loadOnDevices)) {
int gpuCount = eng.getGpuCount();
String v = Utils.getenv("TENSOR_PARALLEL_DEGREE", "-1");
v = prop.getProperty("option.tensor_parallel_degree", v);
int tpDegree;
if ("max".equals(v)) {
if (gpuCount > 0) {
tpDegree = gpuCount;
} else {
tpDegree = NeuronUtils.getNeuronCores();
}
} else {
tpDegree = Integer.parseInt(v);
}
if (gpuCount > 0) {
int gpuPerWorker = 1;
if (Boolean.parseBoolean(prop.getProperty("option.mpi_mode"))) {
return new String[] {"0"};
} else if ("Python".equals(engineName)) {
if (tpDegree > 0) {
gpuPerWorker = tpDegree;
int procs = gpuCount / gpuPerWorker;
if (procs == 0) {
throw new EngineException(
"GPU devices are not enough to run "
+ gpuPerWorker
+ " partitions.");
}
if (maxWorkers == null || maxWorkers < 0) {
gpuCount = procs;
} else {
gpuCount = Math.min(procs, maxWorkers);
}
}
}
String[] ret = new String[gpuCount];
for (int i = 0; i < gpuCount; ++i) {
ret[i] = String.valueOf(i * gpuPerWorker);
}
return ret;
} else if (NeuronUtils.hasNeuron()) {
int neurons = NeuronUtils.getNeuronCores();
int ncPerWorker;
if (tpDegree > 0) {
// Assume user understand TP only works on inf2
ncPerWorker = tpDegree;
int procs = neurons / ncPerWorker;
if (procs == 0) {
throw new EngineException(
"Neuron devices are not enough to run "
+ ncPerWorker
+ " partitions. Please refer to: "
+ "https://github.com/aws-neuron/transformers-neuronx#tensor-parallelism-support");
}
neurons = procs;
} else {
ncPerWorker = 1;
}
String[] ret = new String[neurons];
for (int i = 0; i < neurons; ++i) {
ret[i] = "nc" + (i * ncPerWorker);
}
return ret;
}
} else if (!loadOnDevices.isEmpty()) {
return loadOnDevices.split(";");
}
return new String[] {"-1"};
}
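    // Example for the "*" case above (illustrative): with 8 GPUs and
    // option.tensor_parallel_degree=4 under the Python engine, gpuPerWorker is 4,
    // two workers are created, and they are pinned to devices "0" and "4".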
/** {@inheritDoc} */
@Override
public boolean isParallelLoading() {
return Boolean.parseBoolean(prop.getProperty("option.parallel_loading"));
}
private static long getFileSize(Path path) {
try {
if (Files.isRegularFile(path) && !Files.isHidden(path)) {
return Files.size(path);
}
return 0;
} catch (IOException e) {
logger.warn("Failed to get size of: {}", path, e);
}
return 0L;
}
private long getAvailableCpuMemory() {
if (System.getProperty("os.name").startsWith("Linux")) {
try (Scanner scanner = new Scanner(Paths.get("/proc/meminfo"))) {
while (scanner.hasNext()) {
String line = scanner.nextLine();
Matcher m = PATTERN.matcher(line);
if (m.matches()) {
return Long.parseLong(m.group(1)) * 1024;
}
}
logger.warn("{}: Failed to read free memory from /proc/meminfo", uid);
} catch (IOException e) {
logger.warn("{}: Failed open /proc/meminfo file", uid, e);
}
}
return Integer.MAX_VALUE * 1024L;
}
private Path downloadS3ToDownloadDir(String s3Url) throws IOException, ModelException {
logger.info("{}: S3 url found, start downloading from {}", uid, s3Url);
        // Use a fixed download path to avoid repeated downloads
String hash = Utils.hash(s3Url);
String download = Utils.getenv("SERVING_DOWNLOAD_DIR", null);
Path parent = download == null ? Utils.getCacheDir() : Paths.get(download);
parent = parent.resolve("download");
Path downloadModelDir = parent.resolve(hash);
if (Files.exists(downloadModelDir)) {
logger.info("{}: artifacts has been downloaded already: {}", uid, downloadModelDir);
} else {
Files.createDirectories(parent);
Path tmp = Files.createTempDirectory(parent, "tmp");
try {
runS3cmd(s3Url, tmp.toAbsolutePath().toString());
Utils.moveQuietly(tmp, downloadModelDir);
logger.info("{}: Download completed! Files saved to {}", uid, downloadModelDir);
} finally {
Utils.deleteQuietly(tmp);
}
}
return downloadModelDir;
}
void downloadS3() throws ModelException, IOException {
String modelId = prop.getProperty("option.model_id");
if (modelId == null) {
return;
}
if (modelId.startsWith("s3://")) {
this.downloadDir = downloadS3ToDownloadDir(modelId);
} else if (modelId.startsWith("djl://")) {
logger.info("{}: djl model zoo url found: {}", uid, modelId);
modelUrl = modelId;
// download real model from model zoo
downloadModel();
}
}
private void runS3cmd(String src, String dest) throws ModelException {
try {
String[] commands;
if (Files.exists(Paths.get("/opt/djl/bin/s5cmd"))) {
if (!src.endsWith("*")) {
if (src.endsWith("/")) {
src = src + '*';
} else {
src = src + "/*";
}
}
commands =
new String[] {
"/opt/djl/bin/s5cmd", "--retry-count", "1", "sync", src, dest
};
} else {
logger.info("s5cmd is not installed, using aws cli");
if (Boolean.parseBoolean(
Utils.getEnvOrSystemProperty("DJL_TEST_S3_NO_CREDENTIALS"))) {
logger.info("Skipping s3 credentials");
commands = new String[] {"aws", "s3", "sync", "--no-sign-request", src, dest};
} else {
commands = new String[] {"aws", "s3", "sync", src, dest};
}
}
Process exec = new ProcessBuilder(commands).redirectErrorStream(true).start();
String logOutput;
try (InputStream is = exec.getInputStream()) {
logOutput = Utils.toString(is);
}
int exitCode = exec.waitFor();
if (0 != exitCode || logOutput.startsWith("ERROR ")) {
logger.error("Download error: {}", logOutput);
throw new EngineException("Download model failed.");
} else {
logger.debug(logOutput);
}
} catch (IOException | InterruptedException e) {
throw new ModelNotFoundException("Model failed to download from s3", e);
}
}
private void downloadDraftModel() throws ModelException, IOException {
String draftModelId = prop.getProperty("option.speculative_draft_model");
if (draftModelId != null && draftModelId.startsWith("s3://")) {
Path draftDownloadDir = downloadS3ToDownloadDir(draftModelId);
prop.setProperty(
"option.speculative_draft_model", draftDownloadDir.toAbsolutePath().toString());
}
}
private static int intValue(Properties prop, String key, int defValue) {
String value = prop.getProperty(key);
if (value == null) {
return defValue;
}
return Integer.parseInt(value);
}
protected class ModelThread extends ThreadConfig<I, O> {
private Predictor<I, O> predictor;
ZooModel<I, O> model;
protected ModelThread(Device device) {
super(device);
model = getModel(device);
predictor = model.newPredictor();
boolean logModelMetric = Boolean.parseBoolean(model.getProperty("log_request_metric"));
if (logModelMetric) {
int metricsAggregation = model.intProperty("metrics_aggregation", 1000);
Metrics metrics = new Metrics();
metrics.setLimit(metricsAggregation);
metrics.setOnLimit(
(m, s) -> {
MODEL_METRIC.info("{}", m.percentile(s, 50));
MODEL_METRIC.info("{}", m.percentile(s, 90));
});
predictor.setMetrics(metrics);
}
synchronized (this) {
for (Map.Entry<String, Adapter> adapter : adapters.entrySet()) {
configJobs.add(adapter.getValue().registerJob(ModelInfo.this, this).getJob());
}
}
}
@Override
@SuppressWarnings("unchecked")
public void run(List<Job<I, O>> jobs) throws TranslateException {
List<Job<I, O>> validJobs = new ArrayList<>(jobs.size());
for (Job<I, O> job : jobs) {
if (job.getInput() instanceof Input) {
Input i = (Input) job.getInput();
if (i.isCancelled()) {
logger.debug("Skip cancelled job");
continue;
}
if (i.getContent().contains("adapter")) {
String adapter = i.getAsString("adapter");
if (!dynamicAdapters && !adapters.containsKey(adapter)) {
String failMessage =
"The adapter " + adapter + " has not been registered";
Job.setFailOutput((Job<Input, Output>) job, 503, failMessage);
continue;
}
}
}
validJobs.add(job);
}
if (!validJobs.isEmpty()) {
Job.runAll(validJobs, js -> predictor.batchPredict(js));
}
}
/**
* Returns the predictor.
*
* @return the predictor
*/
public Predictor<I, O> getPredictor() {
return predictor;
}
/** {@inheritDoc} */
@Override
public void close() {
predictor.close();
}
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/PermanentBatchAggregator.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.serving.wlm.util.WorkerJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
/**
 * A batch aggregator that never terminates by itself. The surrounding thread has to be interrupted
 * by sending an interrupt signal.
*
* @author erik.bamberg@web.de
*/
public class PermanentBatchAggregator<I, O> extends BatchAggregator<I, O> {
private static final Logger logger = LoggerFactory.getLogger(PermanentBatchAggregator.class);
/**
* Constructs a {@code PermanentBatchAggregator} instance.
*
* @param wpc the workerPoolConfig to use.
* @param jobQueue the job queue for polling data from.
*/
public PermanentBatchAggregator(
WorkerPoolConfig<I, O> wpc, LinkedBlockingDeque<WorkerJob<I, O>> jobQueue) {
super(wpc, jobQueue);
}
/** {@inheritDoc} */
@Override
protected List<WorkerJob<I, O>> pollBatch() throws InterruptedException {
List<WorkerJob<I, O>> list = new ArrayList<>(batchSize);
WorkerJob<I, O> wj = jobQueue.take();
list.add(wj);
drainTo(list, maxBatchDelayMicros);
logger.trace("sending jobs, size: {}", list.size());
return list;
}
/** {@inheritDoc} */
@Override
public boolean isFinished() {
return false;
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/PyAdapter.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.inference.Predictor;
import ai.djl.modality.Input;
import ai.djl.modality.Output;
import ai.djl.translate.TranslateException;
import java.util.Map;
/** An overload of {@link Adapter} for the python engine. */
public class PyAdapter extends Adapter {
/**
* Constructs an {@link Adapter}.
*
* @param name the adapter name
* @param src the adapter src
* @param options additional adapter options
*/
protected PyAdapter(String name, String src, Map<String, String> options) {
super(name, src, options);
}
@SuppressWarnings("unchecked")
@Override
protected void registerPredictor(Predictor<?, ?> predictor) {
Predictor<Input, Output> p = (Predictor<Input, Output>) predictor;
Input input = new Input();
input.addProperty("handler", "register_adapter");
input.addProperty("name", name);
input.addProperty("src", src);
for (Map.Entry<String, String> entry : options.entrySet()) {
input.add(entry.getKey(), entry.getValue());
}
try {
p.predict(input);
} catch (TranslateException e) {
throw new IllegalStateException(e);
}
}
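    // Illustrative payload: the Input above reaches the python handler with the
    // properties {handler=register_adapter, name=<adapter name>, src=<adapter src>}
    // plus one content entry per adapter option.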
@SuppressWarnings("unchecked")
@Override
protected void unregisterPredictor(Predictor<?, ?> predictor) {
Predictor<Input, Output> p = (Predictor<Input, Output>) predictor;
Input input = new Input();
input.addProperty("handler", "unregister_adapter");
input.addProperty("name", name);
try {
p.predict(input);
} catch (TranslateException e) {
throw new IllegalStateException(e);
}
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/SageMakerUtils.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.ModelException;
import ai.djl.repository.zoo.ModelNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.Properties;
/** A utility class to detect the optimal engine for a SageMaker saved model. */
public final class SageMakerUtils {
private static final Logger logger = LoggerFactory.getLogger(SageMakerUtils.class);
private static final String FRAMEWORK = "Framework";
private static final String FRAMEWORK_VERSION = "FrameworkVersion";
private static final String INFERENCE_SPEC = "InferenceSpec";
private static final String INFERENCE_SPEC_HMAC = "InferenceSpecHMAC";
private static final String METADATA = "metadata.yaml";
private static final String MODEL = "Model";
private static final String MODEL_TYPE = "ModelType";
private static final String PYTORCH_MODEL_TYPE = "PyTorchModel";
private static final String SCHEMA = "Schema";
private static final String SCHEMA_HMAC = "SchemaHMAC";
private static final String TASK = "Task";
private static final String VERSION = "Version";
private static final String XGBOOST_MODEL_TYPE = "XGBoostModel";
private SageMakerUtils() {}
static String inferSageMakerEngine(ModelInfo<?, ?> modelInfo) throws ModelException {
Properties prop = modelInfo.prop;
Path modelDir = modelInfo.modelDir;
Path metaDataFile = modelDir.resolve(METADATA);
// Load metadata information from metadata.yaml
Map<String, Object> metaDataMap;
Yaml metadata = new Yaml();
try (InputStream inputStream = Files.newInputStream(metaDataFile.toAbsolutePath())) {
metaDataMap = metadata.load(inputStream);
logger.info("Successfully loaded metadata.yaml");
} catch (IOException fileNotFoundException) {
logger.error("Cannot find valid metadata.yaml: {}", metaDataFile);
throw new ModelNotFoundException(
"Invalid metadata destination: " + metaDataFile, fileNotFoundException);
}
if (!validateMetaData(metaDataMap)) {
// If metadata is not a valid format, try native DJL serving infer engine.
return null;
}
// To validate both schema and inferenceSpec
boolean customizedSchema = hasCustomizedSchema(metaDataMap);
boolean customizedInferenceSpec = hasCustomizedInferenceSpec(metaDataMap);
if (customizedSchema || customizedInferenceSpec) {
            // For either a customized schema or a customized inference spec, use the Python
            // engine with the SageMaker entry point
logger.info(
"Customized schema builder or inference spec is detected, using python"
+ " backend");
prop.setProperty("option.entryPoint", "djl_python.sagemaker");
return "Python";
}
if (XGBOOST_MODEL_TYPE.equals(metaDataMap.get(MODEL_TYPE))) {
prop.setProperty("option.modelName", metaDataMap.get(MODEL).toString());
return "XGBoost";
} else if (PYTORCH_MODEL_TYPE.equals(metaDataMap.get(MODEL_TYPE))) {
prop.setProperty("option.modelName", metaDataMap.get(MODEL).toString());
return "PyTorch";
} else {
logger.error("Invalid model type: {}", metaDataMap.get(MODEL_TYPE));
throw new ModelException(
String.format(
"Model type %s is not supported in SageMaker model format yet",
metaDataMap.get(MODEL_TYPE).toString()));
}
}
private static boolean validateMetaData(Map<String, Object> metaDataMap) {
// Required field for metadata in SageMaker model format
return metaDataMap.containsKey(FRAMEWORK)
&& metaDataMap.containsKey(FRAMEWORK_VERSION)
&& metaDataMap.containsKey(MODEL)
&& metaDataMap.containsKey(MODEL_TYPE)
&& metaDataMap.containsKey(VERSION)
&& metaDataMap.containsKey(TASK);
}
private static boolean hasCustomizedSchema(Map<String, Object> metaDataMap)
throws ModelException {
if (metaDataMap.containsKey(SCHEMA)) {
if (!metaDataMap.containsKey(SCHEMA_HMAC)) {
throw new ModelException(
"Invalid SageMaker Model format due to SchemaHMAC is not found");
}
return true;
}
return false;
}
private static boolean hasCustomizedInferenceSpec(Map<String, Object> metaDataMap)
throws ModelException {
if (metaDataMap.containsKey(INFERENCE_SPEC)) {
if (!metaDataMap.containsKey(INFERENCE_SPEC_HMAC)) {
throw new ModelException(
"Invalid SageMaker Model format due to InferenceSpecHMAC is not found");
}
return true;
}
return false;
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/TemporaryBatchAggregator.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.serving.wlm.util.WorkerJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
/**
 * A batch aggregator that terminates after a maximum idle time.
*
* @author erik.bamberg@web.de
*/
public class TemporaryBatchAggregator<I, O> extends BatchAggregator<I, O> {
private static final Logger logger = LoggerFactory.getLogger(TemporaryBatchAggregator.class);
private long idleSince;
private long maxIdleSeconds;
/**
     * Constructs a {@code TemporaryBatchAggregator} that terminates after a maximum idle time.
*
* @param wpc the workerPoolConfig to run for.
* @param jobQueue reference to external job queue for polling.
*/
public TemporaryBatchAggregator(
WorkerPoolConfig<I, O> wpc, LinkedBlockingDeque<WorkerJob<I, O>> jobQueue) {
super(wpc, jobQueue);
this.idleSince = System.currentTimeMillis();
this.maxIdleSeconds = wpc.getMaxIdleSeconds();
}
/** {@inheritDoc} */
@Override
protected List<WorkerJob<I, O>> pollBatch() throws InterruptedException {
List<WorkerJob<I, O>> list = new ArrayList<>(batchSize);
WorkerJob<I, O> wj = jobQueue.poll(maxIdleSeconds, TimeUnit.SECONDS);
if (wj != null && wj.getJob() != null) {
list.add(wj);
drainTo(list, maxBatchDelayMicros);
logger.trace("sending jobs, size: {}", list.size());
idleSince = System.currentTimeMillis();
}
return list;
}
/** {@inheritDoc} */
@Override
public boolean isFinished() {
long idle = System.currentTimeMillis() - idleSince;
logger.trace("Temporary batch aggregator idle time (max {}s): {}ms", maxIdleSeconds, idle);
return idle > maxIdleSeconds * 1000;
}
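    // Example (illustrative): with maxIdleSeconds=60, an idle worker polls for up to
    // 60 seconds per iteration and isFinished() becomes true once more than 60,000 ms
    // have passed since the last served batch, letting the surrounding thread scale down.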
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/WorkLoadManager.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.serving.wlm.util.WlmCapacityException;
import ai.djl.serving.wlm.util.WlmException;
import ai.djl.serving.wlm.util.WlmShutdownException;
import ai.djl.serving.wlm.util.WorkerJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
/**
 * WorkLoadManager is responsible for managing the workload of the worker threads. The manager
 * scales up/down the required number of worker threads per wpc.
*
* @author erik.bamberg@web.de
*/
public class WorkLoadManager {
private static final Logger logger = LoggerFactory.getLogger(WorkLoadManager.class);
private ExecutorService threadPool;
private ConcurrentHashMap<WorkerPoolConfig<?, ?>, WorkerPool<?, ?>> workerPools;
/** Constructs a {@link WorkLoadManager} instance. */
public WorkLoadManager() {
threadPool =
Executors.newCachedThreadPool(
r -> {
Thread t = Executors.defaultThreadFactory().newThread(r);
t.setDaemon(true);
return t;
});
workerPools = new ConcurrentHashMap<>();
}
/**
* Registers a {@link WorkerPool} (model).
*
* <p>This operation is idempotent and will return the existing workerpool if the wpc was
* already registered.
*
* @param <I> the wpc input class
* @param <O> the wpc output class
* @param wpc the wpc to create the worker pool for
* @return the {@link WorkerPool}
*/
@SuppressWarnings("unchecked")
public <I, O> WorkerPool<I, O> registerWorkerPool(WorkerPoolConfig<I, O> wpc) {
return (WorkerPool<I, O>)
workerPools.computeIfAbsent(wpc, k -> new WorkerPool<>(wpc, threadPool));
}
/**
* Removes a worker pool from management.
*
* @param wpc the wpc to remove
*/
public void unregisterWorkerPool(WorkerPoolConfig<?, ?> wpc) {
WorkerPool<?, ?> pool = getWorkerPool(wpc);
if (pool.decreaseRef() <= 0) {
logger.info("Unloading model: {}", wpc);
pool.shutdownWorkers();
workerPools.remove(wpc);
}
}
/**
     * Adds an inference job to the job queue of the next free worker. Scales up workers if
     * necessary.
*
* @param <I> the wpc input class
* @param <O> the wpc output class
* @param job an inference job to be executed.
     * @return a {@code CompletableFuture} that completes with the inference output
*/
public <I, O> CompletableFuture<O> runJob(Job<I, O> job) {
CompletableFuture<O> result = new CompletableFuture<>();
WorkerPoolConfig<I, O> wpc = job.getWpc();
if (wpc.getStatus() != WorkerPoolConfig.Status.READY) {
result.completeExceptionally(new WlmException("Model is not ready: " + wpc.getId()));
return result;
}
WorkerPool<I, O> pool = getWorkerPool(wpc);
int maxWorkers = pool.getMaxWorkers();
if (maxWorkers == 0) {
result.completeExceptionally(
new WlmShutdownException(
"All model workers has been shutdown: " + wpc.getId()));
return result;
}
LinkedBlockingDeque<WorkerJob<I, O>> queue = pool.getJobQueue();
if ((queue.remainingCapacity() == 1 && pool.isAllWorkerBusy())
|| pool.isAllWorkerDied()
|| !queue.offer(new WorkerJob<>(job, result))) {
result.completeExceptionally(
new WlmCapacityException(
"Worker queue capacity exceeded for model: " + wpc.getId()));
scaleUp(pool, wpc, maxWorkers);
return result;
}
int currentWorkers = getNumRunningWorkers(wpc);
if (currentWorkers == 0
|| currentWorkers < maxWorkers && queue.size() > wpc.getBatchSize() * 2) {
scaleUp(pool, wpc, maxWorkers);
}
return result;
}
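    // Example (illustrative sketch; Job construction is an assumption, check the Job
    // class for the exact signature):
    //
    //   WorkLoadManager wlm = new WorkLoadManager();
    //   wlm.registerWorkerPool(modelInfo);
    //   wlm.runJob(new Job<>(modelInfo, input))
    //           .whenComplete((output, t) -> { /* handle result or failure */ });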
private <I, O> void scaleUp(WorkerPool<I, O> pool, WorkerPoolConfig<I, O> wpc, int maxWorkers) {
synchronized (pool) {
int currentWorkers = getNumRunningWorkers(wpc); // check again
if (currentWorkers < maxWorkers) {
logger.info("Scaling up workers for model {} to {} ", wpc, currentWorkers + 1);
pool.addThreads();
}
}
}
/**
     * Returns the number of running workers of a wpc. Running workers are workers which are not
     * stopped, in error, or scheduled to scale down.
*
* @param wpc the wpc we are interested in.
* @return number of running workers.
*/
public int getNumRunningWorkers(WorkerPoolConfig<?, ?> wpc) {
int numWorking = 0;
WorkerPool<?, ?> pool = workerPools.get(wpc);
if (pool != null) {
pool.cleanup();
List<? extends WorkerThread<?, ?>> threads = pool.getWorkers();
for (WorkerThread<?, ?> thread : threads) {
if ((thread.getState() != WorkerState.WORKER_STOPPED)
&& (thread.getState() != WorkerState.WORKER_ERROR)
&& (thread.getState() != WorkerState.WORKER_SCALED_DOWN)) {
++numWorking;
}
}
}
return numWorking;
}
/**
* Returns the {@link WorkerPool} for a wpc.
*
* @param <I> the wpc class
* @param <O> the wpc class
* @param id the wpc id
* @return the {@link WorkerPool}
*/
@SuppressWarnings("unchecked")
public <I, O> WorkerPool<I, O> getWorkerPoolById(String id) {
for (Entry<WorkerPoolConfig<?, ?>, WorkerPool<?, ?>> wp : workerPools.entrySet()) {
if (id.equals(wp.getKey().getId())) {
return (WorkerPool<I, O>) wp.getValue();
}
}
return null;
}
/**
* Returns the {@link WorkerPool} for a model.
*
* @param <I> the wpc input class
* @param <O> the wpc output class
* @param wpc the worker type to get the worker pool for
* @return the {@link WorkerPool}
*/
@SuppressWarnings("unchecked")
public <I, O> WorkerPool<I, O> getWorkerPool(WorkerPoolConfig<I, O> wpc) {
return (WorkerPool<I, O>) workerPools.get(wpc);
}
    /** Closes all wpcs related to the {@code WorkLoadManager}. */
public void close() {
threadPool.shutdownNow();
for (WorkerPool<?, ?> wp : workerPools.values()) {
wp.shutdown();
}
workerPools.clear();
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/WorkerGroup.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.Device;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
/** The {@link WorkerGroup} manages the {@link WorkerPool} for a particular {@link Device}. */
public class WorkerGroup<I, O> {
private WorkerPool<I, O> workerPool;
private Device device;
private int minWorkers;
int maxWorkers;
List<WorkerThread<I, O>> workers;
WorkerGroup(WorkerPool<I, O> workerPool, Device device) {
this.workerPool = workerPool;
this.device = device;
workers = new CopyOnWriteArrayList<>();
WorkerPoolConfig<I, O> wpc = workerPool.getWpc();
// Default workers from worker type, may be overridden by configureWorkers on init or scale
minWorkers = wpc.getMinWorkers(device);
maxWorkers = wpc.getMaxWorkers(device);
minWorkers = Math.min(minWorkers, maxWorkers);
}
/**
* Returns the device of the worker group.
*
* @return the device of the worker group
*/
public Device getDevice() {
return device;
}
/**
* Returns a list of workers.
*
* @return a list of workers
*/
public List<WorkerThread<I, O>> getWorkers() {
return workers;
}
/**
* Returns the min number of workers for the model and device.
*
* @return the min number of workers for the model and device
*/
public int getMinWorkers() {
return minWorkers;
}
/**
* Returns the max number of workers for the model and device.
*
* @return the max number of workers for the model and device
*/
public int getMaxWorkers() {
return maxWorkers;
}
/**
* Configures minimum and maximum number of workers.
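*
* <p>Negative values leave the corresponding bound unchanged, as the guards in the method
* body show. A hypothetical call:
*
* <pre>{@code
* group.configureWorkers(-1, 4); // keep the current min, raise the max to 4
* }</pre>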
*
* @param minWorkers the minimum number of workers
* @param maxWorkers the maximum number of workers
*/
public void configureWorkers(int minWorkers, int maxWorkers) {
if (minWorkers >= 0) {
this.minWorkers = minWorkers;
}
if (maxWorkers >= 0) {
this.maxWorkers = maxWorkers;
}
}
void addThreads(int count, boolean permanent) {
WorkerPoolConfig<I, O> wpc = workerPool.getWpc();
ExecutorService threadPool = workerPool.getThreadPool();
for (int i = 0; i < count; ++i) {
WorkerThread<I, O> thread =
WorkerThread.builder(wpc)
.setDevice(device)
.setJobQueue(workerPool.getJobQueue())
.optFixPoolThread(permanent)
.build();
workers.add(thread);
threadPool.submit(thread);
}
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/WorkerPool.java
|
/*
* Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.Device;
import ai.djl.ModelException;
import ai.djl.serving.wlm.util.WorkerJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
/**
* Manages the work load for a single model.
*
* @author erik.bamberg@web.de
*/
public class WorkerPool<I, O> {
private static final Logger logger = LoggerFactory.getLogger(WorkerPool.class);
private final WorkerPoolConfig<I, O> wpc;
private ExecutorService threadPool;
private Map<Device, WorkerGroup<I, O>> workerGroups;
private LinkedBlockingDeque<WorkerJob<I, O>> jobQueue;
private AtomicInteger refCnt;
/**
* Constructs and initializes the data structure.
*
* @param wpc the worker pool config this WorkerPool belongs to
* @param threadPool the thread pool executor
*/
WorkerPool(WorkerPoolConfig<I, O> wpc, ExecutorService threadPool) {
this.wpc = wpc;
this.threadPool = threadPool;
workerGroups = new ConcurrentHashMap<>();
refCnt = new AtomicInteger(1);
}
/** Increases the reference count. */
public void increaseRef() {
refCnt.incrementAndGet();
}
/**
* Decreases the reference count and returns the current count.
*
* @return the current count
*/
public int decreaseRef() {
return refCnt.decrementAndGet();
}
/**
* Returns the worker pool config backing this worker pool.
*
* @return the worker pool config backing this worker pool
*/
public WorkerPoolConfig<I, O> getWpc() {
return wpc;
}
ExecutorService getThreadPool() {
return threadPool;
}
/**
* Returns a map of {@code WorkerGroup}.
*
* @return a map of {@code WorkerGroup}
*/
public Map<Device, WorkerGroup<I, O>> getWorkerGroups() {
return workerGroups;
}
/**
* Returns a list of worker threads.
*
* @return the workers
*/
public List<WorkerThread<I, O>> getWorkers() {
return workerGroups.values().stream()
.flatMap(g -> g.workers.stream())
.collect(Collectors.toList());
}
/**
* Returns the {@code JobQueue} for this model.
*
* @return the jobQueue
*/
public LinkedBlockingDeque<WorkerJob<I, O>> getJobQueue() {
return jobQueue;
}
/**
* Returns the maximum number of workers for a model across all devices.
*
* @return the maximum number of workers for a model across all devices
*/
public int getMaxWorkers() {
return workerGroups.values().stream().mapToInt(g -> g.maxWorkers).reduce(0, Integer::sum);
}
/**
* Returns whether all workers have died.
*
* @return {@code true} if all workers have died
*/
public boolean isAllWorkerDied() {
for (WorkerGroup<I, O> group : workerGroups.values()) {
for (WorkerThread<?, ?> thread : group.getWorkers()) {
if (thread.isRunning()) {
return false;
}
}
}
return true;
}
/**
* Returns {@code true} if all workers are busy.
*
* @return {@code true} if all workers are busy
*/
public boolean isAllWorkerBusy() {
for (WorkerGroup<I, O> group : workerGroups.values()) {
for (WorkerThread<?, ?> thread : group.getWorkers()) {
if (thread.getState() == WorkerState.WORKER_STARTED) {
return false;
}
}
}
return true;
}
/**
* Returns whether the worker groups are fully scaled.
*
* @return {@code true} if the worker groups are fully scaled
*/
public boolean isFullyScaled() {
for (WorkerGroup<I, O> group : workerGroups.values()) {
if (group.getMinWorkers() > group.getWorkers().size()) {
return false;
}
}
return true;
}
/**
* Initializes new worker capacities for this model.
*
* @param deviceName the device for the model, null for default devices
*/
public void initWorkers(String deviceName) {
initWorkers(deviceName, -1, -1);
}
/**
* Initializes new worker capacities for this model.
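*
* <p>A minimal sketch (hypothetical device name, following {@code ai.djl.Device} naming):
*
* <pre>{@code
* pool.initWorkers("gpu0", 1, 4); // load on gpu0 and keep between 1 and 4 workers
* }</pre>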
*
* @param deviceName the device for the model, null for default devices
* @param minWorkers minimum amount of workers (-1 for model default).
* @param maxWorkers maximum amount of workers (-1 for model default).
*/
public void initWorkers(String deviceName, int minWorkers, int maxWorkers) {
Device device;
synchronized (wpc) {
try {
wpc.initialize();
device = wpc.withDefaultDevice(deviceName);
logger.info("loading model {} on {} ...", wpc, device);
wpc.load(device);
} catch (ModelException | IOException e) {
throw new CompletionException(e);
}
if (wpc.getStatus() != WorkerPoolConfig.Status.READY) {
logger.warn("Cannot scale workers while model is not READY: {}", wpc);
}
}
// jobQueue should be initialized after the model is configured
if (jobQueue == null) {
jobQueue = new LinkedBlockingDeque<>(wpc.getQueueSize());
}
cleanup();
WorkerGroup<I, O> group =
workerGroups.computeIfAbsent(device, d -> new WorkerGroup<>(this, d));
group.configureWorkers(minWorkers, maxWorkers);
doScaleWorker(group);
log();
}
/**
* Sets new worker capacities for this model.
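*
* <p>For example, a hypothetical rescale across all loaded devices:
*
* <pre>{@code
* pool.scaleWorkers(null, 2, 8); // 2 to 8 workers on every loaded device
* }</pre>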
*
* @param deviceName the device for the model, null for all loaded devices
* @param minWorkers minimum amount of workers.
* @param maxWorkers maximum amount of workers.
*/
public void scaleWorkers(String deviceName, int minWorkers, int maxWorkers) {
if (deviceName != null) {
// if the model has not been loaded on device, this will load the model
initWorkers(deviceName, minWorkers, maxWorkers);
return;
}
cleanup();
// scale for all devices
for (WorkerGroup<I, O> group : workerGroups.values()) {
group.configureWorkers(minWorkers, maxWorkers);
doScaleWorker(group);
}
log();
}
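// Grows or shrinks a group's fixed-thread pool to match its configured minWorkers.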
private void doScaleWorker(WorkerGroup<I, O> group) {
int minWorkers = group.getMinWorkers();
List<WorkerThread<I, O>> fixedPoolThreads = new ArrayList<>();
for (WorkerThread<I, O> threads : group.getWorkers()) {
if (threads.isFixPoolThread()) {
fixedPoolThreads.add(threads);
}
}
int activeThreads = fixedPoolThreads.size();
if (activeThreads < minWorkers) {
// scale up the fixed pool
logger.info(
"scaling up min workers by {} (from {} to {}) workers. Total range is min {} to"
+ " max {}",
minWorkers - activeThreads,
activeThreads,
minWorkers,
minWorkers,
group.getMaxWorkers());
group.addThreads(minWorkers - activeThreads, true);
} else {
// scale down the fixed pool
logger.info("scale down from workers {} to {}", activeThreads, minWorkers);
fixedPoolThreads
.subList(minWorkers, activeThreads)
.forEach(t -> t.shutdown(WorkerState.WORKER_SCALED_DOWN));
}
}
/** Shuts down all workers. */
public void shutdownWorkers() {
synchronized (wpc) {
List<WorkerThread<I, O>> threads = getWorkers();
for (WorkerThread<I, O> thread : threads) {
thread.shutdown(WorkerState.WORKER_SCALED_DOWN);
}
threads.clear();
}
}
/** Removes all stopped workers and workers in an error state from the pool. */
public void cleanup() {
for (WorkerGroup<I, O> group : workerGroups.values()) {
group.workers.removeIf(WorkerThread::isStale);
}
}
/** Shuts down all the worker threads in the work pool. */
public void shutdown() {
wpc.close();
for (WorkerGroup<I, O> group : workerGroups.values()) {
for (WorkerThread<I, O> worker : group.workers) {
worker.shutdown(WorkerState.WORKER_STOPPED);
}
}
workerGroups.clear();
if (jobQueue != null) {
for (WorkerJob<I, O> wj : jobQueue) {
wj.getFuture().cancel(true);
}
}
}
/**
* Adds temporary threads across existing devices.
*
* <p>Only supports temporary threads because permanent threads are managed per-device, so it
* doesn't need a multi-device version.
*/
void addThreads() {
// Add a thread to the device which has the most room to grow
List<WorkerGroup<I, O>> sorted = new ArrayList<>(workerGroups.values());
if (sorted.isEmpty()) {
logger.warn("No worker pool available.");
return;
}
sorted.sort(Comparator.comparingInt(p -> p.getMaxWorkers() - p.workers.size()));
WorkerGroup<I, O> group = sorted.get(sorted.size() - 1);
if (group.getMaxWorkers() > group.workers.size()) {
group.addThreads(1, false);
}
}
/**
* Logs the current state of this {@code WorkerPool} when debug logging is enabled.
*
* <p>Logs all thread-ids in the pool.
*/
private void log() {
if (logger.isDebugEnabled()) {
StringBuffer buf = new StringBuffer();
getWorkers()
.forEach(
w -> {
buf.append(w.getWorkerId());
if (w.isFixPoolThread()) {
buf.append("-fixedPool\n");
} else {
buf.append("-tmpPool\n");
}
});
logger.debug("worker pool for model {}:\n {}", wpc, buf);
}
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/WorkerPoolConfig.java
|
/*
* Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.Device;
import ai.djl.ModelException;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.serving.wlm.util.AutoIncIdGenerator;
import ai.djl.translate.TranslateException;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;
/**
* A {@link WorkerPoolConfig} represents a task that could be run in the {@link WorkLoadManager}.
*
* <p>Each {@link WorkerThread} (also {@link WorkerPool} and {@link WorkerGroup}) focuses on
* executing a single worker type. They contain the configuration for the thread, any persistent
* data, and the code to run on the thread.
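*
* <p>A usage sketch (assuming {@code ModelInfo}, an implementation of this class, and its
* single-argument model URL constructor):
*
* <pre>{@code
* WorkerPoolConfig<Input, Output> wpc = new ModelInfo<>("djl://ai.djl.zoo/mlp");
* wpc.setMinMaxWorkers(1, 4);
* }</pre>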
*
* @param <I> the input type
* @param <O> the output type
*/
public abstract class WorkerPoolConfig<I, O> {
private static final AutoIncIdGenerator ID_GEN = new AutoIncIdGenerator("M-");
protected transient String id;
protected transient String uid;
protected String version;
protected String modelUrl;
protected int queueSize;
protected int batchSize;
protected int maxBatchDelayMillis;
protected int maxIdleSeconds;
protected Integer minWorkers; // Integer so it becomes null when parsed from JSON
protected Integer maxWorkers; // Integer so it becomes null when parsed from JSON
protected WorkerPoolConfig() {
uid = ID_GEN.generate();
}
/**
* Loads the worker type to the specified device.
*
* @param device the device to load worker type on
* @throws IOException if failed to read worker type file
* @throws ModelException if failed to load the specified model
*/
public abstract void load(Device device) throws ModelException, IOException;
/**
* Starts a new {@link WorkerThread} for this {@link WorkerPoolConfig}.
*
* @param device the device to run on
* @return the new {@link ThreadConfig}
*/
public abstract ThreadConfig<I, O> newThread(Device device);
/**
* Initialize the worker.
*
* @throws IOException if failed to download worker
* @throws ModelNotFoundException if model not found
*/
public abstract void initialize() throws IOException, ModelException;
/** Close all loaded workers. */
public abstract void close();
/**
* Returns the default device for this model if device is null.
*
* @param deviceName the device to use if it is not null
* @return a non-null device
*/
public Device withDefaultDevice(String deviceName) {
return Device.fromName(deviceName);
}
/**
* Returns the worker type loading status.
*
* @return the worker type loading status
*/
public abstract Status getStatus();
/**
* Returns whether the worker type can be loaded in parallel on multiple devices.
*
* @return whether the worker type can be loaded in parallel on multiple devices
*/
public abstract boolean isParallelLoading();
/**
* Returns the devices the worker type will be loaded on at startup.
*
* @return the devices the worker type will be loaded on at startup
*/
public abstract String[] getLoadOnDevices();
/**
* Sets the worker config's ID.
*
* @param id the worker config's ID
*/
public void setId(String id) {
this.id = id;
}
/**
* Returns the worker config's unique ID.
*
* @return the worker config's unique ID
*/
public String getUid() {
return uid;
}
/**
* Returns the worker config's ID.
*
* @return the worker config's ID
*/
public String getId() {
return id;
}
/**
* Returns the worker type version.
*
* @return the worker type version
*/
public String getVersion() {
return version;
}
/**
* Returns the worker type url.
*
* @return the worker type url
*/
public String getModelUrl() {
return modelUrl;
}
/**
* Sets the configured max idle time in seconds of workers.
*
* @param maxIdleSeconds the configured max idle time in seconds of workers
*/
public void setMaxIdleSeconds(int maxIdleSeconds) {
this.maxIdleSeconds = maxIdleSeconds;
}
/**
* Returns the configured max idle time in seconds of workers.
*
* @return the max idle time in seconds
*/
public int getMaxIdleSeconds() {
return maxIdleSeconds;
}
/**
* Sets the configured batch size.
*
* @param batchSize the configured batch size
*/
public void setBatchSize(int batchSize) {
this.batchSize = batchSize;
}
/**
* Returns the configured batch size.
*
* @return the configured batch size
*/
public int getBatchSize() {
return batchSize;
}
/**
* Sets the maximum delay in milliseconds to aggregate a batch.
*
* @param maxBatchDelayMillis the maximum delay in milliseconds to aggregate a batch
*/
public void setMaxBatchDelayMillis(int maxBatchDelayMillis) {
this.maxBatchDelayMillis = maxBatchDelayMillis;
}
/**
* Returns the maximum delay in milliseconds to aggregate a batch.
*
* @return the maximum delay in milliseconds to aggregate a batch
*/
public int getMaxBatchDelayMillis() {
return maxBatchDelayMillis;
}
/**
* Sets the configured size of the workers queue.
*
* @param queueSize the configured size of the workers queue
*/
public void setQueueSize(int queueSize) {
this.queueSize = queueSize;
}
/**
* Returns the configured size of the workers queue.
*
* @return the configured size of the workers queue
*/
public int getQueueSize() {
return queueSize;
}
/**
* Sets the starting number of min workers.
*
* @param minWorkers the starting number of min workers
*/
public void setMinWorkers(int minWorkers) {
if (maxWorkers != null && maxWorkers < minWorkers) {
throw new IllegalArgumentException(
"The max workers for a model or worker can't be smaller than the min workers");
}
this.minWorkers = minWorkers;
}
/**
* Returns the minimum number of workers.
*
* @param device the device to get the min workers for
* @return the minimum number of workers
*/
public int getMinWorkers(Device device) {
return minWorkers;
}
/**
* Sets the starting number of max workers.
*
* @param maxWorkers the starting number of max workers
*/
public void setMaxWorkers(int maxWorkers) {
if (minWorkers != null && maxWorkers < minWorkers) {
throw new IllegalArgumentException(
"The max workers for a model or worker can't be smaller than the min workers");
}
if (maxWorkers == 0) {
throw new IllegalArgumentException("Models must have a maxWorkers greater than 0");
}
this.maxWorkers = maxWorkers;
}
/**
* Returns the maximum number of workers.
*
* @param device the device to get the max workers for
* @return the maximum number of workers
*/
public int getMaxWorkers(Device device) {
return maxWorkers;
}
/**
* Sets the starting minimum and maximum number of workers.
*
* @param minWorkers the new minimum number of workers
* @param maxWorkers the new maximum number of workers
*/
public void setMinMaxWorkers(int minWorkers, int maxWorkers) {
if (maxWorkers < minWorkers) {
throw new IllegalArgumentException(
"The max workers for a model or worker can't be smaller than the min workers");
}
if (minWorkers == 0) {
throw new IllegalArgumentException(
"Having a minWorkers of 0 is not currently supported");
}
if (maxWorkers == 0) {
throw new IllegalArgumentException("Models must have a maxWorkers greater than 0");
}
this.minWorkers = minWorkers;
this.maxWorkers = maxWorkers;
}
/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof WorkerPoolConfig)) {
return false;
}
WorkerPoolConfig<?, ?> wpc = (WorkerPoolConfig<?, ?>) o;
return id.equals(wpc.id) && Objects.equals(version, wpc.version);
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return Objects.hash(id, version);
}
/** {@inheritDoc} */
@Override
public String toString() {
if (version != null) {
return id + ':' + version + " (" + uid + ", " + getStatus() + ')';
}
return id + " (" + uid + ", " + getStatus() + ')';
}
/** An enum representing the state of a worker type. */
public enum Status {
PENDING,
READY,
FAILED
}
/**
* The part of the {@link WorkerPoolConfig} for an individual {@link WorkerThread}.
*
* @param <I> the input type
* @param <O> the output type
*/
public abstract static class ThreadConfig<I, O> {
private Device device;
protected LinkedBlockingDeque<Job<I, O>> configJobs;
protected ThreadConfig(Device device) {
this.device = device;
configJobs = new LinkedBlockingDeque<>();
}
/**
* Runs the work on the {@link WorkerThread} and stores in the job.
*
* @param inputs the work input
* @throws TranslateException if it failed to compute
*/
public abstract void run(List<Job<I, O>> inputs) throws TranslateException;
/**
* Gets the configuration jobs for the worker.
*
* @return the configuration jobs for the worker
*/
public LinkedBlockingDeque<Job<I, O>> getConfigJobs() {
return configJobs;
}
/** Closes the thread type and frees any resources. */
public abstract void close();
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/WorkerState.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
/** An enum representing the state of a worker. */
public enum WorkerState {
WORKER_STARTED,
WORKER_MODEL_LOADED,
WORKER_STOPPED,
WORKER_BUSY,
WORKER_ERROR,
WORKER_SCALED_DOWN
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/WorkerThread.java
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm;
import ai.djl.Device;
import ai.djl.serving.wlm.WorkerPoolConfig.ThreadConfig;
import ai.djl.serving.wlm.util.AutoIncIdGenerator;
import ai.djl.serving.wlm.util.WlmException;
import ai.djl.serving.wlm.util.WorkerJob;
import ai.djl.translate.TranslateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/** The {@link WorkerThread} is the worker managed by the {@link WorkLoadManager}. */
public final class WorkerThread<I, O> implements Runnable {
private static final Logger logger = LoggerFactory.getLogger(WorkerThread.class);
private static final AutoIncIdGenerator ID_GEN = new AutoIncIdGenerator("WT-");
private ThreadConfig<I, O> threadConfig;
private AtomicBoolean running = new AtomicBoolean(true);
private LinkedBlockingDeque<WorkerJob<I, O>> configJobs;
private BatchAggregator<I, O> aggregator;
private Device device;
private AtomicReference<Thread> currentThread = new AtomicReference<>();
private WorkerState state;
private String workerId;
private long startTime;
private boolean fixPoolThread;
private long stateChangeTime;
/**
* Constructs a {@code WorkerThread} from this builder.
*
* @param builder the builder to construct the new worker thread from
*/
private WorkerThread(Builder<I, O> builder) {
this.aggregator = builder.aggregator;
this.workerId = ID_GEN.generate();
this.startTime = System.currentTimeMillis();
this.fixPoolThread = builder.fixPoolThread;
this.device = builder.device;
threadConfig = builder.workerPoolConfig.newThread(device);
configJobs = new LinkedBlockingDeque<>();
logger.info(
"Starting worker thread {} for model {} on device {}",
workerId,
builder.workerPoolConfig,
device);
}
/** {@inheritDoc} */
@Override
public void run() {
Thread thread = Thread.currentThread();
thread.setName(workerId);
currentThread.set(thread);
this.state = WorkerState.WORKER_STARTED;
List<Job<I, O>> req = null;
String errorMessage = "Worker shutting down";
try {
runAllConfigJobs(); // Run initial config jobs
while (isRunning() && !aggregator.isFinished()) {
req = aggregator.getRequest();
if (req != null && !req.isEmpty()) {
state = WorkerState.WORKER_BUSY;
runAllConfigJobs(); // Run new config jobs
try {
runJobs(req);
aggregator.sendResponse();
} catch (TranslateException e) {
logger.warn("{}: Failed to predict", workerId, e);
aggregator.sendError(e);
} finally {
state = WorkerState.WORKER_STARTED;
}
}
req = null;
}
} catch (InterruptedException e) {
logger.debug("Shutting down worker thread {} .. Scaling down.", workerId);
} catch (Throwable t) {
logger.error("{}: Server error", workerId, t);
errorMessage = t.getMessage();
} finally {
logger.debug(
"Shutting down the worker thread {} .. {}",
workerId,
currentThread.get().getName());
currentThread.set(null);
shutdown(WorkerState.WORKER_STOPPED);
if (req != null) {
Exception e = new WlmException(errorMessage);
aggregator.sendError(e);
}
}
}
private void runAllConfigJobs() throws TranslateException {
while (!threadConfig.getConfigJobs().isEmpty()) {
// Run base worker pool configurations if present
runConfigJob(threadConfig.getConfigJobs().pop());
}
while (!configJobs.isEmpty()) {
// Run thread config jobs if present
runConfigJob(configJobs.pop().getJob());
}
}
private O runConfigJob(Job<I, O> configJob) throws TranslateException {
runJobs(Collections.singletonList(configJob));
return configJob.getOutput();
}
private void runJobs(List<Job<I, O>> input) throws TranslateException {
Map<Optional<JobFunction<I, O>>, List<Job<I, O>>> jobs =
input.stream().collect(Collectors.groupingBy(Job::getRunner));
for (Map.Entry<Optional<JobFunction<I, O>>, List<Job<I, O>>> fjob : jobs.entrySet()) {
if (fjob.getKey().isPresent()) {
Job.runAll(fjob.getValue(), fjob.getKey().get());
} else {
threadConfig.run(fjob.getValue());
}
}
}
/**
* Returns the worker thread ID.
*
* @return the worker thread ID
*/
public String getWorkerId() {
return workerId;
}
/**
* Returns the worker thread ID (number without prefix).
*
* @return the worker thread ID (number without prefix)
*/
public int getWorkerIdNum() {
return ID_GEN.stripPrefix(workerId);
}
/**
* Returns the {@link WorkerPoolConfig}'s {@link ThreadConfig} for this thread.
*
* @return the {@link WorkerPoolConfig}'s {@link ThreadConfig} for this thread
*/
public ThreadConfig<I, O> getThreadType() {
return threadConfig;
}
/**
* Returns true if the worker thread is running.
*
* @return true if the worker thread is running
*/
public boolean isRunning() {
return running.get();
}
/**
* Returns the device used by the thread.
*
* @return the device used by the thread
*/
public Device getDevice() {
return device;
}
/**
* Returns the thread start time.
*
* @return the thread start time
*/
public long getStartTime() {
return startTime;
}
/**
* Returns the worker state.
*
* @return the worker state
*/
public WorkerState getState() {
return state;
}
/**
* Shuts down the worker thread.
*
* @param state the state to set the thread to
*/
public void shutdown(WorkerState state) {
running.set(false);
setState(state);
Thread thread = currentThread.getAndSet(null);
if (thread != null) {
thread.interrupt();
Exception e = new WlmException("Worker shutting down");
aggregator.sendError(e);
}
logger.info("Shutting down temporary worker {}", workerId);
threadConfig.close();
}
/**
* Adds a configuration job to this thread.
*
* @param wj the configuration job to add
*/
public void addConfigJob(WorkerJob<I, O> wj) {
configJobs.add(wj);
}
void setState(WorkerState newState) {
logger.debug("Worker thread {} has state change: {} -> {}", workerId, state, newState);
if (state != WorkerState.WORKER_SCALED_DOWN) {
// Don't update the state if it was terminated on purpose (scaled down)
this.state = newState;
stateChangeTime = System.currentTimeMillis();
}
}
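// A worker counts as stale once it has been stopped, errored, or scaled down for over a minute.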
boolean isStale() {
return (state == WorkerState.WORKER_STOPPED
|| state == WorkerState.WORKER_ERROR
|| state == WorkerState.WORKER_SCALED_DOWN)
&& System.currentTimeMillis() - stateChangeTime > Duration.ofMinutes(1).toMillis();
}
/**
* Checks if this worker is instantiated as one of the fixed threads of a pool. Fixed threads
* are not automatically scaled down, so they are candidates for down scaling when the
* minWorker/maxWorker size of a model changes.
*
* @return the fixPoolThread
*/
public boolean isFixPoolThread() {
return fixPoolThread;
}
/**
* Creates a builder to build a {@code WorkerThread}.
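*
* <p>A usage sketch mirroring how {@code WorkerGroup} builds its threads (hypothetical
* {@code wpc}, {@code device}, and {@code queue} variables):
*
* <pre>{@code
* WorkerThread<I, O> thread = WorkerThread.builder(wpc)
*         .setDevice(device)
*         .setJobQueue(queue)
*         .optFixPoolThread(false) // temporary worker
*         .build();
* }</pre>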
*
* @param <I> the workerPoolConfig input class
* @param <O> the workerPoolConfig output class
* @param wpc the {@link WorkerPoolConfig} the thread will be responsible for
* @return a new builder
*/
public static <I, O> Builder<I, O> builder(WorkerPoolConfig<I, O> wpc) {
return new Builder<>(wpc);
}
/** A Builder to construct a {@code WorkerThread}. */
public static final class Builder<I, O> {
private WorkerPoolConfig<I, O> workerPoolConfig;
private Device device;
private BatchAggregator<I, O> aggregator;
private LinkedBlockingDeque<WorkerJob<I, O>> jobQueue;
private boolean fixPoolThread;
Builder(WorkerPoolConfig<I, O> wpc) {
this.workerPoolConfig = wpc;
this.fixPoolThread = true;
}
/**
* Sets the device to run operations on.
*
* @param device the device to run operations on
* @return self-reference to this builder
*/
public Builder<I, O> setDevice(Device device) {
this.device = device;
return this;
}
/**
* Sets the jobQueue used to poll for new jobs.
*
* @param jobQueue the jobQueue to set
* @return self-reference to this builder.
*/
public Builder<I, O> setJobQueue(LinkedBlockingDeque<WorkerJob<I, O>> jobQueue) {
this.jobQueue = jobQueue;
return this;
}
/**
* Sets whether the workerThread should be part of the fixed pool. Fixed pool workers don't
* terminate themselves but are managed by the WorkLoadManager min/max-worker scaling
* functionality.
*
* @param fixPoolThread the fixPoolThread to set
* @return self-reference to this builder.
*/
public Builder<I, O> optFixPoolThread(boolean fixPoolThread) {
this.fixPoolThread = fixPoolThread;
return this;
}
/**
* Builds the {@link WorkerThread} with the provided data.
*
* @return a {@link WorkerThread}
*/
public WorkerThread<I, O> build() {
if (device == null) {
throw new IllegalArgumentException("Must set device for worker thread");
}
if (jobQueue == null) {
throw new IllegalArgumentException("jobQueue has to be set.");
}
if (fixPoolThread) {
aggregator = new PermanentBatchAggregator<>(workerPoolConfig, jobQueue);
} else {
aggregator = new TemporaryBatchAggregator<>(workerPoolConfig, jobQueue);
}
return new WorkerThread<>(this);
}
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/package-info.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* Contains the model server backend which manages worker threads and executes jobs on models.
*
* @see ai.djl.serving.wlm.WorkLoadManager
*/
package ai.djl.serving.wlm;
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/util/AutoIncIdGenerator.java
|
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm.util;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Class to generate a unique worker id.
*
* @author erik.bamberg@web.de
*/
public class AutoIncIdGenerator {
private AtomicInteger counter;
private String prefix;
/**
* Constructs an {@link AutoIncIdGenerator}.
*
* @param prefix the prefix for the set of IDs
*/
public AutoIncIdGenerator(String prefix) {
this.prefix = prefix;
counter = new AtomicInteger(1);
}
/**
* Generates a new worker id.
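*
* <p>A minimal sketch; with prefix {@code "WT-"} and a fresh counter, successive calls
* return zero-padded ids:
*
* <pre>{@code
* AutoIncIdGenerator gen = new AutoIncIdGenerator("WT-");
* String first = gen.generate(); // "WT-0001"
* String second = gen.generate(); // "WT-0002"
* }</pre>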
*
* @return a new id
*/
public String generate() {
return String.format("%s%04d", prefix, counter.getAndIncrement());
}
/**
* Generates a new worker id without the prefix.
*
* @return a new id without the prefix
*/
public int generateNum() {
return counter.getAndIncrement();
}
/**
* Removes the prefix from a generated id.
*
* @param id the prefixed id
* @return the numeric value of the id
*/
public int stripPrefix(String id) {
return Integer.parseInt(id.substring(prefix.length()));
}
}
|
0
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm
|
java-sources/ai/djl/serving/wlm/0.28.0/ai/djl/serving/wlm/util/EventManager.java
|
/*
* Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.serving.wlm.util;
import ai.djl.Device;
import ai.djl.serving.wlm.Adapter;
import ai.djl.serving.wlm.ModelInfo;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/** A class that manages model server events. */
public final class EventManager {
private static final EventManager INSTANCE = new EventManager();
private List<ModelServerListener> listeners;
private EventManager() {
listeners = new ArrayList<>();
}
/**
* Returns a singleton {@code EventManager} instance.
*
* @return the {@code EventManager} instance
*/
public static EventManager getInstance() {
return INSTANCE;
}
/**
* Adds the listener to the {@code EventManager}.
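*
* <p>A hypothetical registration (assuming {@code MyListener} implements
* {@code ModelServerListener}):
*
* <pre>{@code
* EventManager.getInstance().addListener(new MyListener());
* }</pre>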
*
* @param listener the {@code ModelServerListener}
*/
public void addListener(ModelServerListener listener) {
listeners.add(listener);
}
/**
* Invoked when model downloading started.
*
* @param model the model
*/
public void onModelDownloading(ModelInfo<?, ?> model) {
for (ModelServerListener l : listeners) {
l.onModelDownloading(model);
}
}
/**
* Invoked when model downloading finished.
*
* @param model the model
* @param downloadPath the model download directory
*/
public void onModelDownloaded(ModelInfo<?, ?> model, Path downloadPath) {
for (ModelServerListener l : listeners) {
l.onModelDownloaded(model, downloadPath);
}
}
/**
* Invoked when model conversion started.
*
* @param model the model
* @param type the conversion type
*/
public void onModelConverting(ModelInfo<?, ?> model, String type) {
for (ModelServerListener l : listeners) {
l.onModelConverting(model, type);
}
}
/**
* Invoked when model conversion finished.
*
* @param model the model
* @param type the conversion type
*/
public void onModelConverted(ModelInfo<?, ?> model, String type) {
for (ModelServerListener l : listeners) {
l.onModelConverted(model, type);
}
}
/**
* Invoked when model properties configuration finished.
*
* @param model the model
*/
public void onModelConfigured(ModelInfo<?, ?> model) {
for (ModelServerListener l : listeners) {
l.onModelConfigured(model);
}
}
/**
* Invoked when model loading started.
*
* @param model the model
* @param device the device to load the model
*/
public void onModelLoading(ModelInfo<?, ?> model, Device device) {
for (ModelServerListener l : listeners) {
l.onModelLoading(model, device);
}
}
/**
* Invoked when model loading finished.
*
* @param model the model
*/
public void onModelLoaded(ModelInfo<?, ?> model) {
for (ModelServerListener l : listeners) {
l.onModelLoaded(model);
}
}
/**
* Invoked when adapter loading started.
*
* @param model the model
* @param adapterPath the adapter path
*/
public void onAdapterLoading(ModelInfo<?, ?> model, Path adapterPath) {
for (ModelServerListener l : listeners) {
l.onAdapterLoading(model, adapterPath);
}
}
/**
* Invoked when adapter loading finished.
*
* @param model the model
* @param adapter the adapter
*/
public void onAdapterLoaded(ModelInfo<?, ?> model, Adapter adapter) {
for (ModelServerListener l : listeners) {
l.onAdapterLoaded(model, adapter);
}
}
}
|