index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/LicenseExpiredException.java
|
package ai.h2o.mojos.runtime.lic;
import java.util.Date;
/**
 * The exception reports an expired license.
 */
public class LicenseExpiredException extends LicenseException {
  /**
   * Creates the exception with a message naming the expiration date.
   *
   * @param expirationDate the date on which the license expired
   */
  public LicenseExpiredException(Date expirationDate) {
    super(String.format("The license expired on %s", expirationDate), null);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/LicenseNotFoundException.java
|
package ai.h2o.mojos.runtime.lic;
/**
 * The exception reports that MOJO runtime was not able to find a license.
 */
public class LicenseNotFoundException extends LicenseException {
  /**
   * Human-readable help text appended to the exception message. It enumerates
   * every location the runtime searches for a license: environment variables,
   * JVM system properties, and a classpath resource (whose default name can
   * itself be overridden by a system property).
   */
  static final String MESSAGE = "\nMake sure that license is available for MOJO runtime!\n\n"
      + "The license can be specified in the following ways:\n"
      + " * Environment variable: \n"
      + "    - '" + LicenseConsts.ENV_PROP_LICENSE_FILE + "' : A location of file with a license\n"
      + "    - '" + LicenseConsts.ENV_PROP_LICENSE_KEY + "' : A license key\n"
      + " * System properties of JVM (-D option): \n"
      + "    - '" + LicenseConsts.SYS_PROP_LICENSE_FILE + "' : A location of license file.\n"
      + "    - '" + LicenseConsts.SYS_PROP_LICENSE_KEY + "' : A license key.\n"
      + " * Classpath\n"
      + "    - The license is loaded from resource called '/" + LicenseConsts.LICENSE_FILENAME + "'\n"
      + "    - The default resource name can be changed via system property '" + LicenseConsts.SYS_PROP_LICENSE_FILENAME + "'\n";

  /**
   * Creates the exception with a fixed headline plus the search-location help text.
   */
  public LicenseNotFoundException() {
    super("License could not be found!\n" + MESSAGE, null);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/MojoSignatureConsts.java
|
package ai.h2o.mojos.runtime.lic;
/**
 * The default configuration properties used by license manager
 * to look for mojo signature.
 */
public class MojoSignatureConsts {
  /** Name of JVM system property which overrides default signature
   * file name ({@link #MOJO_SIGNATURE_FILENAME}).
   */
  public static final String SYS_PROP_SIGNATURE_FILENAME = "ai.h2o.mojos.runtime.signature.filename";
  /** Name of JVM system property which overrides default watermark
   * file name ({@link #MOJO_WATERMARK_FILENAME}).
   */
  public static final String SYS_PROP_WATERMARK_FILENAME = "ai.h2o.mojos.runtime.watermark.filename";
  /** The default name of MOJO watermark file. */
  public static final String MOJO_WATERMARK_FILENAME = "mojo/pipeline.wm";
  /** The default name of MOJO signature file. */
  public static final String MOJO_SIGNATURE_FILENAME = "mojo/pipeline.sig";
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/MojoSignatureNotFoundException.java
|
package ai.h2o.mojos.runtime.lic;
/**
 * Reports that the MOJO signature file could not be located.
 */
public class MojoSignatureNotFoundException extends LicenseException {
  /**
   * @param filename path of the signature file that was looked up
   */
  public MojoSignatureNotFoundException(String filename) {
    super("Mojo signature '" + filename + "' not found!", null);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/MojoWatermarkNotFoundException.java
|
package ai.h2o.mojos.runtime.lic;
/**
 * Reports that the MOJO watermark could not be located.
 */
public class MojoWatermarkNotFoundException extends LicenseException {
  /**
   * @param name name of the watermark that was looked up
   */
  public MojoWatermarkNotFoundException(String name) {
    super("Mojo watermark '" + name + "' not found!", null);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/package-info.java
|
/**
 * License verification support: constants consulted when locating a license
 * and the exceptions reported when verification fails.
 */
package ai.h2o.mojos.runtime.lic;
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/readers/FolderMojoReaderBackend.java
|
package ai.h2o.mojos.runtime.readers;
import ai.h2o.mojos.runtime.api.backend.DirReaderBackend;
import ai.h2o.mojos.runtime.api.backend.MemoryReaderBackend;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
/**
 * The backend serves MOJO content from a folder.
 *
 * @deprecated use {@link DirReaderBackend} instead
 */
@Deprecated
public class FolderMojoReaderBackend extends MojoReaderBackend {
  /** Serves the MOJO from {@code folder}, using the default pipeline file name. */
  public FolderMojoReaderBackend(String folder) {
    this(folder, null);
  }
  /**
   * Serves the MOJO from {@code folder}.
   *
   * @param folder       directory holding the MOJO content; also used as the base dir
   * @param pipelineFile pipeline file name, or null for the default
   */
  public FolderMojoReaderBackend(String folder, String pipelineFile) {
    super(nonthrowingDirReaderBackend(new File(folder)), folder, File.separator, pipelineFile);
  }
  /**
   * Helper allowing to keep strict API compatibility on above constructors.
   * Probably an overkill, as adding exception would not be much pain, and moreover, this class is essentially unused.
   * But strict compatibility is the goal for now.
   */
  private static ReaderBackend nonthrowingDirReaderBackend(File dir) {
    try {
      return DirReaderBackend.open(dir);
    } catch (IOException e) {
      // Deliberately swallowed (printed to stderr only) so the historical
      // no-throws constructor signatures above can be kept.
      e.printStackTrace();
      // supply empty fileset if anything failed
      return MemoryReaderBackend.open(Collections.<String, byte[]>emptyMap());
    }
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/readers/InMemoryMojoReaderBackend.java
|
package ai.h2o.mojos.runtime.readers;
import ai.h2o.mojos.runtime.api.backend.MemoryReaderBackend;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
/**
 * The backend serves MOJO content from in memory file system.
 *
 * @deprecated use {@link MemoryReaderBackend} instead
 */
@Deprecated
public class InMemoryMojoReaderBackend extends MojoReaderBackend {
  /** Serves {@code mojoContent} with no base directory and the default pipeline file. */
  public InMemoryMojoReaderBackend(Map<String, byte[]> mojoContent) {
    this(mojoContent, null);
  }
  /** Serves {@code mojoContent} rooted at {@code baseDir}, default pipeline file. */
  public InMemoryMojoReaderBackend(Map<String, byte[]> mojoContent, String baseDir) {
    this(mojoContent, baseDir, null);
  }
  /**
   * @param ignored kept only to preserve the historical constructor signature;
   *     the separator is always "/"
   */
  public InMemoryMojoReaderBackend(Map<String, byte[]> mojoContent, String baseDir, String ignored) {
    this(mojoContent, baseDir, "/", null);
  }
  /**
   * @param ignored kept only to preserve the historical constructor signature;
   *     the separator is always "/"
   */
  public InMemoryMojoReaderBackend(Map<String, byte[]> mojoContent, String baseDir,
      String ignored, String pipelineFileName) {
    super(MemoryReaderBackend.open(mojoContent), baseDir, "/", pipelineFileName);
  }
  /** Internal: wraps an already-constructed backend (used by {@link #createFrom}). */
  private InMemoryMojoReaderBackend(ReaderBackend backend, String baseDir,
      String ignored, String pipelineFileName) {
    super(backend, baseDir, "/", pipelineFileName);
  }
  /**
   * Reads a whole MOJO ZIP from the stream into memory and auto-detects the
   * pipeline flavor: a protobuf pipeline at the default path wins over a TOML one.
   *
   * @throws IOException if the stream cannot be read or neither pipeline file exists
   */
  public static InMemoryMojoReaderBackend createFrom(InputStream inputStream) throws IOException {
    final ReaderBackend backend = MemoryReaderBackend.fromZipStream(inputStream);
    if (backend.exists(DEFAULT_PROTO_PIPELINE_FILE_PATH)) {
      return new InMemoryMojoReaderBackend(backend, null, null, null);
    } else if (backend.exists(DEFAULT_TOML_PIPELINE_FILE_PATH)) {
      return new InMemoryMojoReaderBackend(backend, DEFAULT_BASE_DIR, "/", DEFAULT_TOML_PIPELINE_FILENAME);
    } else {
      throw new IOException("Cannot find any pipeline file!");
    }
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/readers/MojoPipelineReaderBackendFactory.java
|
package ai.h2o.mojos.runtime.readers;
import java.io.IOException;
import java.io.InputStream;
/** Backend factory.
 *
 * @deprecated in favor of creation backends directly.
 */
@Deprecated
public class MojoPipelineReaderBackendFactory {
  /**
   * Creates an in-memory backend by reading the whole MOJO ZIP from the stream.
   *
   * @throws IOException if the stream cannot be read or contains no pipeline file
   */
  public static MojoReaderBackend createReaderBackend(InputStream stream) throws IOException {
    return InMemoryMojoReaderBackend.createFrom(stream);
  }
  /**
   * Creates a backend reading directly from the MOJO ZIP archive on disk.
   *
   * @throws IOException if the archive cannot be opened
   */
  public static MojoReaderBackend createReaderBackend(String archivename) throws IOException {
    return new MojofileMojoReaderBackend(archivename);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/readers/MojoReaderBackend.java
|
package ai.h2o.mojos.runtime.readers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.api.backend.ReaderBackendUtils;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
/**
 * Interface representing a class capable of reading mojos. Usually the class
 * implementing this interface will have a constructor taking an "address" of
 * the mojo file that should be read. The class would then have to implement
 * the logic for reading the referred mojo from the source.
 * <p>
 * For example, a hypothetical <code>MysqlMojoReaderBackend</code> may have
 * a constructor taking the URL of the server, connection credentials, and
 * a SQL query for retrieving the mojo record. The class would then implement
 * the logic for connecting to the database and fetching the mojo (whole or in
 * parts). It would also throw an {@link IOException} if anything fails.
 * <p>
 * The actual interface that the class needs to implement is for reading either
 * text or binary fragments from within the mojo. This is because a
 * <code>.mojo</code> file is actually a zip archive, and hence it contains
 * several "files" inside. The user may decide to repackage the mojo contents
 * into a different container: a plain directory for easier access, a
 * <code>.7z</code> file for better compression, an encrypted Zip file for
 * better security, etc. If the reader doesn't wish to re-package the mojo
 * contents and only retrieve it from non-filesystem source, then it may
 * create a temporary .mojo file and pass it to the
 * {@link MojofileMojoReaderBackend} reader.
 *
 * @deprecated use {@link ReaderBackend} instead.
 */
@Deprecated
public abstract class MojoReaderBackend implements Closeable {
  // Pipeline file path; resolved once in the constructor (see rules there).
  private String _pipelineFileName;
  // Base directory, normalized to "" or a path ending with a separator.
  private String _baseDir;
  // NOTE(review): the DEFAULT_* constants below are mutable public statics;
  // they look like they should be final — confirm no caller reassigns them.
  /**
   * Default top-level directory, where mojo content is stored.
   */
  public static String DEFAULT_BASE_DIR = "mojo/";
  /* Default name of protobuffer-based pipeline */
  public static String DEFAULT_PROTO_PIPELINE_FILENAME = "pipeline.pb";
  /* Default name of TOML-based pipeline */
  public static String DEFAULT_TOML_PIPELINE_FILENAME = "pipeline.toml";
  /**
   * Default file path for proto-based pipeline
   */
  public static String DEFAULT_PROTO_PIPELINE_FILE_PATH = DEFAULT_BASE_DIR + DEFAULT_PROTO_PIPELINE_FILENAME;
  /**
   * Default file path for toml-based pipeline
   */
  public static String DEFAULT_TOML_PIPELINE_FILE_PATH = DEFAULT_BASE_DIR + DEFAULT_TOML_PIPELINE_FILENAME;
  // The actual content source all read operations delegate to.
  protected final ReaderBackend backend;
  /**
   * @param backend          content source to delegate to
   * @param baseDir          base directory, or null for none
   * @param ignored          kept only for historical signature compatibility
   * @param pipelineFileName pipeline file name, or null for the default
   */
  protected MojoReaderBackend(ReaderBackend backend, String baseDir, String ignored, String pipelineFileName) {
    this.backend = backend;
    // Pipeline-file resolution: an explicit name wins; otherwise, with no
    // baseDir the full default path ("mojo/pipeline.pb") is used, while with a
    // baseDir only the default file name (relative to baseDir) is used.
    _pipelineFileName = pipelineFileName == null
        ? (baseDir == null
        ? DEFAULT_PROTO_PIPELINE_FILE_PATH
        : DEFAULT_PROTO_PIPELINE_FILENAME)
        : pipelineFileName;
    if (baseDir == null) {
      _baseDir = "";
    } else {
      // Normalize: guarantee a trailing separator on any non-empty base dir.
      if (baseDir.isEmpty() || endsWithSeparator(baseDir)) {
        _baseDir = baseDir;
      } else {
        _baseDir = baseDir + "/";
      }
    }
  }
  @Override
  public final void close() throws IOException {
    backend.close();
  }
  /**
   * Retrieve content inside the mojo, as a {@link InputStream}.
   */
  public final InputStream getFile(String filename) throws IOException {
    return backend.getInputStream(filename);
  }
  /**
   * Retrieve text content inside the mojo, as a {@link BufferedReader}.
   */
  public final BufferedReader getTextFile(String filename) throws IOException {
    return ReaderBackendUtils.asReader(backend.getInputStream(filename));
  }
  /**
   * Retrieve binary content inside the mojo, as a <code>byte[]</code> array.
   */
  public final byte[] getBinaryFile(String filename) throws IOException {
    return ReaderBackendUtils.getBytes(backend.getInputStream(filename));
  }
  /** @return true if the named entry exists inside the mojo */
  public final boolean exists(String filename) {
    return backend.exists(filename);
  }
  /**
   * Do not use. Not useful outside mojo2 project.<br/>
   * Exists only for retaining API compatibility inside mojo2 classes, and will be removed
   * in future versions, without notice, deprecation phase and replacement.
   */
  public ReaderBackend internalGetReaderBackend() {
    return this.backend;
  }
  /**
   * Get pipeline file path relative to baseDir.
   */
  public String getPipelineFileName() {
    return _pipelineFileName;
  }
  /** @return normalized base dir: either "" or a path ending with a separator */
  public String getBaseDir() {
    return _baseDir;
  }
  /**
   * @deprecated use just slash (<code>/</code>) instead, it is supported on all systems equally well, or use {@link File#separator} if that makes you feel better
   */
  @Deprecated
  public String getSeparator() {
    return "/";
  }
  /**
   * This is very safe way to check if a path ends with separator character, no matter which platform was used to produce the path
   * and which platform we use it on.
   *
   * @param path path to inspect; null and empty both yield false
   * @return true if the last character is separator
   */
  public static boolean endsWithSeparator(String path) {
    if (path == null) return false;
    if (path.isEmpty()) return false;
    final char lastChar = path.charAt(path.length() - 1);
    // Accept both well-known separators regardless of the current platform...
    switch (lastChar) {
      case '/':
      case '\\':
        return true;
    }
    // ...plus whatever the current platform uses (redundant on Unix/Windows,
    // kept for exotic platforms).
    return lastChar == File.separatorChar;
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/readers/MojofileMojoReaderBackend.java
|
package ai.h2o.mojos.runtime.readers;
import ai.h2o.mojos.runtime.api.backend.ZipFileReaderBackend;
import java.io.File;
import java.io.IOException;
/**
 * The backend serves MOJO content from a ZIP file.
 *
 * @deprecated use {@link ZipFileReaderBackend} instead
 */
@Deprecated
public class MojofileMojoReaderBackend extends MojoReaderBackend {
  /** Opens {@code archivename} with no base dir and the default pipeline file. */
  public MojofileMojoReaderBackend(String archivename) throws IOException {
    this(archivename, null);
  }
  /** Opens {@code archivename} rooted at {@code baseDir}, default pipeline file. */
  public MojofileMojoReaderBackend(String archivename, String baseDir) throws IOException {
    this(archivename, baseDir, null);
  }
  /**
   * Opens the MOJO ZIP archive.
   *
   * @param archivename      path of the ZIP archive on disk
   * @param baseDir          base directory inside the archive, or null
   * @param pipelineFileName pipeline file name, or null for the default
   * @throws IOException if the archive cannot be opened
   */
  public MojofileMojoReaderBackend(String archivename, String baseDir, String pipelineFileName) throws IOException {
    super(ZipFileReaderBackend.open(new File(archivename)), baseDir, "/", pipelineFileName);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/readers/package-info.java
|
/**
 * Deprecated reader-backend shims kept only for API compatibility.
 *
 * @deprecated use classes based on {@link ai.h2o.mojos.runtime.api.backend.ReaderBackend}
 */
package ai.h2o.mojos.runtime.readers;
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/transforms/MojoTransform.java
|
package ai.h2o.mojos.runtime.transforms;
import ai.h2o.mojos.runtime.api.MojoTransformationGroup;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import java.util.Arrays;
/**
 * Representation of one transformation operation in the pipeline.
 */
public abstract class MojoTransform {
  /** Input column indices read by this transform. */
  public final int[] iindices;
  /** Output column indices written by this transform. */
  public final int[] oindices;
  private String transformId;
  private String transformName;
  private MojoTransformationGroup group;

  public MojoTransform(int[] iindices, int[] oindices) {
    assert iindices != null;
    assert oindices != null;
    this.iindices = iindices;
    this.oindices = oindices;
  }

  /**
   * <p>Hierarchical transformer index identification.</p>
   * <p>
   * Each transformer in a pipeline is numbered by its physical order starting at 0,
   * and that number is formatted as a zero-padded 3-digit component of the
   * hierarchical index. Top-level transforms carry the prefix <code>T</code>
   * (the root pipeline as container); transforms nested inside an
   * <code>EXEC_OP</code> container use the container's ID, an underscore, and
   * their own index.
   * <p>
   * Examples:
   * <ul>
   * <li><code>T000</code> = first transform in the root pipeline</li>
   * <li><code>T005_032</code> = in the root pipeline's EXEC_OP at position 005, the nested pipeline has this transform at position 032</li>
   * </ul>
   */
  public String getId() {
    return transformId;
  }

  public void setId(final String id) {
    this.transformId = id;
  }

  public MojoTransformationGroup getTransformationGroup() {
    return group;
  }

  public void setTransformationGroup(final MojoTransformationGroup transformationGroup) {
    this.group = transformationGroup;
  }

  public String getName() {
    return transformName;
  }

  public void setName(final String name) {
    this.transformName = name;
  }

  /** Applies this transformation to the given frame in place. */
  public abstract void transform(MojoFrame frame);

  @Override
  public String toString() {
    return transformId + ":" + getName() + ":"
        + Arrays.toString(iindices) + "->" + Arrays.toString(oindices);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/transforms/MojoTransformBuilderFactory.java
|
package ai.h2o.mojos.runtime.transforms;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import java.util.Map;
/**
 * Binds a custom transformer to a unique name, by which it can be referenced from within mojo2 file.
 * Every custom transformer must define a class implementing this interface.
 * That class must be registered using {@link java.util.ServiceLoader} mechanism (= listed in META-INF/services/ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory resource).
 */
public interface MojoTransformBuilderFactory {
  /**
   * Identification of custom transformer.
   *
   * Used to identify transformer instantiator when loading from ProtoBuf state.
   * Must be unique per JVM.
   *
   * @return unique identification of the builder (default is canonical class name)
   */
  String transformerName();

  /**
   * Instantiate a transformation builder.
   *
   * @param meta frame metadata the transformer operates on
   * @param iindices input column indices of the transformer
   * @param oindices output column indices of the transformer
   * @param params parameters of the custom transformer
   * @param backend reader backend to read content of stateFile
   * @return a new transformation builder
   */
  // Parameter renamed from the misspelled "iindcies"; Java parameter names are
  // not part of the call-site contract, so this is source- and binary-compatible.
  MojoTransform createBuilder(MojoFrameMeta meta,
      int[] iindices, int[] oindices,
      Map<String, Object> params,
      ReaderBackend backend);
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/utils/Base64Utils.java
|
package ai.h2o.mojos.runtime.utils;
import java.util.Base64;
/**
* Helper class for Base64 operations.
*/
public class Base64Utils {
private static final Base64.Decoder DECODER = Base64.getDecoder();
private static final Base64.Decoder DECODER_URL = Base64.getUrlDecoder();
private static final Base64.Encoder ENCODER = Base64.getEncoder();
public static byte[] encode(byte[] src) {
return ENCODER.encode(src);
}
public static byte[] decode(byte[] src) {
return DECODER.decode(src);
}
public static byte[] decodeUrl(byte[] src) {
return DECODER_URL.decode(src);
}
public static byte[] decodeUrl(String src) {
return !isEmpty(src) ? decodeUrl(src.getBytes()) : null;
}
public static boolean isEmpty(String s) {
return s == null || s.isEmpty();
}
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/utils/ClassLoaderUtils.java
|
package ai.h2o.mojos.runtime.utils;
/**
 * Chooses which {@link ClassLoader} to use for SPI lookups and Javassist,
 * based on two opt-in system properties (read through {@link Consts}).
 */
public class ClassLoaderUtils {
  public static final String PROP_USE_CONTEXT_CLASSLOADER_FOR_SPI = "runtime.useContextClassloaderForSpi";
  public static final String PROP_USE_CONTEXT_CLASSLOADER_FOR_JAVASSIST = "runtime.useContextClassloaderForJavassist";
  public static final boolean DEFAULT_USE_CONTEXT_CLASSLOADER_FOR_SPI = false;
  public static final boolean DEFAULT_USE_CONTEXT_CLASSLOADER_FOR_JAVASSIST = false;

  static boolean getPropUseContextClassloaderForSpi() {
    return Consts.getSysProp(
        PROP_USE_CONTEXT_CLASSLOADER_FOR_SPI, DEFAULT_USE_CONTEXT_CLASSLOADER_FOR_SPI);
  }

  static boolean getPropUseContextClassloaderForJavassist() {
    return Consts.getSysProp(
        PROP_USE_CONTEXT_CLASSLOADER_FOR_JAVASSIST, DEFAULT_USE_CONTEXT_CLASSLOADER_FOR_JAVASSIST);
  }

  /**
   * @return the context class loader when the SPI property opts in,
   *     otherwise the loader of {@code usedClass}
   */
  public static ClassLoader getPreferredSpiClassLoader(Class<?> usedClass) {
    if (getPropUseContextClassloaderForSpi()) {
      return Thread.currentThread().getContextClassLoader();
    }
    return usedClass.getClassLoader();
  }

  /**
   * @return the context class loader when the Javassist property opts in,
   *     otherwise the loader of {@code usedClass}
   */
  public static ClassLoader getPreferredJavassistClassLoader(Class<?> usedClass) {
    if (getPropUseContextClassloaderForJavassist()) {
      return Thread.currentThread().getContextClassLoader();
    }
    return usedClass.getClassLoader();
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/utils/Consts.java
|
package ai.h2o.mojos.runtime.utils;
/**
 * Accessors for runtime configuration taken from JVM system properties, all
 * sharing the {@link #SYS_PROP_PREFIX} name prefix.
 */
public class Consts {
  /** Prefix for system configuration variables */
  public static final String SYS_PROP_PREFIX = "sys.ai.h2o.mojos.";

  /**
   * Builds the fully-qualified property name for the given suffix.
   *
   * @param suffix property name without the {@link #SYS_PROP_PREFIX} prefix
   * @return {@code SYS_PROP_PREFIX + suffix}
   */
  public static String prop(String suffix) {
    return SYS_PROP_PREFIX + suffix;
  }

  /** @return true if the prefixed system property is set to any value */
  public static boolean propExist(String suffix) {
    return System.getProperty(prop(suffix)) != null;
  }

  /** @return the prefixed property value, or {@code defaultValue} when unset */
  public static String getSysProp(String suffix, String defaultValue) {
    return System.getProperty(prop(suffix), defaultValue);
  }

  /** @return the prefixed property parsed as boolean, or {@code defaultValue} when unset */
  public static boolean getSysProp(String suffix, boolean defaultValue) {
    // parseXxx variants avoid the needless boxing of the former valueOf calls;
    // parsing behavior is identical.
    return Boolean.parseBoolean(System.getProperty(prop(suffix), Boolean.toString(defaultValue)));
  }

  /**
   * @return the prefixed property parsed as byte, or {@code defaultValue} when unset
   * @throws NumberFormatException if the property is set but not a valid byte
   */
  public static byte getSysProp(String suffix, byte defaultValue) {
    return Byte.parseByte(System.getProperty(prop(suffix), Byte.toString(defaultValue)));
  }

  /**
   * @return the prefixed property parsed as int, or {@code defaultValue} when unset
   * @throws NumberFormatException if the property is set but not a valid int
   */
  public static int getSysProp(String suffix, int defaultValue) {
    return Integer.parseInt(System.getProperty(prop(suffix), Integer.toString(defaultValue)));
  }

  /**
   * @return the prefixed property parsed as long, or {@code defaultValue} when unset
   * @throws NumberFormatException if the property is set but not a valid long
   */
  public static long getSysProp(String suffix, long defaultValue) {
    return Long.parseLong(System.getProperty(prop(suffix), Long.toString(defaultValue)));
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/utils/DateParser.java
|
package ai.h2o.mojos.runtime.utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
/**
 * Parses date/time strings against a fixed set of supported patterns
 * ({@link #SUPPORTED_FORMATS}), optionally extended by user-defined patterns
 * from the "date.formats" system property (read via {@link Consts}).
 * <p>
 * Square brackets in a pattern mark optional groups (e.g. {@code M[M]});
 * {@link #expandFormats(List)} expands each such pattern into every concrete
 * variant before handing them to Joda-Time.
 */
public class DateParser {
  // The composed Joda formatter this instance parses with.
  private final DateTimeFormatter _dtf;
  /**
   * User can define their own formats.
   * Note that this is a non-public feature - users should not use it; intended as a backdoor for fast resolution of date format issues.
   */
  // NOTE(review): splitting "" yields a single empty-string element, which
  // expandFormat() later drops (empty formats are never added to the result).
  static String[] USER_DEFINED_FORMATS = Consts.getSysProp("date.formats", "").split(",");
  // Built-in patterns; bracketed parts are optional groups expanded below.
  public final static String[] SUPPORTED_FORMATS = {
      "M[M]/d[d]/yyyy[' ']['T'][' ']h[h]:mm:ss[' ']a",
      "M[M]/d[d]/yyyy[' ']['T'][' ']h[h]:mm:ss.SSS[' ']a",
      "M[M]/d[d]/yyyy[' ']['T'][' ']H[H]:mm:ss",
      "M[M]/d[d]/yyyy[' ']['T'][' ']H[H]:mm:ss.SSS",
      "M[M]/d[d]/yyyy",
      "M[M]-d[d]-yyyy[' ']['T'][' ']h[h]:mm:ss[' ']a",
      "M[M]-d[d]-yyyy[' ']['T'][' ']h[h]:mm:ss.SSS[' ']a",
      "M[M]-d[d]-yyyy[' ']['T'][' ']H[H]:mm:ss",
      "M[M]-d[d]-yyyy[' ']['T'][' ']H[H]:mm:ss.SSS",
      "M[M]-d[d]-yyyy",
      "d[d].M[M].yyyy[' ']['T'][' ']h[h]:mm:ss[' ']a",
      "d[d].M[M].yyyy[' ']['T'][' ']h[h]:mm:ss.SSS[' ']a",
      "d[d].M[M].yyyy[' ']['T'][' ']H[H]:mm:ss",
      "d[d].M[M].yyyy[' ']['T'][' ']H[H]:mm:ss.SSS",
      "d[d].M[M].yyyy",
      "yyyy/M[M]/d[d]' 'HH:mm:ss",
      "yyyy/M[M]/d[d]' 'HH:mm:ss.SSS",
      "yyyy/M[M]/d[d]",
      "yyyy-M[M]-d[d]' 'HH:mm:ss",
      "yyyy-M[M]-d[d]' 'HH:mm:ss.SSS",
      "yyyy-M[M]-d[d]'T'HH:mm:ss.SSSSSSSSS", // Datatable format
      "yyyyMMdd", // %Y%m%d Needed for backwards compatibility
      "yyyy-M[M]-d[d]",
      "h[h]:mm:ss[' ']a",
      "h[h]:mm:ss.SSS[' ']a",
      "H[H]:mm:ss",
      "H[H]:mm:ss.SSS",
      "h[h]:mm[' ']a",
      "H[H]:mm",
  };
  /** Creates a parser that falls back to the default formats when {@code dtp} fails. */
  public DateParser(DateTimeParser dtp) {
    this(dtp, true);
  }
  /**
   * @param dtp                 primary parser to try first
   * @param useDefaultAsBackup  when true, also try the composite default
   *                            parser {@link #DTF} if {@code dtp} fails
   */
  public DateParser(DateTimeParser dtp, boolean useDefaultAsBackup) {
    if (useDefaultAsBackup) {
      _dtf = new DateTimeFormatterBuilder()
          .append(null, new DateTimeParser[]{dtp, DTF.getParser()}).toFormatter();
    } else {
      // Parse-only formatter: no printer is attached.
      _dtf = new DateTimeFormatter(null, dtp);
    }
  }
  /**
   * Parse given string based on configured format.
   *
   * @param s string to parse
   * @return instance of MojoDateTime if parse was successful or `null` in case of a problem.
   */
  public MojoDateTime parse(String s) {
    return MojoDateTime.create(_dtf.withZone(MojoDateTime.TIMEZONE).parseDateTime(s));
  }
  /** Like {@link #parse(String)} but returns null instead of throwing on bad input. */
  public MojoDateTime parseOrNull(String s) {
    try {
      return parse(s);
    } catch (IllegalArgumentException e) {
      return null;
    }
  }
  // Composite parser over all expanded supported + user-defined formats.
  // NOTE(review): public mutable static — looks like it should be final; confirm.
  public static DateTimeFormatter DTF;
  static {
    final List<String> formats = new ArrayList<>(Arrays.asList(SUPPORTED_FORMATS));
    formats.addAll(Arrays.asList(USER_DEFINED_FORMATS));
    DTF = new DateTimeFormatterBuilder()
        .append(null, toFormatter(formats))
        .toFormatter();
  }
  /** Expands optional groups and compiles each concrete pattern into a Joda parser. */
  static DateTimeParser[] toFormatter(List<String> formats) {
    final List<String> expandedFormats = expandFormats(formats);
    DateTimeParser[] result = new DateTimeParser[expandedFormats.size()];
    int i = 0;
    for (String f : expandedFormats) {
      result[i++] = DateTimeFormat.forPattern(f).getParser();
    }
    return result;
  }
  /** Expands every pattern in {@code formats} into all of its concrete variants. */
  static List<String> expandFormats(List<String> formats) {
    ArrayList<String> expandedFormats = new ArrayList<>();
    for (String format : formats) {
      expandFormat(new StringBuilder(format), 0, expandedFormats);
    }
    return expandedFormats;
  }
  /**
   * Recursively expands the first top-level bracketed group at or after
   * {@code startIdx}. For each comma-separated alternative inside the group,
   * the group is replaced by that alternative and expansion recurses; one more
   * recursion covers the "group absent" case. Fully expanded (group-free)
   * non-empty formats are appended to {@code result}.
   */
  static void expandFormat(StringBuilder format, int startIdx, List<String> result) {
    StringBuilder prefix = new StringBuilder();
    String suffix = null;
    int openGroupIdx = -1;   // index of the first top-level '['
    int closeGroupIdx = -1;  // index of its matching ']'
    int openingCnt = 0;      // bracket-nesting depth inside the group
    for (int i = startIdx; i < format.length(); i++) {
      char c = format.charAt(i);
      switch (c) {
        case '[':
          if (openGroupIdx < 0) {
            openGroupIdx = i;
            // Everything before the group (from index 0, not startIdx).
            prefix.append(format, 0, i);
          }
          openingCnt++;
          break;
        case ']': {
          if (openGroupIdx >= 0) openingCnt--;
          if (openingCnt == 0) {
            closeGroupIdx = i;
            suffix = format.substring(i+1);
          }
        }
        break;
        default:
      }
      if (closeGroupIdx > 0) break;
    }
    if (openGroupIdx >= 0) {
      // Generate formats with optional groups
      String[] groups = format.substring(openGroupIdx+1, closeGroupIdx).split(",");
      for (String group : groups) {
        expandFormat(new StringBuilder(prefix).append(group).append(suffix), openGroupIdx, result);
      }
      // Generate formats without the optional group
      // (the second argument below algebraically simplifies to openGroupIdx)
      expandFormat(format.delete(openGroupIdx, closeGroupIdx + 1), closeGroupIdx - (closeGroupIdx - openGroupIdx), result);
    } else {
      if (format.length() > 0)
        result.add(format.toString());
    }
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/utils/MojoDateTime.java
|
package ai.h2o.mojos.runtime.utils;
import org.joda.time.DateTime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatterBuilder;
import java.util.Objects;
/**
 * Date/Time holder, storing times in UTC timezone.
 */
public class MojoDateTime {
  // Default to fill missing date fields
  public static final DateTimeZone TIMEZONE = DateTimeZone.UTC;
  // Snapshot of "now" taken once at class-load time; used to backfill a
  // missing year/month/day (so two parses of a time-only string on different
  // days of a long-running JVM still agree).
  static private DateTime NOW = DateTime.now(TIMEZONE);
  /**
   * Date components addressable by name. Each constant's {@link #write(String)}
   * emits a Java expression string invoking the matching getter on the named
   * variable — presumably consumed by generated code; confirm with callers.
   */
  public enum DatePart {
    YEAR("year") {
      @Override public String write(String var) { return "(" + var + ".getYear())"; }
    },
    QUARTER("quarter") {
      @Override public String write(String var) { return "(" + var + ".getQuarter())"; }
    },
    MONTH("month") {
      @Override public String write(String var) { return "(" + var + ".getMonth())"; }
    },
    WEEK("week") {
      @Override public String write(String var) { return "(" + var + ".getWeekOfYear())"; }
    },
    WEEKDAY("weekday") {
      @Override public String write(String var) { return "(" + var + ".getDayOfWeek())"; }
    },
    DAY("day") {
      @Override public String write(String var) { return "(" + var + ".getDay())"; }
    },
    DAYOFYEAR("dayofyear") {
      @Override public String write(String var) { return "(" + var + ".getDayOfYear())"; }
    },
    HOUR("hour") {
      @Override public String write(String var) { return "(" + var + ".getHour())"; }
    },
    MINUTE("minute") {
      @Override public String write(String var) { return "(" + var + ".getMinute())"; }
    },
    SECOND("second") {
      @Override public String write(String var) { return "(" + var + ".getSecond())"; }
    };
    private final String _name;
    DatePart(String name) { _name = name; }
    @Override public String toString() { return _name; }
    /** @return a Java expression string reading this part from variable {@code var} */
    public abstract String write(String var);
  }
  // Decomposed calendar fields (see constructor for the fallback rules).
  private int _YEAR, _MONTH, _WEEK_OF_YEAR, _DAY, _DAY_OF_YEAR, _DAY_OF_WEEK, _HOUR, _MINUTE, _SECOND;
  // Epoch millis of the source DateTime; NOT part of equals/hashCode (see below).
  private long _MILLIS_FROM_UNIX;
  /** Creates the holder from epoch milliseconds, interpreted in UTC. */
  public MojoDateTime(long millisFromUnix) {
    this(new DateTime(millisFromUnix, TIMEZONE));
  }
  // Decomposes dt into fields. Missing date fields fall back hierarchically:
  // unsupported year => today's (class-load-time) date; unsupported month/day
  // default to 1. Missing time fields default to 0.
  private MojoDateTime(DateTime dt) {
    if (dt.isSupported(DateTimeFieldType.year())) {
      _YEAR = dt.get(DateTimeFieldType.year());
      if (dt.isSupported(DateTimeFieldType.monthOfYear())) {
        _MONTH = dt.get(DateTimeFieldType.monthOfYear());
        if (dt.isSupported(DateTimeFieldType.dayOfMonth())) {
          _DAY = dt.get(DateTimeFieldType.dayOfMonth());
        } else {
          _DAY = 1;
        }
      } else {
        _MONTH = 1;
        _DAY = 1;
      }
    } else {
      _YEAR = NOW.getYear();
      _MONTH = NOW.getMonthOfYear();
      _DAY = NOW.getDayOfMonth();
    }
    // Derived fields recomputed from the (possibly backfilled) Y/M/D when the
    // source value does not carry them directly.
    LocalDate tmp = new LocalDate(_YEAR, _MONTH, _DAY);
    if (dt.isSupported(DateTimeFieldType.dayOfYear()) && dt.isSupported(DateTimeFieldType.dayOfWeek())) {
      _DAY_OF_YEAR = dt.get(DateTimeFieldType.dayOfYear());
      _DAY_OF_WEEK = dt.get(DateTimeFieldType.dayOfWeek());
    } else {
      _DAY_OF_YEAR = tmp.getDayOfYear();
      _DAY_OF_WEEK = tmp.getDayOfWeek();
    }
    if (dt.isSupported(DateTimeFieldType.weekOfWeekyear())) {
      _WEEK_OF_YEAR = dt.get(DateTimeFieldType.weekOfWeekyear());
    }
    if (dt.isSupported(DateTimeFieldType.hourOfDay())) { // FIXME: what about halfHourOfDay
      _HOUR = dt.get(DateTimeFieldType.hourOfDay());
      if (dt.isSupported(DateTimeFieldType.minuteOfHour())) {
        _MINUTE = dt.get(DateTimeFieldType.minuteOfHour());
        if (dt.isSupported(DateTimeFieldType.secondOfMinute())) {
          _SECOND = dt.get(DateTimeFieldType.secondOfMinute());
        } else {
          _SECOND = 0;
        }
      } else {
        _MINUTE = 0;
        _SECOND = 0;
      }
    } else {
      _HOUR = 0;
      _MINUTE = 0;
      _SECOND = 0;
    }
    _MILLIS_FROM_UNIX = dt.getMillis();
  }
  /**
   * Parses {@code s} with the default composite parser ({@link DateParser#DTF}).
   *
   * @throws IllegalArgumentException if the string matches no supported format
   */
  public static MojoDateTime parse(String s) {
    DateTime out = DateParser.DTF.withZone(TIMEZONE).parseDateTime(s);
    return new MojoDateTime(out);
  }
  /** Parses {@code s} with the supplied parser. */
  public static MojoDateTime parse(String s, DateParser parser) {
    return parser.parse(s);
  }
  /** Wraps an existing Joda {@link DateTime}. */
  public static MojoDateTime create(DateTime s) {
    return new MojoDateTime(s);
  }
  public int getYear() {
    return _YEAR;
  }
  /** @return quarter of year in 1..4, derived from the month */
  public int getQuarter() {
    return (_MONTH - 1) / 3 + 1;
  }
  public int getMonth() {
    return _MONTH;
  }
  public int getDay() {
    return _DAY;
  }
  public int getDayOfYear() {
    return _DAY_OF_YEAR;
  }
  public int getWeekOfYear() {
    return _WEEK_OF_YEAR;
  }
  // Shifts the stored (Joda-sourced) day-of-week down by one — presumably to
  // expose a 0-based value; confirm expected convention with callers.
  public int getDayOfWeek() {
    return _DAY_OF_WEEK - 1;
  }
  public int getHour() {
    return _HOUR;
  }
  public int getMinute() {
    return _MINUTE;
  }
  public int getSecond() {
    return _SECOND;
  }
  /** @return epoch milliseconds of the source instant */
  public long getMillis() {
    return _MILLIS_FROM_UNIX;
  }
  /** Appends each pattern to {@code dtfb} as an optional parser and returns it. */
  public static final DateTimeFormatterBuilder genFormatterBuilder(DateTimeFormatterBuilder dtfb, String[] formats) {
    for (String f : formats) {
      dtfb.appendOptional(DateTimeFormat.forPattern(f).getParser());
    }
    return dtfb;
  }
  // NOTE(review): equals/hashCode deliberately(?) exclude _MILLIS_FROM_UNIX,
  // so instants differing only in sub-second millis compare equal — confirm.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    MojoDateTime that = (MojoDateTime) o;
    return _YEAR == that._YEAR &&
        _MONTH == that._MONTH &&
        _WEEK_OF_YEAR == that._WEEK_OF_YEAR &&
        _DAY == that._DAY &&
        _DAY_OF_YEAR == that._DAY_OF_YEAR &&
        _DAY_OF_WEEK == that._DAY_OF_WEEK &&
        _HOUR == that._HOUR &&
        _MINUTE == that._MINUTE &&
        _SECOND == that._SECOND;
  }
  @Override
  public int hashCode() {
    return Objects
        .hash(_YEAR, _MONTH, _WEEK_OF_YEAR, _DAY, _DAY_OF_YEAR, _DAY_OF_WEEK, _HOUR, _MINUTE,
            _SECOND);
  }
  @Override
  public String toString() {
    return "MojoDateTime{" +
        "_YEAR=" + _YEAR +
        ", _MONTH=" + _MONTH +
        ", _WEEK_OF_YEAR=" + _WEEK_OF_YEAR +
        ", _DAY=" + _DAY +
        ", _DAY_OF_YEAR=" + _DAY_OF_YEAR +
        ", _DAY_OF_WEEK=" + _DAY_OF_WEEK +
        ", _HOUR=" + _HOUR +
        ", _MINUTE=" + _MINUTE +
        ", _SECOND=" + _SECOND +
        '}';
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/utils/ParserUtils.java
|
package ai.h2o.mojos.runtime.utils;
/** String-parsing helpers. */
public class ParserUtils {
  /**
   * Trim leading and ending ' '.
   *
   * @param s input string
   * @return a substring with leading/trailing spaces removed, or the original
   *     instance when there is nothing to trim
   */
  public static String trimSpace(String s) {
    int from = 0;
    int to = s.length();
    while (from < to && s.charAt(from) == ' ') {
      from++;
    }
    while (from < to && s.charAt(to - 1) == ' ') {
      to--;
    }
    // Preserve the identity-return of the original: untouched input comes
    // back as the same object.
    if (from == 0 && to == s.length()) {
      return s;
    }
    return s.substring(from, to);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime/h2o3/H2O3BackendAdapter.java
|
package ai.h2o.mojos.runtime.h2o3;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.api.backend.ReaderBackendUtils;
import hex.genmodel.MojoReaderBackend;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
 * Wraps our own {@link ReaderBackend} into the H2O3 interface {@link MojoReaderBackend}
 * so it can be passed to H2O3 methods.
 */
class H2O3BackendAdapter implements MojoReaderBackend {

  /** Underlying MOJO2 resource reader all calls delegate to. */
  private final ReaderBackend backend;

  public H2O3BackendAdapter(ReaderBackend backend) {
    this.backend = backend;
  }

  @Override
  public BufferedReader getTextFile(String filename) throws IOException {
    final InputStream is = backend.getInputStream(filename);
    // Pin the charset instead of using the platform default so decoding does not vary
    // across JVMs. MOJO text resources are assumed to be UTF-8 — TODO confirm.
    return new BufferedReader(
        new InputStreamReader(is, java.nio.charset.StandardCharsets.UTF_8));
  }

  @Override
  public byte[] getBinaryFile(String filename) throws IOException {
    // try-with-resources guarantees the input stream is closed even if the copy fails.
    try (final InputStream is = backend.getInputStream(filename);
         final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
      ReaderBackendUtils.copy(is, baos, 8192);
      return baos.toByteArray();
    }
  }

  @Override
  public boolean exists(String filename) {
    return backend.exists(filename);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime/h2o3/H2O3PipelineLoader.java
|
package ai.h2o.mojos.runtime.h2o3;
import ai.h2o.mojos.runtime.AbstractPipelineLoader;
import ai.h2o.mojos.runtime.MojoPipeline;
import ai.h2o.mojos.runtime.MojoPipelineMeta;
import ai.h2o.mojos.runtime.MojoPipelineProtoImpl;
import ai.h2o.mojos.runtime.api.MojoColumnMeta;
import ai.h2o.mojos.runtime.api.MojoTransformMeta;
import ai.h2o.mojos.runtime.api.PipelineConfig;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoColumn;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformExecPipeBuilder;
import hex.genmodel.GenModel;
import hex.genmodel.MojoModel;
import hex.genmodel.MojoReaderBackend;
import hex.genmodel.easy.EasyPredictModelWrapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.joda.time.DateTime;
/**
 * Loads an H2O-3 (or Sparkling Water) MOJO via h2o-genmodel and exposes it as a
 * MOJO2 pipeline consisting of a single {@link H2O3Transform}.
 */
class H2O3PipelineLoader extends AbstractPipelineLoader {
// Combined metadata of all input and output columns of the pipeline frame.
private final List<MojoColumnMeta> globalColumns;
// Execution-pipe builder holding the single H2O3 transform plus pipeline metadata.
private final MojoTransformExecPipeBuilder root;
/**
 * Reads the H2O-3 model from the backend, wraps it for row-wise prediction and
 * assembles the single-transform execution pipe.
 *
 * @param backend resource reader that contains the H2O-3 MOJO archive
 * @param config pipeline configuration; {@code isShapEnabled()} turns on SHAP contributions
 * @throws IOException when the model cannot be read from the backend
 */
public H2O3PipelineLoader(ReaderBackend backend, PipelineConfig config) throws IOException {
super(backend, config);
final MojoReaderBackend mojoReader = new H2O3BackendAdapter(backend);
final MojoModel model = MojoModel.load(mojoReader);
final EasyPredictModelWrapper easyPredictModelWrapper = wrapModelForPrediction(model, config.isShapEnabled());
final String name = "h2o3:" + model.getModelCategory().toString();
this.globalColumns = new ArrayList<>();
final int[] inputIndices = readInputIndices(globalColumns, model);
final int[] outputIndices = readOutputIndices(globalColumns, model);
final MojoFrameMeta globalMeta = new MojoFrameMeta(globalColumns);
final MojoTransform transform = new H2O3Transform(globalMeta, inputIndices, outputIndices, easyPredictModelWrapper);
transform.setId("h2o3-main");
transform.setName(name);
// Unix epoch used as the creation time — presumably a placeholder because the
// H2O-3 MOJO does not provide one here; TODO confirm.
final DateTime creationTime = new DateTime(1970, 1, 1, 0, 0);
final MojoPipelineMeta pipelineMeta = new MojoPipelineMeta(
model.getUUID(), creationTime);
pipelineMeta.license = "H2O-3 Opensource";
pipelineMeta.outputClassLabels = getOutputLabels(model);
this.root = new MojoTransformExecPipeBuilder(inputIndices, outputIndices, transform, pipelineMeta);
// TODO: Is this correct, it adds same transform again to the root.
// this.root.transforms.add(transform);
}
/**
 * Appends one input column per model feature to {@code columns}.
 * Categorical columns (those with domain values) become Str, the rest Float64.
 *
 * @return indices (into {@code columns}) of the appended input columns
 */
static int[] readInputIndices(final List<MojoColumnMeta> columns, final GenModel genModel) {
final int[] inputIndices = new int[genModel.getNumCols()];
for (int i = 0; i < genModel.getNumCols(); i += 1) {
final String columnName = genModel.getNames()[i];
final MojoColumn.Type columnType = (genModel.getDomainValues(i) == null) ? MojoColumn.Type.Float64 : MojoColumn.Type.Str;
inputIndices[i] = columns.size();
columns.add(MojoColumnMeta.create(columnName, columnType));
}
return inputIndices;
}
/**
 * Appends the output columns for the model category to {@code columns}:
 * one "&lt;response&gt;.&lt;class&gt;" Float64 column per response class for (bi/multi)nomial
 * models, a single response column for regression, and one Float64 column per
 * reconstructed output for auto-encoders.
 *
 * @return indices (into {@code columns}) of the appended output columns
 * @throws UnsupportedOperationException for any other model category
 */
private static int[] readOutputIndices(final List<MojoColumnMeta> columns, final GenModel genModel) {
final int[] outputIndices;
switch (genModel.getModelCategory()) {
case Binomial:
case Multinomial: {
outputIndices = new int[genModel.getNumResponseClasses()];
for (int i = 0; i < genModel.getNumResponseClasses(); i += 1) {
final String columnName = genModel.getResponseName() + "." + genModel.getDomainValues(genModel.getResponseIdx())[i];
outputIndices[i] = columns.size();
columns.add(MojoColumnMeta.create(columnName, MojoColumn.Type.Float64));
}
return outputIndices;
}
case Regression: {
final MojoColumnMeta column = MojoColumnMeta.create(genModel.getResponseName(), MojoColumn.Type.Float64);
outputIndices = new int[]{columns.size()};
columns.add(column);
return outputIndices;
}
case AutoEncoder: {
String[] columnNames = genModel.getOutputNames();
outputIndices = new int[columnNames.length];
for (int i = 0; i < columnNames.length; i++) {
final String columnName = columnNames[i];
outputIndices[i] = columns.size();
// AutoEncoder output are stored in double arrays, which can store both float and integer
columns.add(MojoColumnMeta.create(columnName, MojoColumn.Type.Float64));
}
return outputIndices;
}
default:
throw new UnsupportedOperationException("Unsupported ModelCategory: " + genModel.getModelCategory().toString());
}
}
/**
 * Builds the output class labels; the naming mirrors
 * {@link #readOutputIndices(List, GenModel)} for each model category.
 *
 * @throws UnsupportedOperationException for unsupported model categories
 */
private static List<String> getOutputLabels(final GenModel genModel) {
String [] outputLabels;
switch (genModel.getModelCategory()) {
case Binomial:
case Multinomial: {
outputLabels = new String[genModel.getNumResponseClasses()];
for (int i = 0; i < genModel.getNumResponseClasses(); i += 1) {
final String columnName = genModel.getResponseName() + "." + genModel.getDomainValues(genModel.getResponseIdx())[i];
outputLabels[i] = columnName;
}
return new ArrayList<>(Arrays.asList(outputLabels));
}
case Regression: {
outputLabels = new String[]{genModel.getResponseName()};
return new ArrayList<>(Arrays.asList(outputLabels));
}
case AutoEncoder: {
String[] columnNames = genModel.getOutputNames();
return new ArrayList<>(Arrays.asList(columnNames));
}
default:
throw new UnsupportedOperationException("Unsupported ModelCategory: " + genModel.getModelCategory().toString());
}
}
@Override
public List<MojoColumnMeta> getColumns() {
return globalColumns;
}
@Override
public List<MojoTransformMeta> getTransformations() {
return root.metaTransforms;
}
@Override
protected final MojoPipeline internalLoad() {
return new MojoPipelineProtoImpl(globalColumns, root, config);
}
/**
 * Wraps the specified {@link MojoModel} as an {@link EasyPredictModelWrapper} with
 * configuration to behave similar to Mojo2 behavior.
 * <p>
 * This includes configuring the wrapper to tolerate and ignore (by forcing to NA) bad input
 * without throwing an exception.
 */
static EasyPredictModelWrapper wrapModelForPrediction(MojoModel model, boolean shapEnabled) throws IOException {
EasyPredictModelWrapper.Config config = new EasyPredictModelWrapper.Config()
.setModel(model)
.setConvertUnknownCategoricalLevelsToNa(true)
.setConvertInvalidNumbersToNa(true);
if (shapEnabled) config.setEnableContributions(true);
return new EasyPredictModelWrapper(config);
}
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime/h2o3/H2O3PipelineLoaderFactory.java
|
package ai.h2o.mojos.runtime.h2o3;
import ai.h2o.mojos.runtime.api.PipelineConfig;
import ai.h2o.mojos.runtime.api.PipelineLoader;
import ai.h2o.mojos.runtime.api.PipelineLoaderFactory;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import java.io.IOException;
/** Factory registering the H2O-3 loader under the name "h2o3" for MOJOs rooted at "model.ini". */
public class H2O3PipelineLoaderFactory implements PipelineLoaderFactory {

  /**
   * Creates a loader for an H2O-3 MOJO. Only the default root resource
   * ({@code "model.ini"}) or a {@code null} main file are accepted.
   */
  @Override
  public PipelineLoader createLoader(ReaderBackend backend, String optionalMainFile, PipelineConfig config) throws IOException {
    final boolean defaultResource = optionalMainFile == null || optionalMainFile.equals(getRootResource());
    if (!defaultResource) {
      throw new UnsupportedOperationException(optionalMainFile);
    }
    return new H2O3PipelineLoader(backend, config);
  }

  /** @return the loader name, {@code "h2o3"}. */
  @Override
  public String getName() {
    return "h2o3";
  }

  /** @return the resource identifying an H2O-3 MOJO archive, {@code "model.ini"}. */
  @Override
  public String getRootResource() {
    return "model.ini";
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime/h2o3/H2O3Transform.java
|
package ai.h2o.mojos.runtime.h2o3;
import ai.h2o.mojos.runtime.frame.MojoColumnFloat64;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.ShapCapableTransform;
import hex.ModelCategory;
import hex.genmodel.GenModel;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowData;
import hex.genmodel.easy.exception.PredictException;
import hex.genmodel.easy.prediction.AutoEncoderModelPrediction;
import hex.genmodel.easy.prediction.BinomialModelPrediction;
import hex.genmodel.easy.prediction.MultinomialModelPrediction;
import hex.genmodel.easy.prediction.RegressionModelPrediction;
/**
 * A MOJO2 pipeline implementation that uses an H2O-3 (or Sparkling Water)
 * MOJO as the predictor inside. The intent is to provide as identical an
 * experience to the MOJO2 API as possible.
 * <p>
 * A non-goal is to expose every possible low-level H2O-3 MOJO API capability.
 * If you want to do that, call the H2O-3 MOJO API directly, instead.
 */
public class H2O3Transform extends ShapCapableTransform {

  private final GenModel genModel;
  private final EasyPredictModelWrapper easyPredictModelWrapper;

  /**
   * A MOJO2 transformer implementation that uses an H2O-3 (or Sparkling Water) MOJO as the predictor inside.
   * <p>
   * Must provide a valid Binomial, Multinomial or Regression model.
   * Other model types not currently supported.
   * <p>
   * Note: later, we might consider splitting this into one class per each supported model type, to more closely represent underlying H2O-3 algos.
   *
   * @param easyPredictModelWrapper H2O-3 MOJO model.
   */
  H2O3Transform(MojoFrameMeta meta, int[] iindices, int[] oindices, EasyPredictModelWrapper easyPredictModelWrapper) {
    super(iindices, oindices);
    this.easyPredictModelWrapper = easyPredictModelWrapper;
    this.genModel = easyPredictModelWrapper.m;
  }

  /**
   * Predicts every row of {@code frame} and writes results into the output columns.
   * Input cells are passed to H2O-3 as strings; null cells are simply omitted from
   * the row, which H2O-3 treats as missing.
   *
   * @throws UnsupportedOperationException for unsupported model categories or when
   *         the underlying prediction fails (the cause's message is embedded)
   */
  @Override
  public void transform(final MojoFrame frame) {
    final ModelCategory modelCategory = genModel.getModelCategory();
    final int colCount = iindices.length;
    final int rowCount = frame.getNrows();
    // Snapshot every input column as strings once, up front.
    final String[][] columns = new String[colCount][];
    for (int j = 0; j < colCount; j += 1) {
      final int iidx = iindices[j];
      columns[j] = frame.getColumn(iidx).getDataAsStrings();
    }
    // TODO: Need to verify whether row by row prediction is H2O3 limitation and improve it
    for (int rowIdx = 0; rowIdx < rowCount; rowIdx++) {
      final RowData rowData = new RowData();
      for (int colIdx = 0; colIdx < colCount; colIdx++) {
        final int iidx = iindices[colIdx];
        final String key = frame.getColumnName(iidx);
        final String value = columns[colIdx][rowIdx];
        if (value != null) {
          rowData.put(key, value);
        }
      }
      try {
        switch (modelCategory) {
          case Binomial: {
            final BinomialModelPrediction p = easyPredictModelWrapper.predictBinomial(rowData);
            setPrediction(frame, rowIdx, p.classProbabilities);
          }
          break;
          case Multinomial: {
            final MultinomialModelPrediction p = easyPredictModelWrapper.predictMultinomial(rowData);
            setPrediction(frame, rowIdx, p.classProbabilities);
          }
          break;
          case AutoEncoder: {
            final AutoEncoderModelPrediction p = easyPredictModelWrapper.predictAutoEncoder(rowData);
            setPrediction(frame, rowIdx, p.reconstructed);
          }
          break;
          case Regression: {
            final RegressionModelPrediction p = easyPredictModelWrapper.predictRegression(rowData);
            final MojoColumnFloat64 col = (MojoColumnFloat64) frame.getColumn(oindices[0]);
            final double[] darr = (double[]) col.getData();
            darr[rowIdx] = p.value;
          }
          break;
          default:
            throw new UnsupportedOperationException("Unsupported ModelCategory: " + modelCategory.toString());
        }
      } catch (UnsupportedOperationException e) {
        // Re-throw our own category error unchanged rather than re-wrapping it below.
        throw e;
      } catch (PredictException e) {
        if (ai.h2o.mojos.runtime.utils.Debug.getPrintH2O3Exceptions()) e.printStackTrace();
        throw new UnsupportedOperationException(String.format("%s failed: %s", modelCategory, e.getMessage()));
      } catch (Exception e) {
        if (ai.h2o.mojos.runtime.utils.Debug.getPrintH2O3Exceptions()) e.printStackTrace();
        throw new UnsupportedOperationException(String.format("%s failed with %s: %s", modelCategory, e.getClass().getName(), e.getMessage()));
      }
    }
  }

  /** Writes one probability/value per output column for the given row. */
  private void setPrediction(MojoFrame frame, int rowIdx, double[] classProbabilities) {
    for (int outputColIdx = 0; outputColIdx < oindices.length; outputColIdx++) {
      final int oidx = oindices[outputColIdx];
      final MojoColumnFloat64 col = (MojoColumnFloat64) frame.getColumn(oidx);
      final double[] darr = (double[]) col.getData();
      darr[rowIdx] = classProbabilities[outputColIdx];
    }
  }

  /**
   * Computes SHAP contributions for each row in {@code shapContribs} from the same
   * {@code inputs} values. Only Binomial and Regression models support contributions.
   * (Fixed: removed an unused local {@code float bias}; hoisted loop-invariant
   * feature-name and column-count lookups out of the per-row loop.)
   *
   * @throws UnsupportedOperationException for other categories or prediction failures
   */
  public void computeShap(final double[] inputs, final double[][] shapContribs) {
    final ModelCategory modelCategory = genModel.getModelCategory();
    try {
      switch (modelCategory) {
        case Binomial:
        case Regression: {
          final String[] featureNames = genModel.features();
          final int colCount = iindices.length;
          for (double[] contribs : shapContribs) {
            final RowData rowData = new RowData();
            for (int colIdx = 0; colIdx < colCount; colIdx++) {
              final int iidx = iindices[colIdx];
              rowData.put(featureNames[iidx], String.valueOf(inputs[colIdx]));
            }
            final float[] contribPreds = easyPredictModelWrapper.predictContributions(rowData);
            for (int j = 0; j < contribPreds.length; j++) {
              contribs[j] = contribPreds[j];
            }
            // contribPreds[inputs.length] holds the bias term.
            assert inputs.length + 1 == contribPreds.length;
          }
        }
        break;
        default:
          throw new UnsupportedOperationException("Unsupported ModelCategory: " + modelCategory.toString());
      }
    } catch (Exception e) {
      if (ai.h2o.mojos.runtime.utils.Debug.getPrintH2O3Exceptions()) e.printStackTrace();
      throw new UnsupportedOperationException(String.format("%s failed with %s: %s", modelCategory, e.getClass().getName(), e.getMessage()));
    }
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime/h2o3/KLimePipelineLoader.java
|
package ai.h2o.mojos.runtime.h2o3;
import ai.h2o.mojos.runtime.AbstractPipelineLoader;
import ai.h2o.mojos.runtime.MojoPipeline;
import ai.h2o.mojos.runtime.MojoPipelineMeta;
import ai.h2o.mojos.runtime.MojoPipelineProtoImpl;
import ai.h2o.mojos.runtime.api.MojoColumnMeta;
import ai.h2o.mojos.runtime.api.MojoTransformMeta;
import ai.h2o.mojos.runtime.api.PipelineConfig;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoColumn;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformExecPipeBuilder;
import hex.genmodel.GenModel;
import hex.genmodel.MojoModel;
import hex.genmodel.MojoReaderBackend;
import hex.genmodel.easy.EasyPredictModelWrapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.joda.time.DateTime;
import static ai.h2o.mojos.runtime.h2o3.H2O3PipelineLoader.wrapModelForPrediction;
/**
 * Loads a k-LIME MOJO (built on the H2O-3 MOJO framework) and exposes it as a
 * MOJO2 pipeline consisting of a single {@link KlimeTransform}.
 */
class KLimePipelineLoader extends AbstractPipelineLoader {

  /** Combined metadata of all input and output columns of the pipeline frame. */
  private final List<MojoColumnMeta> globalColumns;
  /** Execution-pipe builder holding the single k-LIME transform plus pipeline metadata. */
  private final MojoTransformExecPipeBuilder root;

  /**
   * Reads the k-LIME model from the backend, wraps it for row-wise prediction and
   * assembles the single-transform execution pipe.
   *
   * @param backend resource reader that contains the MOJO archive
   * @param config  pipeline configuration; {@code isShapEnabled()} turns on SHAP contributions
   * @throws IOException when the model cannot be read from the backend
   */
  public KLimePipelineLoader(ReaderBackend backend, PipelineConfig config) throws IOException {
    super(backend, config);
    final MojoReaderBackend mojoReader = new H2O3BackendAdapter(backend);
    final MojoModel model = MojoModel.load(mojoReader);
    final EasyPredictModelWrapper easyPredictModelWrapper = wrapModelForPrediction(model, config.isShapEnabled());
    final String name = "klime:" + model.getModelCategory().toString();
    this.globalColumns = new ArrayList<>();
    final int[] inputIndices = readInputIndices(globalColumns, model);
    final int[] outputIndices = readOutputIndices(globalColumns, model);
    final MojoFrameMeta globalMeta = new MojoFrameMeta(globalColumns);
    final MojoTransform transform = new KlimeTransform(globalMeta, inputIndices, outputIndices, easyPredictModelWrapper);
    transform.setId("klime-main");
    transform.setName(name);
    final DateTime creationTime = new DateTime(1970, 1, 1, 0, 0); //TODO
    final MojoPipelineMeta pipelineMeta = new MojoPipelineMeta(
        model.getUUID(), creationTime);
    pipelineMeta.license = "H2O-3 Opensource";
    this.root = new MojoTransformExecPipeBuilder(inputIndices, outputIndices, transform, pipelineMeta);
    this.root.transforms.add(transform);
  }

  @Override
  public List<MojoColumnMeta> getColumns() {
    return globalColumns;
  }

  @Override
  public List<MojoTransformMeta> getTransformations() {
    return root.metaTransforms;
  }

  @Override
  protected final MojoPipeline internalLoad() {
    // Fixed: previously passed PipelineConfig.DEFAULT here, silently discarding the
    // configuration this loader was constructed with (H2O3PipelineLoader passes it through).
    return new MojoPipelineProtoImpl(globalColumns, root, config);
  }

  /**
   * Appends one input column per model feature; categorical columns (those with
   * domain values) become Str, the rest Float64.
   *
   * @return indices (into {@code columns}) of the appended input columns
   */
  static int[] readInputIndices(final List<MojoColumnMeta> columns, final GenModel genModel) {
    final int[] inputIndices = new int[genModel.getNumCols()];
    for (int i = 0; i < genModel.getNumCols(); i += 1) {
      final String columnName = genModel.getNames()[i];
      final MojoColumn.Type columnType = (genModel.getDomainValues(i) == null) ? MojoColumn.Type.Float64 : MojoColumn.Type.Str;
      inputIndices[i] = columns.size();
      columns.add(MojoColumnMeta.create(columnName, columnType));
    }
    return inputIndices;
  }

  /**
   * Appends the k-LIME output columns: predicted response, cluster id, then one
   * reason-code ("rc_&lt;predictor&gt;") column per predictor.
   *
   * @return indices (into {@code columns}) of the appended output columns
   */
  private static int[] readOutputIndices(final List<MojoColumnMeta> columns, final GenModel genModel) {
    // TODO following is a bit strange exercise, let's check with MM and/or Navdeep why is that
    final List<String> mypredictorsList = new ArrayList<>(Arrays.asList(genModel.getNames()));
    mypredictorsList.remove(genModel.getResponseName());
    final String[] predictors = mypredictorsList.toArray(new String[0]);
    final int predsSize = genModel.getPredsSize();
    final int[] outputIndices = new int[predsSize];
    outputIndices[0] = columns.size();
    columns.add(MojoColumnMeta.create(genModel.getResponseName(), MojoColumn.Type.Float64));
    outputIndices[1] = columns.size();
    columns.add(MojoColumnMeta.create("cluster", MojoColumn.Type.Float64));
    for (int i = 2; i < predsSize; i += 1) {
      outputIndices[i] = columns.size();
      final String outputColumnName = "rc_" + predictors[i - 2]; // "rc" stands for Reason Code
      columns.add(MojoColumnMeta.create(outputColumnName, MojoColumn.Type.Float64));
    }
    return outputIndices;
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime/h2o3/KLimePipelineLoaderFactory.java
|
package ai.h2o.mojos.runtime.h2o3;
import ai.h2o.mojos.runtime.api.PipelineConfig;
import ai.h2o.mojos.runtime.api.PipelineLoader;
import ai.h2o.mojos.runtime.api.PipelineLoaderFactory;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import java.io.IOException;
/** Factory registering the k-LIME loader under the name "klime" for MOJOs rooted at "klime.ini". */
public class KLimePipelineLoaderFactory implements PipelineLoaderFactory {

  /**
   * Creates a loader for a k-LIME MOJO. Only the default root resource
   * ({@code "klime.ini"}) or a {@code null} main file are accepted.
   */
  @Override
  public PipelineLoader createLoader(ReaderBackend backend, String optionalMainFile, PipelineConfig config) throws IOException {
    final boolean defaultResource = optionalMainFile == null || optionalMainFile.equals(getRootResource());
    if (!defaultResource) {
      throw new UnsupportedOperationException(optionalMainFile);
    }
    return new KLimePipelineLoader(backend, config);
  }

  /** @return the loader name, {@code "klime"}. */
  @Override
  public String getName() {
    return "klime";
  }

  /** @return the resource identifying a k-LIME MOJO archive, {@code "klime.ini"}. */
  @Override
  public String getRootResource() {
    return "klime.ini";
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-h2o3-impl/2.8.9/ai/h2o/mojos/runtime/h2o3/KlimeTransform.java
|
package ai.h2o.mojos.runtime.h2o3;
import ai.h2o.mojos.runtime.frame.MojoColumnFloat64;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import hex.ModelCategory;
import hex.genmodel.GenModel;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowData;
import hex.genmodel.easy.exception.PredictException;
import hex.genmodel.easy.prediction.KLimeModelPrediction;
/**
 * A MOJO2 pipeline implementation that uses a k-LIME MOJO (built on the H2O-3 MOJO
 * framework) as the predictor inside. The intent is to provide as identical an
 * experience to the MOJO2 API as possible.
 */
public class KlimeTransform extends MojoTransform {
private final EasyPredictModelWrapper easyPredictModelWrapper;
private final GenModel genModel;
/**
 * A MOJO2 transformer implementation that uses a k-LIME MOJO (built on H2O-3 MOJO framework) as the predictor inside.
 *
 * @param easyPredictModelWrapper H2O-3 MOJO model.
 */
KlimeTransform(MojoFrameMeta meta, int[] iindices, int[] oindices, EasyPredictModelWrapper easyPredictModelWrapper) {
super(iindices, oindices);
this.easyPredictModelWrapper = easyPredictModelWrapper;
this.genModel = easyPredictModelWrapper.m;
}
/**
 * Predicts every row of {@code frame}, writing the k-LIME prediction, cluster id and
 * reason codes into the output columns. Input cells are fed to H2O-3 as strings;
 * null cells are omitted (treated as missing by H2O-3).
 *
 * @throws UnsupportedOperationException when the underlying prediction fails
 *     (the cause's message is embedded)
 */
@Override
public void transform(final MojoFrame frame) {
final ModelCategory modelCategory = genModel.getModelCategory();
final int colCount = iindices.length;
final int rowCount = frame.getNrows();
// Snapshot every input column as strings once, up front.
final String[][] columns = new String[colCount][];
for (int j = 0; j < colCount; j += 1) {
final int iidx = iindices[j];
columns[j] = frame.getColumn(iidx).getDataAsStrings();
}
for (int rowIdx = 0; rowIdx < rowCount; rowIdx++) {
final RowData rowData = new RowData();
for (int colIdx = 0; colIdx < colCount; colIdx++) {
final int iidx = iindices[colIdx];
final String key = frame.getColumnName(iidx);
final String value = columns[colIdx][rowIdx];
if (value != null) {
rowData.put(key, value);
}
}
try {
final KLimeModelPrediction p = easyPredictModelWrapper.predictKLime(rowData);
// Output layout matches KLimePipelineLoader.readOutputIndices:
// column 0 = prediction, column 1 = cluster id, columns 2.. = reason codes.
for (int outputColIdx = 0; outputColIdx < genModel.getPredsSize(); outputColIdx++) {
final MojoColumnFloat64 col = (MojoColumnFloat64) frame.getColumn(oindices[outputColIdx]);
final double[] darr = (double[]) col.getData();
switch (outputColIdx) {
case 0:
darr[rowIdx] = p.value;
break;
case 1:
darr[rowIdx] = p.cluster;
break;
default:
darr[rowIdx] = p.reasonCodes[outputColIdx - 2];
break;
}
}
} catch (PredictException e) {
if (ai.h2o.mojos.runtime.utils.Debug.getPrintH2O3Exceptions()) e.printStackTrace();
throw new UnsupportedOperationException(String.format("%s failed: %s", modelCategory, e.getMessage()));
} catch (Exception e) {
if (ai.h2o.mojos.runtime.utils.Debug.getPrintH2O3Exceptions()) e.printStackTrace();
throw new UnsupportedOperationException(String.format("%s failed with %s: %s", modelCategory, e.getClass().getName(), e.getMessage()));
}
}
}
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/Configuration.java
|
package org.reflections;
import com.google.common.base.Predicate;
import org.reflections.adapters.MetadataAdapter;
import org.reflections.scanners.Scanner;
import org.reflections.serializers.Serializer;
import javax.annotation.Nullable;
import java.net.URL;
import java.util.Set;
import java.util.concurrent.ExecutorService;
/**
 * Configuration is used to create a configured instance of {@link Reflections}
 * <p>it is preferred to use {@link org.reflections.util.ConfigurationBuilder}
 */
public interface Configuration {
/** the scanner instances used for scanning different metadata */
Set<Scanner> getScanners();
/** the urls to be scanned */
Set<URL> getUrls();
/** the metadata adapter used to fetch metadata from classes */
@SuppressWarnings({"RawUseOfParameterizedType"})
MetadataAdapter getMetadataAdapter();
/** get the fully qualified name filter used to filter types to be scanned; may be null (no filtering) */
@Nullable
Predicate<String> getInputsFilter();
/** executor service used to scan files. if null, scanning is done in a simple for loop */
ExecutorService getExecutorService();
/** the default serializer to use when saving the Reflections store */
Serializer getSerializer();
/** get class loaders, might be used for resolving methods/fields; may be null (use defaults) */
@Nullable
ClassLoader[] getClassLoaders();
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/ReflectionUtils.java
|
package org.reflections;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.reflections.util.ClasspathHelper;
import javax.annotation.Nullable;
import java.lang.annotation.Annotation;
import java.lang.reflect.*;
import java.util.*;
import java.util.regex.Pattern;
import static org.reflections.util.Utils.isEmpty;
/** convenient java reflection helper methods
* <p>
* 1. some helper methods to get type by name: {@link #forName(String, ClassLoader...)} and {@link #forNames(Iterable, ClassLoader...)}
* <p>
* 2. some helper methods to get all types/methods/fields/constructors/properties matching some predicates, generally:
* <pre> Set<?> result = getAllXXX(type/s, withYYY) </pre>
* <p>where get methods are:
* <ul>
* <li>{@link #getAllSuperTypes(Class, com.google.common.base.Predicate...)}
* <li>{@link #getAllFields(Class, com.google.common.base.Predicate...)}
* <li>{@link #getAllMethods(Class, com.google.common.base.Predicate...)}
* <li>{@link #getAllConstructors(Class, com.google.common.base.Predicate...)}
* </ul>
* <p>and predicates included here all starts with "with", such as
* <ul>
* <li>{@link #withAnnotation(java.lang.annotation.Annotation)}
* <li>{@link #withModifier(int)}
* <li>{@link #withName(String)}
* <li>{@link #withParameters(Class[])}
* <li>{@link #withAnyParameterAnnotation(Class)}
* <li>{@link #withParametersAssignableTo(Class[])}
* <li>{@link #withPrefix(String)}
* <li>{@link #withReturnType(Class)}
* <li>{@link #withType(Class)}
* <li>{@link #withTypeAssignableTo}
* </ul>
*
* <p><br>
* for example, getting all getters would be:
* <pre>
* Set<Method> getters = getAllMethods(someClasses,
* Predicates.and(
* withModifier(Modifier.PUBLIC),
* withPrefix("get"),
* withParametersCount(0)));
* </pre>
* */
@SuppressWarnings("unchecked")
public abstract class ReflectionUtils {
/** when true, {@link #getAllSuperTypes(Class, com.google.common.base.Predicate[])} (and the getAll* methods built on it) also include {@code Object.class}; default is false. */
public static boolean includeObject = false;
/** get all super types of given {@code type} (including {@code type} itself), optionally filtered by {@code predicates}
 * <p> include {@code Object.class} if {@link #includeObject} is true */
public static Set<Class<?>> getAllSuperTypes(final Class<?> type, Predicate<? super Class<?>>... predicates) {
// LinkedHashSet keeps the traversal order: type first, then superclass chain, then interfaces.
Set<Class<?>> result = Sets.newLinkedHashSet();
if (type != null && (includeObject || !type.equals(Object.class))) {
result.add(type);
// Recurse up the superclass chain and into every implemented interface.
result.addAll(getAllSuperTypes(type.getSuperclass()));
for (Class<?> ifc : type.getInterfaces()) result.addAll(getAllSuperTypes(ifc));
}
return filter(result, predicates);
}
/** get all methods of given {@code type}, up the super class hierarchy, optionally filtered by {@code predicates} */
public static Set<Method> getAllMethods(final Class<?> type, Predicate<? super Method>... predicates) {
Set<Method> methods = Sets.newHashSet();
for (Class<?> clazz : getAllSuperTypes(type)) {
methods.addAll(getMethods(clazz, predicates));
}
return methods;
}
/** get methods of given {@code type}, optionally filtered by {@code predicates}.
 * Interfaces contribute their public methods; classes contribute all declared methods (any visibility). */
public static Set<Method> getMethods(Class<?> t, Predicate<? super Method>... predicates) {
return filter(t.isInterface() ? t.getMethods() : t.getDeclaredMethods(), predicates);
}
/** get all constructors of given {@code type}, up the super class hierarchy, optionally filtered by {@code predicates} */
public static Set<Constructor> getAllConstructors(final Class<?> type, Predicate<? super Constructor>... predicates) {
Set<Constructor> constructors = Sets.newHashSet();
for (Class<?> clazz : getAllSuperTypes(type)) {
constructors.addAll(getConstructors(clazz, predicates));
}
return constructors;
}
/** get constructors of given {@code type}, optionally filtered by {@code predicates} */
public static Set<Constructor> getConstructors(Class<?> t, Predicate<? super Constructor>... predicates) {
return ReflectionUtils.<Constructor>filter(t.getDeclaredConstructors(), predicates); //explicit needed only for jdk1.5
}
/** get all fields of given {@code type}, up the super class hierarchy, optionally filtered by {@code predicates} */
public static Set<Field> getAllFields(final Class<?> type, Predicate<? super Field>... predicates) {
Set<Field> fields = Sets.newHashSet();
for (Class<?> clazz : getAllSuperTypes(type)) {
fields.addAll(getFields(clazz, predicates));
}
return fields;
}
/** get fields of given {@code type} (declared on that class only, any visibility), optionally filtered by {@code predicates} */
public static Set<Field> getFields(Class<?> type, Predicate<? super Field>... predicates) {
return filter(type.getDeclaredFields(), predicates);
}
/** get all annotations of given {@code type}, up the super class hierarchy, optionally filtered by {@code predicates}.
 * For non-Class elements (methods, fields, ...) only the element's own declared annotations are returned. */
public static <T extends AnnotatedElement> Set<Annotation> getAllAnnotations(T type, Predicate<Annotation>... predicates) {
Set<Annotation> result = Sets.newHashSet();
if (type instanceof Class) {
// Classes: collect declared annotations from the whole super-type closure.
for (Class<?> t : getAllSuperTypes((Class<?>) type)) {
result.addAll(getAnnotations(t, predicates));
}
} else {
result.addAll(getAnnotations(type, predicates));
}
return result;
}
/** get annotations declared directly on the given {@code type}, optionally filtered by {@code predicates} */
public static <T extends AnnotatedElement> Set<Annotation> getAnnotations(T type, Predicate<Annotation>... predicates) {
return filter(type.getDeclaredAnnotations(), predicates);
}
/** filter all given {@code elements} with {@code predicates}, if given; when no predicates are supplied the input set is returned as-is (not copied) */
public static <T extends AnnotatedElement> Set<T> getAll(final Set<T> elements, Predicate<? super T>... predicates) {
return isEmpty(predicates) ? elements : Sets.newHashSet(Iterables.filter(elements, Predicates.and(predicates)));
}
//predicates
/** where member name equals given {@code name}; a null input never matches */
public static <T extends Member> Predicate<T> withName(final String name) {
  return new Predicate<T>() {
    public boolean apply(@Nullable T input) {
      if (input == null) {
        return false;
      }
      return input.getName().equals(name);
    }
  };
}
/** where member name startsWith given {@code prefix}; a null input never matches */
public static <T extends Member> Predicate<T> withPrefix(final String prefix) {
  return new Predicate<T>() {
    public boolean apply(@Nullable T input) {
      if (input == null) {
        return false;
      }
      return input.getName().startsWith(prefix);
    }
  };
}
/** where member's {@code toString} matches given {@code regex}
 * <p>for example:
 * <pre>
 * getAllMethods(someClass, withPattern("public void .*"))
 * </pre>
 * <p>Fixed: a null input no longer throws NullPointerException — it simply does not
 * match, consistent with the null handling of the other predicates in this class.
 * */
public static <T extends AnnotatedElement> Predicate<T> withPattern(final String regex) {
  return new Predicate<T>() {
    public boolean apply(@Nullable T input) {
      return input != null && Pattern.matches(regex, input.toString());
    }
  };
}
/** where element is annotated with given {@code annotation}; a null input never matches */
public static <T extends AnnotatedElement> Predicate<T> withAnnotation(final Class<? extends Annotation> annotation) {
  return new Predicate<T>() {
    public boolean apply(@Nullable T input) {
      if (input == null) {
        return false;
      }
      return input.isAnnotationPresent(annotation);
    }
  };
}
/** where element is annotated with exactly the given {@code annotations} (same types, same order); a null input never matches */
public static <T extends AnnotatedElement> Predicate<T> withAnnotations(final Class<? extends Annotation>... annotations) {
  return new Predicate<T>() {
    public boolean apply(@Nullable T input) {
      if (input == null) {
        return false;
      }
      return Arrays.equals(annotations, annotationTypes(input.getAnnotations()));
    }
  };
}
/** Predicate matching elements annotated with the given {@code annotation}, comparing annotation member values too. */
public static <T extends AnnotatedElement> Predicate<T> withAnnotation(final Annotation annotation) {
    return new Predicate<T>() {
        public boolean apply(@Nullable T input) {
            if (input == null) return false;
            final Class<? extends Annotation> type = annotation.annotationType();
            return input.isAnnotationPresent(type) &&
                    areAnnotationMembersMatching(input.getAnnotation(type), annotation);
        }
    };
}
/** where element is annotated with given {@code annotations}, including member matching.
 * <p>Fixed: the previous implementation returned {@code true} whenever the element's annotation
 * count differed from the expected count (the comparison loop was silently skipped), and also
 * matched a null element. Both now correctly fail to match. */
public static <T extends AnnotatedElement> Predicate<T> withAnnotations(final Annotation... annotations) {
    return new Predicate<T>() {
        public boolean apply(@Nullable T input) {
            if (input == null) return false;
            Annotation[] inputAnnotations = input.getAnnotations();
            // a count mismatch can never be a full match
            if (inputAnnotations.length != annotations.length) return false;
            for (int i = 0; i < inputAnnotations.length; i++) {
                if (!areAnnotationMembersMatching(inputAnnotations[i], annotations[i])) return false;
            }
            return true;
        }
    };
}
/** Predicate matching methods/constructors whose parameter types equal the given {@code types}, in order. */
public static Predicate<Member> withParameters(final Class<?>... types) {
    return new Predicate<Member>() {
        public boolean apply(@Nullable Member input) {
            final Class<?>[] actual = parameterTypes(input);
            return Arrays.equals(actual, types);
        }
    };
}
/** when member parameter types assignable to given {@code types}
 * <p>each declared parameter must be assignable from the corresponding query type; counts must match */
public static Predicate<Member> withParametersAssignableTo(final Class... types) {
    return new Predicate<Member>() {
        public boolean apply(@Nullable Member input) {
            if (input != null) {
                Class<?>[] parameterTypes = parameterTypes(input);
                if (parameterTypes.length == types.length) {
                    for (int i = 0; i < parameterTypes.length; i++) {
                        // a declared Object parameter only matches an Object query type;
                        // otherwise Object would trivially accept everything and the predicate be useless
                        if (!parameterTypes[i].isAssignableFrom(types[i]) ||
                                (parameterTypes[i] == Object.class && types[i] != Object.class)) {
                            return false;
                        }
                    }
                    return true;
                }
            }
            return false;
        }
    };
}
/** Predicate matching methods/constructors with exactly {@code count} parameters. */
public static Predicate<Member> withParametersCount(final int count) {
    return new Predicate<Member>() {
        public boolean apply(@Nullable Member input) {
            if (input == null) return false;
            return parameterTypes(input).length == count;
        }
    };
}
/** Predicate matching methods/constructors having at least one parameter annotated with the given annotation type. */
public static Predicate<Member> withAnyParameterAnnotation(final Class<? extends Annotation> annotationClass) {
    return new Predicate<Member>() {
        public boolean apply(@Nullable Member input) {
            if (input == null) return false;
            for (Class<? extends Annotation> type : annotationTypes(parameterAnnotations(input))) {
                if (type.equals(annotationClass)) return true;
            }
            return false;
        }
    };
}
/** Predicate matching methods/constructors having at least one parameter annotation equal to the given one (member values included). */
public static Predicate<Member> withAnyParameterAnnotation(final Annotation annotation) {
    return new Predicate<Member>() {
        public boolean apply(@Nullable Member input) {
            if (input == null) return false;
            for (Annotation candidate : parameterAnnotations(input)) {
                if (areAnnotationMembersMatching(annotation, candidate)) return true;
            }
            return false;
        }
    };
}
/** Predicate matching fields whose declared type equals the given {@code type} exactly. */
public static <T> Predicate<Field> withType(final Class<T> type) {
    return new Predicate<Field>() {
        public boolean apply(@Nullable Field input) {
            if (input == null) return false;
            return input.getType().equals(type);
        }
    };
}
/** Predicate matching fields whose declared type is assignable to the given {@code type}. */
public static <T> Predicate<Field> withTypeAssignableTo(final Class<T> type) {
    return new Predicate<Field>() {
        public boolean apply(@Nullable Field input) {
            if (input == null) return false;
            return type.isAssignableFrom(input.getType());
        }
    };
}
/** Predicate matching methods whose return type equals the given {@code type} exactly. */
public static <T> Predicate<Method> withReturnType(final Class<T> type) {
    return new Predicate<Method>() {
        public boolean apply(@Nullable Method input) {
            if (input == null) return false;
            return input.getReturnType().equals(type);
        }
    };
}
/** Predicate matching methods whose return type is assignable to the given {@code type}. */
public static <T> Predicate<Method> withReturnTypeAssignableTo(final Class<T> type) {
    return new Predicate<Method>() {
        public boolean apply(@Nullable Method input) {
            if (input == null) return false;
            return type.isAssignableFrom(input.getReturnType());
        }
    };
}
/** when member modifier matches given {@code mod}
 * <p>for example:
 * <pre>
 * withModifier(Modifier.PUBLIC)
 * </pre>
 */
public static <T extends Member> Predicate<T> withModifier(final int mod) {
    return new Predicate<T>() {
        public boolean apply(@Nullable T input) {
            if (input == null) return false;
            return (input.getModifiers() & mod) != 0;
        }
    };
}
/** when class modifier matches given {@code mod}
 * <p>for example:
 * <pre>
 * withClassModifier(Modifier.PUBLIC)
 * </pre>
 */
public static Predicate<Class<?>> withClassModifier(final int mod) {
    return new Predicate<Class<?>>() {
        public boolean apply(@Nullable Class<?> input) {
            if (input == null) return false;
            return (input.getModifiers() & mod) != 0;
        }
    };
}
//
/** tries to resolve a java type name to a Class
 * <p>if optional {@link ClassLoader}s are not specified, then both {@link org.reflections.util.ClasspathHelper#contextClassLoader()} and {@link org.reflections.util.ClasspathHelper#staticClassLoader()} are used
 * <p>returns null when no class loader can resolve the name; all failures are logged as warnings
 * */
public static Class<?> forName(String typeName, ClassLoader... classLoaders) {
    if (getPrimitiveNames().contains(typeName)) {
        // primitive names ("int", "void", ...) map directly to their Class objects
        return getPrimitiveTypes().get(getPrimitiveNames().indexOf(typeName));
    } else {
        String type;
        if (typeName.contains("[")) {
            // array type: convert the source-style name (e.g. "int[][]" or "Foo[]")
            // into the JVM descriptor form Class.forName expects (e.g. "[[I" or "[LFoo;")
            int i = typeName.indexOf("[");
            type = typeName.substring(0, i);
            String array = typeName.substring(i).replace("]", "");
            if (getPrimitiveNames().contains(type)) {
                type = getPrimitiveDescriptors().get(getPrimitiveNames().indexOf(type));
            } else {
                type = "L" + type + ";";
            }
            type = array + type;
        } else {
            type = typeName;
        }
        List<ReflectionsException> reflectionsExceptions = Lists.newArrayList();
        for (ClassLoader classLoader : ClasspathHelper.classLoaders(classLoaders)) {
            if (type.contains("[")) {
                // array descriptors must go through Class.forName; loadClass does not accept them
                try { return Class.forName(type, false, classLoader); }
                catch (Throwable e) {
                    reflectionsExceptions.add(new ReflectionsException("could not get type for name " + typeName, e));
                }
            }
            try { return classLoader.loadClass(type); }
            catch (Throwable e) {
                reflectionsExceptions.add(new ReflectionsException("could not get type for name " + typeName, e));
            }
        }
        // only log once every class loader has failed
        if (Reflections.log != null) {
            for (ReflectionsException reflectionsException : reflectionsExceptions) {
                Reflections.log.warn("could not get type for name " + typeName + " from any class loader",
                        reflectionsException);
            }
        }
        return null;
    }
}
/** Resolves each given type name via {@link #forName(String, ClassLoader...)}, silently skipping names that cannot be resolved. */
public static <T> List<Class<? extends T>> forNames(final Iterable<String> classes, ClassLoader... classLoaders) {
    final List<Class<? extends T>> resolved = new ArrayList<Class<? extends T>>();
    for (String className : classes) {
        final Class<?> type = forName(className, classLoaders);
        if (type == null) continue;
        resolved.add((Class<? extends T>) type);
    }
    return resolved;
}
/** Parameter types of a method or constructor; null for any other member or a null input. */
private static Class[] parameterTypes(Member member) {
    if (member == null) return null;
    // Method and Constructor are final classes, so exact-class checks are equivalent to instanceof
    if (member.getClass() == Method.class) return ((Method) member).getParameterTypes();
    if (member.getClass() == Constructor.class) return ((Constructor) member).getParameterTypes();
    return null;
}
/** All annotations found on the parameters of the given method or constructor.
 * <p>Fixed: previously threw NullPointerException when the member was neither a Method nor a
 * Constructor (e.g. a Field) — the null annotations array was dereferenced; such members now
 * yield an empty set. */
private static Set<Annotation> parameterAnnotations(Member member) {
    Set<Annotation> result = Sets.newHashSet();
    Annotation[][] annotations =
            member instanceof Method ? ((Method) member).getParameterAnnotations() :
            member instanceof Constructor ? ((Constructor) member).getParameterAnnotations() : null;
    if (annotations != null) {
        for (Annotation[] annotation : annotations) Collections.addAll(result, annotation);
    }
    return result;
}
/** Collects the distinct annotation types of the given annotations. */
private static Set<Class<? extends Annotation>> annotationTypes(Iterable<Annotation> annotations) {
    final Set<Class<? extends Annotation>> types = Sets.newHashSet();
    for (Annotation each : annotations) {
        types.add(each.annotationType());
    }
    return types;
}
/** Maps an annotation array to the parallel array of its annotation types. */
private static Class<? extends Annotation>[] annotationTypes(Annotation[] annotations) {
    final Class<? extends Annotation>[] types = new Class[annotations.length];
    int i = 0;
    for (Annotation each : annotations) {
        types[i++] = each.annotationType();
    }
    return types;
}
//
/** Primitive source names, their Class objects and JVM descriptors, kept index-aligned.
 * <p>Fixed: eager {@code static final} initialization replaces the previous unsynchronized lazy
 * initialization, which was not thread-safe — a racing thread could observe the trio of lists
 * partially initialized. */
private static final List<String> primitiveNames =
        Lists.newArrayList("boolean", "char", "byte", "short", "int", "long", "float", "double", "void");
private static final List<Class> primitiveTypes =
        Lists.<Class>newArrayList(boolean.class, char.class, byte.class, short.class, int.class, long.class, float.class, double.class, void.class);
private static final List<String> primitiveDescriptors =
        Lists.newArrayList("Z", "C", "B", "S", "I", "J", "F", "D", "V");
/** retained for source compatibility; initialization is now eager, so this is a no-op */
private static void initPrimitives() { }
private static List<String> getPrimitiveNames() { return primitiveNames; }
private static List<Class> getPrimitiveTypes() { return primitiveTypes; }
private static List<String> getPrimitiveDescriptors() { return primitiveDescriptors; }
//
/** Filters an array by the given predicates; with no predicates, the array is returned as a set unchanged. */
static <T> Set<T> filter(final T[] elements, Predicate<? super T>... predicates) {
    if (isEmpty(predicates)) {
        return Sets.newHashSet(elements);
    }
    return Sets.newHashSet(Iterables.filter(Arrays.asList(elements), Predicates.and(predicates)));
}
/** Filters an iterable by the given predicates; with no predicates, the elements are returned as a set unchanged. */
static <T> Set<T> filter(final Iterable<T> elements, Predicate<? super T>... predicates) {
    if (isEmpty(predicates)) {
        return Sets.newHashSet(elements);
    }
    return Sets.newHashSet(Iterables.filter(elements, Predicates.and(predicates)));
}
/** true when both annotations have the same annotation type and every declared member of that
 * type returns equal values on both instances; false when {@code annotation2} is null or the
 * types differ.
 * @throws ReflectionsException if a member accessor cannot be invoked reflectively */
private static boolean areAnnotationMembersMatching(Annotation annotation1, Annotation annotation2) {
    if (annotation2 != null && annotation1.annotationType() == annotation2.annotationType()) {
        // every method declared on an annotation type is a member accessor
        for (Method method : annotation1.annotationType().getDeclaredMethods()) {
            try {
                if (!method.invoke(annotation1).equals(method.invoke(annotation2))) return false;
            } catch (Exception e) {
                throw new ReflectionsException(String.format("could not invoke method %s on annotation %s", method.getName(), annotation1.annotationType()), e);
            }
        }
        return true;
    }
    return false;
}
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/Reflections.java
|
package org.reflections;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import org.reflections.scanners.*;
import org.reflections.scanners.Scanner;
import org.reflections.serializers.Serializer;
import org.reflections.serializers.XmlSerializer;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.util.FilterBuilder;
import org.reflections.util.Utils;
import org.reflections.vfs.Vfs;
import org.slf4j.Logger;
import javax.annotation.Nullable;
import java.io.*;
import java.lang.annotation.Annotation;
import java.lang.annotation.Inherited;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.regex.Pattern;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.concat;
import static java.lang.String.format;
import static org.reflections.ReflectionUtils.*;
import static org.reflections.util.Utils.*;
/**
* Reflections one-stop-shop object
* <p>Reflections scans your classpath, indexes the metadata, allows you to query it on runtime and may save and collect that information for many modules within your project.
* <p>Using Reflections you can query your metadata such as:
* <ul>
* <li>get all subtypes of some type
* <li>get all types/constructors/methods/fields annotated with some annotation, optionally with annotation parameters matching
 * <li>get all resources matching a regular expression
* <li>get all methods with specific signature including parameters, parameter annotations and return type
* <li>get all methods parameter names
* <li>get all fields/methods/constructors usages in code
* </ul>
* <p>A typical use of Reflections would be:
* <pre>
* Reflections reflections = new Reflections("my.project.prefix");
*
* Set<Class<? extends SomeType>> subTypes = reflections.getSubTypesOf(SomeType.class);
*
* Set<Class<?>> annotated = reflections.getTypesAnnotatedWith(SomeAnnotation.class);
* </pre>
* <p>Basically, to use Reflections first instantiate it with one of the constructors, then depending on the scanners, use the convenient query methods:
* <pre>
* Reflections reflections = new Reflections("my.package.prefix");
* //or
* Reflections reflections = new Reflections(ClasspathHelper.forPackage("my.package.prefix"),
* new SubTypesScanner(), new TypesAnnotationScanner(), new FilterBuilder().include(...), ...);
*
* //or using the ConfigurationBuilder
* new Reflections(new ConfigurationBuilder()
* .filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix("my.project.prefix")))
* .setUrls(ClasspathHelper.forPackage("my.project.prefix"))
* .setScanners(new SubTypesScanner(), new TypeAnnotationsScanner().filterResultsBy(optionalFilter), ...));
* </pre>
* And then query, for example:
* <pre>
* Set<Class<? extends Module>> modules = reflections.getSubTypesOf(com.google.inject.Module.class);
* Set<Class<?>> singletons = reflections.getTypesAnnotatedWith(javax.inject.Singleton.class);
*
* Set<String> properties = reflections.getResources(Pattern.compile(".*\\.properties"));
* Set<Constructor> injectables = reflections.getConstructorsAnnotatedWith(javax.inject.Inject.class);
* Set<Method> deprecateds = reflections.getMethodsAnnotatedWith(javax.ws.rs.Path.class);
* Set<Field> ids = reflections.getFieldsAnnotatedWith(javax.persistence.Id.class);
*
* Set<Method> someMethods = reflections.getMethodsMatchParams(long.class, int.class);
* Set<Method> voidMethods = reflections.getMethodsReturn(void.class);
* Set<Method> pathParamMethods = reflections.getMethodsWithAnyParamAnnotated(PathParam.class);
* Set<Method> floatToString = reflections.getConverters(Float.class, String.class);
* List<String> parameterNames = reflections.getMethodsParamNames(Method.class);
*
* Set<Member> fieldUsage = reflections.getFieldUsage(Field.class);
* Set<Member> methodUsage = reflections.getMethodUsage(Method.class);
* Set<Member> constructorUsage = reflections.getConstructorUsage(Constructor.class);
* </pre>
* <p>You can use other scanners defined in Reflections as well, such as: SubTypesScanner, TypeAnnotationsScanner (both default),
* ResourcesScanner, MethodAnnotationsScanner, ConstructorAnnotationsScanner, FieldAnnotationsScanner,
* MethodParameterScanner, MethodParameterNamesScanner, MemberUsageScanner or any custom scanner.
* <p>Use {@link #getStore()} to access and query the store directly
* <p>In order to save the store metadata, use {@link #save(String)} or {@link #save(String, org.reflections.serializers.Serializer)}
* for example with {@link org.reflections.serializers.XmlSerializer} or {@link org.reflections.serializers.JavaCodeSerializer}
 * <p>In order to collect pre-saved metadata and avoid re-scanning, use {@link #collect(String, com.google.common.base.Predicate, org.reflections.serializers.Serializer...)}
* <p><i>Make sure to scan all the transitively relevant packages.
* <br>for instance, given your class C extends B extends A, and both B and A are located in another package than C,
* when only the package of C is scanned - then querying for sub types of A returns nothing (transitive), but querying for sub types of B returns C (direct).
* In that case make sure to scan all relevant packages a priori.</i>
* <p><p><p>For Javadoc, source code, and more information about Reflections Library, see http://github.com/ronmamo/reflections/
*/
public class Reflections {
    /** optional logger; null when no logger implementation is found, so every use is null-guarded */
    @Nullable public static Logger log = findLogger(Reflections.class);
    /** the configuration this instance was built with; transient, not part of serialized metadata */
    protected final transient Configuration configuration;
    /** metadata store populated by scanning or via {@link #merge(Reflections)} */
    protected Store store;
    /**
     * constructs a Reflections instance and scan according to given {@link org.reflections.Configuration}
     * <p>it is preferred to use {@link org.reflections.util.ConfigurationBuilder}
     * <p>note: scanning starts immediately from this constructor when the configuration supplies at least one scanner
     */
    public Reflections(final Configuration configuration) {
        this.configuration = configuration;
        store = new Store(configuration);
        if (configuration.getScanners() != null && !configuration.getScanners().isEmpty()) {
            //inject configuration and a per-scanner store section into each scanner before scanning
            for (Scanner scanner : configuration.getScanners()) {
                scanner.setConfiguration(configuration);
                scanner.setStore(store.getOrCreate(scanner.getClass().getSimpleName()));
            }
            scan();
        }
    }
    /**
     * a convenient constructor for scanning within a package prefix.
     * <p>this actually creates a {@link org.reflections.Configuration} with:
     * <br> - urls that contain resources with name {@code prefix}
     * <br> - filterInputsBy where name starts with the given {@code prefix}
     * <br> - scanners set to the given {@code scanners}, otherwise defaults to {@link org.reflections.scanners.TypeAnnotationsScanner} and {@link org.reflections.scanners.SubTypesScanner}.
     * @param prefix package prefix, to be used with {@link org.reflections.util.ClasspathHelper#forPackage(String, ClassLoader...)}
     * @param scanners optionally supply scanners, otherwise defaults to {@link org.reflections.scanners.TypeAnnotationsScanner} and {@link org.reflections.scanners.SubTypesScanner}
     */
    public Reflections(final String prefix, @Nullable final Scanner... scanners) {
        this((Object) prefix, scanners);
    }
    /**
     * a convenient constructor for Reflections, where given {@code Object...} parameter types can be either:
     * <ul>
     *     <li>{@link String} - would add urls using {@link org.reflections.util.ClasspathHelper#forPackage(String, ClassLoader...)} ()}</li>
     *     <li>{@link Class} - would add urls using {@link org.reflections.util.ClasspathHelper#forClass(Class, ClassLoader...)} </li>
     *     <li>{@link ClassLoader} - would use this classloaders in order to find urls in {@link org.reflections.util.ClasspathHelper#forPackage(String, ClassLoader...)} and {@link org.reflections.util.ClasspathHelper#forClass(Class, ClassLoader...)}</li>
     *     <li>{@link org.reflections.scanners.Scanner} - would use given scanner, overriding the default scanners</li>
     *     <li>{@link java.net.URL} - would add the given url for scanning</li>
     *     <li>{@link Object[]} - would use each element as above</li>
     * </ul>
     *
     * use any parameter type in any order. this constructor uses instanceof on each param and instantiate a {@link org.reflections.util.ConfigurationBuilder} appropriately.
     * if you prefer the usual statically typed constructor, don't use this, although it can be very useful.
     *
     * <br><br>for example:
     * <pre>
     *     new Reflections("my.package", classLoader);
     *     //or
     *     new Reflections("my.package", someScanner, anotherScanner, classLoader);
     *     //or
     *     new Reflections(myUrl, myOtherUrl);
     * </pre>
     * <p>all interpretation of the params is delegated to {@link org.reflections.util.ConfigurationBuilder#build(Object...)}
     */
    public Reflections(final Object... params) {
        this(ConfigurationBuilder.build(params));
    }
    /** no-scan constructor used by {@link #collect()}; builds an empty configuration and store to merge into */
    protected Reflections() {
        configuration = new ConfigurationBuilder();
        store = new Store(configuration);
    }
//
    /** scans every configured url, inline or on the configured executor service, then logs a
     * summary of the produced store; a no-op (with a warning) when no urls are configured */
    protected void scan() {
        if (configuration.getUrls() == null || configuration.getUrls().isEmpty()) {
            if (log != null) log.warn("given scan urls are empty. set urls in the configuration");
            return;
        }
        if (log != null && log.isDebugEnabled()) {
            log.debug("going to scan these urls:\n" + Joiner.on("\n").join(configuration.getUrls()));
        }
        long time = System.currentTimeMillis();
        int scannedUrls = 0;
        ExecutorService executorService = configuration.getExecutorService();
        List<Future<?>> futures = Lists.newArrayList();
        for (final URL url : configuration.getUrls()) {
            try {
                if (executorService != null) {
                    // parallel path: submit tasks and collect futures; completion is awaited below
                    futures.add(executorService.submit(new Runnable() {
                        public void run() {
                            if (log != null && log.isDebugEnabled()) log.debug("[" + Thread.currentThread().toString() + "] scanning " + url);
                            scan(url);
                        }
                    }));
                } else {
                    scan(url);
                }
                scannedUrls++;
            } catch (ReflectionsException e) {
                // NOTE(review): in the executor path this catch only covers submit(); a
                // ReflectionsException thrown inside a task surfaces as RuntimeException at
                // future.get() below instead of being logged here -- confirm this asymmetry is intended
                if (log != null && log.isWarnEnabled()) log.warn("could not create Vfs.Dir from url. ignoring the exception and continuing", e);
            }
        }
        //todo use CompletionService
        if (executorService != null) {
            for (Future future : futures) {
                try { future.get(); } catch (Exception e) { throw new RuntimeException(e); }
            }
        }
        time = System.currentTimeMillis() - time;
        if (log != null) {
            // summarize the store: total keys and values across all indices
            int keys = 0;
            int values = 0;
            for (String index : store.keySet()) {
                keys += store.get(index).keySet().size();
                values += store.get(index).size();
            }
            log.info(format("Reflections took %d ms to scan %d urls, producing %d keys and %d values %s",
                    time, scannedUrls, keys, values,
                    executorService != null && executorService instanceof ThreadPoolExecutor ?
                            format("[using %d cores]", ((ThreadPoolExecutor) executorService).getMaximumPoolSize()) : ""));
        }
    }
    /** scans one url: resolves it to a {@link Vfs.Dir}, filters each file by relative path or
     * dotted fqn using the configured inputs filter, and feeds accepted files to every scanner
     * that accepts them; per-file scanner failures are logged at debug level and do not abort the scan */
    protected void scan(URL url) {
        Vfs.Dir dir = Vfs.fromURL(url);
        try {
            for (final Vfs.File file : dir.getFiles()) {
                // scan if inputs filter accepts file relative path or fqn
                Predicate<String> inputsFilter = configuration.getInputsFilter();
                String path = file.getRelativePath();
                String fqn = path.replace('/', '.');
                if (inputsFilter == null || inputsFilter.apply(path) || inputsFilter.apply(fqn)) {
                    Object classObject = null;
                    for (Scanner scanner : configuration.getScanners()) {
                        try {
                            if (scanner.acceptsInput(path) || scanner.acceptResult(fqn)) {
                                // classObject is threaded through so scanners can reuse the parsed representation
                                classObject = scanner.scan(file, classObject);
                            }
                        } catch (Exception e) {
                            if (log != null && log.isDebugEnabled())
                                log.debug("could not scan file " + file.getRelativePath() + " in url " + url.toExternalForm() + " with scanner " + scanner.getClass().getSimpleName(), e);
                        }
                    }
                }
            }
        } finally {
            // always release the Vfs.Dir, even when iteration fails
            dir.close();
        }
    }
    /** collect saved Reflection xml resources and merge it into a Reflections instance
     * <p>by default, resources are collected from all urls that contains the package META-INF/reflections
     * and includes files matching the pattern .*-reflections.xml
     * @return the merged instance, or null when no matching urls are found (see the overload below)
     * */
    public static Reflections collect() {
        return collect("META-INF/reflections/", new FilterBuilder().include(".*-reflections.xml"));
    }
    /**
     * collect saved Reflections resources from all urls that contains the given packagePrefix and matches the given resourceNameFilter
     * and de-serializes them using the default serializer {@link org.reflections.serializers.XmlSerializer} or using the optionally supplied optionalSerializer
     * <p>
     * it is preferred to use a designated resource prefix (for example META-INF/reflections but not just META-INF),
     * so that relevant urls could be found much faster
     * @param optionalSerializer - optionally supply one serializer instance. if not specified or null, {@link org.reflections.serializers.XmlSerializer} will be used
     * @return the merged Reflections instance, or null when no urls contain the given packagePrefix
     */
    public static Reflections collect(final String packagePrefix, final Predicate<String> resourceNameFilter, @Nullable Serializer... optionalSerializer) {
        // note: exactly one supplied serializer is honored; zero or more than one silently falls back to XmlSerializer
        Serializer serializer = optionalSerializer != null && optionalSerializer.length == 1 ? optionalSerializer[0] : new XmlSerializer();
        Collection<URL> urls = ClasspathHelper.forPackage(packagePrefix);
        if (urls.isEmpty()) return null;
        long start = System.currentTimeMillis();
        final Reflections reflections = new Reflections();
        Iterable<Vfs.File> files = Vfs.findFiles(urls, packagePrefix, resourceNameFilter);
        for (final Vfs.File file : files) {
            InputStream inputStream = null;
            try {
                inputStream = file.openInputStream();
                reflections.merge(serializer.read(inputStream));
            } catch (IOException e) {
                throw new ReflectionsException("could not merge " + file, e);
            } finally {
                close(inputStream);
            }
        }
        if (log != null) {
            // summarize the merged store: total keys and values across all indices
            Store store = reflections.getStore();
            int keys = 0;
            int values = 0;
            for (String index : store.keySet()) {
                keys += store.get(index).keySet().size();
                values += store.get(index).size();
            }
            log.info(format("Reflections took %d ms to collect %d url%s, producing %d keys and %d values [%s]",
                    System.currentTimeMillis() - start, urls.size(), urls.size() > 1 ? "s" : "", keys, values, Joiner.on(", ").join(urls)));
        }
        return reflections;
    }
    /** merges saved Reflections resources from the given input stream, using the serializer configured in this instance's Configuration
     * <br> useful if you know the serialized resource location and prefer not to look it up the classpath
     * <p>note: the stream is not closed by this method; callers own its lifecycle
     * @throws ReflectionsException when the stream cannot be read or merged
     * */
    public Reflections collect(final InputStream inputStream) {
        try {
            merge(configuration.getSerializer().read(inputStream));
            if (log != null) log.info("Reflections collected metadata from input stream using serializer " + configuration.getSerializer().getClass().getName());
        } catch (Exception ex) {
            throw new ReflectionsException("could not merge input stream", ex);
        }
        return this;
    }
    /** merges saved Reflections resources from the given file, using the serializer configured in this instance's Configuration
     * <p> useful if you know the serialized resource location and prefer not to look it up the classpath
     * @throws ReflectionsException when the file does not exist or cannot be merged
     * */
    public Reflections collect(final File file) {
        FileInputStream inputStream = null;
        try {
            inputStream = new FileInputStream(file);
            return collect(inputStream);
        } catch (FileNotFoundException e) {
            throw new ReflectionsException("could not obtain input stream from file " + file, e);
        } finally {
            // closed here because collect(InputStream) intentionally leaves streams open
            Utils.close(inputStream);
        }
    }
/**
* merges a Reflections instance metadata into this instance
*/
public Reflections merge(final Reflections reflections) {
if (reflections.store != null) {
for (String indexName : reflections.store.keySet()) {
Multimap<String, String> index = reflections.store.get(indexName);
for (String key : index.keySet()) {
for (String string : index.get(key)) {
store.getOrCreate(indexName).put(key, string);
}
}
}
}
return this;
}
//query
/**
* gets all sub types in hierarchy of a given type
* <p/>depends on SubTypesScanner configured
*/
public <T> Set<Class<? extends T>> getSubTypesOf(final Class<T> type) {
return Sets.newHashSet(ReflectionUtils.<T>forNames(
store.getAll(index(SubTypesScanner.class), Arrays.asList(type.getName())), loaders()));
}
    /**
     * get types annotated with a given annotation, both classes and annotations
     * <p>{@link java.lang.annotation.Inherited} is not honored by default; equivalent to
     * {@code getTypesAnnotatedWith(annotation, false)}
     * <p>when honoring @Inherited, meta-annotation should only effect annotated super classes and its sub types
     * <p><i>Note that this (@Inherited) meta-annotation type has no effect if the annotated type is used for anything other then a class.
     * Also, this meta-annotation causes annotations to be inherited only from superclasses; annotations on implemented interfaces have no effect.</i>
     * <p/>depends on TypeAnnotationsScanner and SubTypesScanner configured
     */
    public Set<Class<?>> getTypesAnnotatedWith(final Class<? extends Annotation> annotation) {
        return getTypesAnnotatedWith(annotation, false);
    }
    /**
     * get types annotated with a given annotation, both classes and annotations
     * <p>{@link java.lang.annotation.Inherited} is honored according to given honorInherited.
     * <p>when honoring @Inherited, meta-annotation should only effect annotated super classes and its sub types
     * <p>when not honoring @Inherited, meta annotation effects all subtypes, including annotations interfaces and classes
     * <p><i>Note that this (@Inherited) meta-annotation type has no effect if the annotated type is used for anything other then a class.
     * Also, this meta-annotation causes annotations to be inherited only from superclasses; annotations on implemented interfaces have no effect.</i>
     * <p/>depends on TypeAnnotationsScanner and SubTypesScanner configured
     */
    public Set<Class<?>> getTypesAnnotatedWith(final Class<? extends Annotation> annotation, boolean honorInherited) {
        // directly annotated names, then everything the annotation transitively covers
        Iterable<String> annotated = store.get(index(TypeAnnotationsScanner.class), annotation.getName());
        Iterable<String> classes = getAllAnnotated(annotated, annotation.isAnnotationPresent(Inherited.class), honorInherited);
        return Sets.newHashSet(concat(forNames(annotated, loaders()), forNames(classes, loaders())));
    }
    /**
     * get types annotated with a given annotation, both classes and annotations, including annotation member values matching
     * <p>{@link java.lang.annotation.Inherited} is not honored by default; equivalent to
     * {@code getTypesAnnotatedWith(annotation, false)}
     * <p/>depends on TypeAnnotationsScanner configured
     */
    public Set<Class<?>> getTypesAnnotatedWith(final Annotation annotation) {
        return getTypesAnnotatedWith(annotation, false);
    }
    /**
     * get types annotated with a given annotation, both classes and annotations, including annotation member values matching
     * <p>{@link java.lang.annotation.Inherited} is honored according to given honorInherited
     * <p/>depends on TypeAnnotationsScanner configured
     */
    public Set<Class<?>> getTypesAnnotatedWith(final Annotation annotation, boolean honorInherited) {
        Iterable<String> annotated = store.get(index(TypeAnnotationsScanner.class), annotation.annotationType().getName());
        // narrow the directly annotated types to those whose annotation member values also match
        Iterable<Class<?>> filter = filter(forNames(annotated, loaders()), withAnnotation(annotation));
        Iterable<String> classes = getAllAnnotated(names(filter), annotation.annotationType().isAnnotationPresent(Inherited.class), honorInherited);
        // exclude names already covered by the direct (filtered) set to avoid re-resolving them
        return Sets.newHashSet(concat(filter, forNames(filter(classes, not(in(Sets.newHashSet(annotated)))), loaders())));
    }
    /** expands a set of annotated type names to everything the annotation effectively covers.
     * <p>honoring @Inherited with an inherited annotation: only non-interface annotated types
     * contribute their subtypes (per the JLS, @Inherited propagates through superclasses only);
     * with a non-inherited annotation the annotated names are returned as-is.
     * <p>not honoring @Inherited: includes types annotated by annotated annotations
     * (meta-annotations), plus all subtypes of everything found */
    protected Iterable<String> getAllAnnotated(Iterable<String> annotated, boolean inherited, boolean honorInherited) {
        if (honorInherited) {
            if (inherited) {
                Iterable<String> subTypes = store.get(index(SubTypesScanner.class), filter(annotated, new Predicate<String>() {
                    public boolean apply(@Nullable String input) {
                        return !ReflectionUtils.forName(input, loaders()).isInterface();
                    }
                }));
                return concat(subTypes, store.getAll(index(SubTypesScanner.class), subTypes));
            } else {
                return annotated;
            }
        } else {
            Iterable<String> subTypes = concat(annotated, store.getAll(index(TypeAnnotationsScanner.class), annotated));
            return concat(subTypes, store.getAll(index(SubTypesScanner.class), subTypes));
        }
    }
/**
* get all methods annotated with a given annotation
* <p/>depends on MethodAnnotationsScanner configured
*/
public Set<Method> getMethodsAnnotatedWith(final Class<? extends Annotation> annotation) {
Iterable<String> methods = store.get(index(MethodAnnotationsScanner.class), annotation.getName());
return getMethodsFromDescriptors(methods, loaders());
}
/**
* get all methods annotated with a given annotation, including annotation member values matching
* <p/>depends on MethodAnnotationsScanner configured
*/
public Set<Method> getMethodsAnnotatedWith(final Annotation annotation) {
return filter(getMethodsAnnotatedWith(annotation.annotationType()), withAnnotation(annotation));
}
/** get methods with parameter types matching given {@code types}*/
public Set<Method> getMethodsMatchParams(Class<?>... types) {
return getMethodsFromDescriptors(store.get(index(MethodParameterScanner.class), names(types).toString()), loaders());
}
/** get methods with return type match given type */
public Set<Method> getMethodsReturn(Class returnType) {
return getMethodsFromDescriptors(store.get(index(MethodParameterScanner.class), names(returnType)), loaders());
}
/** get methods with any parameter annotated with given annotation */
public Set<Method> getMethodsWithAnyParamAnnotated(Class<? extends Annotation> annotation) {
return getMethodsFromDescriptors(store.get(index(MethodParameterScanner.class), annotation.getName()), loaders());
}
/** get methods with any parameter annotated with given annotation, including annotation member values matching */
public Set<Method> getMethodsWithAnyParamAnnotated(Annotation annotation) {
return filter(getMethodsWithAnyParamAnnotated(annotation.annotationType()), withAnyParameterAnnotation(annotation));
}
/**
* get all constructors annotated with a given annotation
* <p/>depends on MethodAnnotationsScanner configured
*/
public Set<Constructor> getConstructorsAnnotatedWith(final Class<? extends Annotation> annotation) {
Iterable<String> methods = store.get(index(MethodAnnotationsScanner.class), annotation.getName());
return getConstructorsFromDescriptors(methods, loaders());
}
/**
* get all constructors annotated with a given annotation, including annotation member values matching
* <p/>depends on MethodAnnotationsScanner configured
*/
    public Set<Constructor> getConstructorsAnnotatedWith(final Annotation annotation) {
        // narrows the type-level match by also comparing annotation member values
        return filter(getConstructorsAnnotatedWith(annotation.annotationType()), withAnnotation(annotation));
    }
/** get constructors with parameter types matching given {@code types}*/
    public Set<Constructor> getConstructorsMatchParams(Class<?>... types) {
        // names(types).toString() matches the "[type1, type2]" signature keys stored by MethodParameterScanner
        return getConstructorsFromDescriptors(store.get(index(MethodParameterScanner.class), names(types).toString()), loaders());
    }
/** get constructors with any parameter annotated with given annotation */
    public Set<Constructor> getConstructorsWithAnyParamAnnotated(Class<? extends Annotation> annotation) {
        // parameter annotations are indexed by annotation fqn by MethodParameterScanner
        return getConstructorsFromDescriptors(store.get(index(MethodParameterScanner.class), annotation.getName()), loaders());
    }
/** get constructors with any parameter annotated with given annotation, including annotation member values matching */
    public Set<Constructor> getConstructorsWithAnyParamAnnotated(Annotation annotation) {
        // narrows the match by also comparing annotation member values on each parameter
        return filter(getConstructorsWithAnyParamAnnotated(annotation.annotationType()), withAnyParameterAnnotation(annotation));
    }
/**
* get all fields annotated with a given annotation
* <p/>depends on FieldAnnotationsScanner configured
*/
public Set<Field> getFieldsAnnotatedWith(final Class<? extends Annotation> annotation) {
final Set<Field> result = Sets.newHashSet();
for (String annotated : store.get(index(FieldAnnotationsScanner.class), annotation.getName())) {
result.add(getFieldFromString(annotated, loaders()));
}
return result;
}
/**
* get all methods annotated with a given annotation, including annotation member values matching
* <p/>depends on FieldAnnotationsScanner configured
*/
    public Set<Field> getFieldsAnnotatedWith(final Annotation annotation) {
        // narrows the type-level match by also comparing annotation member values
        return filter(getFieldsAnnotatedWith(annotation.annotationType()), withAnnotation(annotation));
    }
/** get resources relative paths where simple name (key) matches given namePredicate
* <p>depends on ResourcesScanner configured
* */
    public Set<String> getResources(final Predicate<String> namePredicate) {
        // keys in the ResourcesScanner index are simple file names; values are relative paths
        Iterable<String> resources = Iterables.filter(store.get(index(ResourcesScanner.class)).keySet(), namePredicate);
        return Sets.newHashSet(store.get(index(ResourcesScanner.class), resources));
    }
/** get resources relative paths where simple name (key) matches given regular expression
* <p>depends on ResourcesScanner configured
* <pre>Set<String> xmls = reflections.getResources(".*\\.xml");</pre>
*/
    public Set<String> getResources(final Pattern pattern) {
        // adapts the regex to the Predicate-based overload; the pattern must match the
        // resource's simple name in full (Matcher.matches, not find)
        return getResources(new Predicate<String>() {
            public boolean apply(String input) {
                return pattern.matcher(input).matches();
            }
        });
    }
/** get parameter names of given {@code method}
* <p>depends on MethodParameterNamesScanner configured
*/
    public List<String> getMethodParamNames(Method method) {
        // the scanner stores all parameter names as a single comma-joined value;
        // returns an empty list when the method was not scanned or has no parameters recorded
        Iterable<String> names = store.get(index(MethodParameterNamesScanner.class), name(method));
        return !Iterables.isEmpty(names) ? Arrays.asList(Iterables.getOnlyElement(names).split(", ")) : Arrays.<String>asList();
    }
/** get parameter names of given {@code constructor}
* <p>depends on MethodParameterNamesScanner configured
*/
    public List<String> getConstructorParamNames(Constructor constructor) {
        // NOTE(review): uses the qualified Utils.name here while getMethodParamNames uses the
        // statically imported name(..) - presumably the same helper; confirm and unify imports
        Iterable<String> names = store.get(index(MethodParameterNamesScanner.class), Utils.name(constructor));
        return !Iterables.isEmpty(names) ? Arrays.asList(Iterables.getOnlyElement(names).split(", ")) : Arrays.<String>asList();
    }
/** get all given {@code field} usages in methods and constructors
* <p>depends on MemberUsageScanner configured
*/
    public Set<Member> getFieldUsage(Field field) {
        // MemberUsageScanner keys usages by the member's full descriptor string
        return getMembersFromDescriptors(store.get(index(MemberUsageScanner.class), name(field)));
    }
/** get all given {@code method} usages in methods and constructors
* <p>depends on MemberUsageScanner configured
*/
    public Set<Member> getMethodUsage(Method method) {
        // MemberUsageScanner keys usages by the member's full descriptor string
        return getMembersFromDescriptors(store.get(index(MemberUsageScanner.class), name(method)));
    }
/** get all given {@code constructors} usages in methods and constructors
* <p>depends on MemberUsageScanner configured
*/
    public Set<Member> getConstructorUsage(Constructor constructor) {
        // MemberUsageScanner keys usages by the member's full descriptor string
        return getMembersFromDescriptors(store.get(index(MemberUsageScanner.class), name(constructor)));
    }
/** get all types scanned. this is effectively similar to getting all subtypes of Object.
* <p>depends on SubTypesScanner configured with {@code SubTypesScanner(false)}, otherwise {@code ReflectionsException} is thrown
* <p><i>note using this might be a bad practice. it is better to get types matching some criteria,
* such as {@link #getSubTypesOf(Class)} or {@link #getTypesAnnotatedWith(Class)}</i>
* @return Set of String, and not of Class, in order to avoid definition of all types in PermGen
*/
    public Set<String> getAllTypes() {
        // only SubTypesScanner(false) indexes direct subtypes of Object, which is what
        // makes the transitive walk from Object.class reach every scanned type
        Set<String> allTypes = Sets.newHashSet(store.getAll(index(SubTypesScanner.class), Object.class.getName()));
        if (allTypes.isEmpty()) {
            throw new ReflectionsException("Couldn't find subtypes of Object. " +
                    "Make sure SubTypesScanner initialized to include Object class - new SubTypesScanner(false)");
        }
        return allTypes;
    }
/** returns the {@link org.reflections.Store} used for storing and querying the metadata */
    public Store getStore() {
        // exposes the live store, not a copy; callers query it by scanner index
        return store;
    }
/** returns the {@link org.reflections.Configuration} object of this instance */
    public Configuration getConfiguration() {
        // the configuration this instance was built with (scanners, urls, class loaders, ...)
        return configuration;
    }
/**
* serialize to a given directory and filename
* <p>* it is preferred to specify a designated directory (for example META-INF/reflections),
* so that it could be found later much faster using the load method
* <p>see the documentation for the save method on the configured {@link org.reflections.serializers.Serializer}
*/
    public File save(final String filename) {
        // delegates to the serializer configured on this instance
        return save(filename, configuration.getSerializer());
    }
/**
* serialize to a given directory and filename using given serializer
* <p>* it is preferred to specify a designated directory (for example META-INF/reflections),
* so that it could be found later much faster using the load method
*/
public File save(final String filename, final Serializer serializer) {
File file = serializer.save(this, filename);
if (log != null) //noinspection ConstantConditions
log.info("Reflections successfully saved in " + file.getAbsolutePath() + " using " + serializer.getClass().getSimpleName());
return file;
}
    // scanner multimaps are keyed in the Store by the scanner's simple class name
    private static String index(Class<? extends Scanner> scannerClass) { return scannerClass.getSimpleName(); }
    // class loaders used to resolve descriptor strings back into Class/Member objects
    private ClassLoader[] loaders() { return configuration.getClassLoaders(); }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/ReflectionsException.java
|
/*
* User: ophir
* Date: Mar 28, 2009
* Time: 12:52:22 AM
*/
package org.reflections;
public class ReflectionsException extends RuntimeException {
    /** creates an exception with a descriptive message and no underlying cause */
    public ReflectionsException(String message) {
        super(message);
    }
    /** creates an exception wrapping {@code cause} with additional context in {@code message} */
    public ReflectionsException(String message, Throwable cause) {
        super(message, cause);
    }
    /** creates an exception wrapping {@code cause} without an additional message */
    public ReflectionsException(Throwable cause) {
        super(cause);
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/Store.java
|
package org.reflections;
import com.google.common.base.Supplier;
import com.google.common.collect.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* stores metadata information in multimaps
* <p>use the different query methods (getXXX) to query the metadata
* <p>the query methods are string based, and does not cause the class loader to define the types
* <p>use {@link org.reflections.Reflections#getStore()} to access this store
*/
public class Store {
    // true when an ExecutorService was configured; value multimaps are then synchronized
    private transient boolean concurrent;
    // scanner index (simple scanner class name) -> multimap of scan key -> values
    private final Map<String, Multimap<String, String>> storeMap;
    //used via reflection
    @SuppressWarnings("UnusedDeclaration")
    protected Store() {
        storeMap = new HashMap<String, Multimap<String, String>>();
        concurrent = false;
    }
    public Store(Configuration configuration) {
        storeMap = new HashMap<String, Multimap<String, String>>();
        concurrent = configuration.getExecutorService() != null;
    }
    /** return all indices */
    public Set<String> keySet() {
        return storeMap.keySet();
    }
    /** get or create the multimap object for the given {@code index} */
    // NOTE(review): storeMap itself is a plain HashMap; index creation is presumably
    // confined to a single thread even in concurrent mode - confirm against scan() usage
    public Multimap<String, String> getOrCreate(String index) {
        Multimap<String, String> mmap = storeMap.get(index);
        if (mmap == null) {
            // value sets are backed by ConcurrentHashMap so per-key writes are thread-safe
            SetMultimap<String, String> multimap =
                    Multimaps.newSetMultimap(new HashMap<String, Collection<String>>(),
                            new Supplier<Set<String>>() {
                                public Set<String> get() {
                                    return Sets.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
                                }
                            });
            mmap = concurrent ? Multimaps.synchronizedSetMultimap(multimap) : multimap;
            storeMap.put(index,mmap);
        }
        return mmap;
    }
    /** get the multimap object for the given {@code index}, otherwise throws a {@link org.reflections.ReflectionsException} */
    public Multimap<String, String> get(String index) {
        Multimap<String, String> mmap = storeMap.get(index);
        if (mmap == null) {
            throw new ReflectionsException("Scanner " + index + " was not configured");
        }
        return mmap;
    }
    /** get the values stored for the given {@code index} and {@code keys} */
    public Iterable<String> get(String index, String... keys) {
        return get(index, Arrays.asList(keys));
    }
    /** get the values stored for the given {@code index} and {@code keys} */
    public Iterable<String> get(String index, Iterable<String> keys) {
        Multimap<String, String> mmap = get(index);
        // chains the per-key collections lazily instead of copying them into one set
        IterableChain<String> result = new IterableChain<String>();
        for (String key : keys) {
            result.addAll(mmap.get(key));
        }
        return result;
    }
    /** recursively get the values stored for the given {@code index} and {@code keys}, including keys */
    private Iterable<String> getAllIncluding(String index, Iterable<String> keys, IterableChain<String> result) {
        result.addAll(keys);
        for (String key : keys) {
            // recurse only when the key has values of its own (e.g. transitive subtypes)
            Iterable<String> values = get(index, key);
            if (values.iterator().hasNext()) {
                getAllIncluding(index, values, result);
            }
        }
        return result;
    }
    /** recursively get the values stored for the given {@code index} and {@code keys}, not including keys */
    public Iterable<String> getAll(String index, String key) {
        return getAllIncluding(index, get(index, key), new IterableChain<String>());
    }
    /** recursively get the values stored for the given {@code index} and {@code keys}, not including keys */
    public Iterable<String> getAll(String index, Iterable<String> keys) {
        return getAllIncluding(index, get(index, keys), new IterableChain<String>());
    }
    // lazy concatenation of several iterables; avoids materializing large result sets
    private static class IterableChain<T> implements Iterable<T> {
        private final List<Iterable<T>> chain = Lists.newArrayList();
        private void addAll(Iterable<T> iterable) { chain.add(iterable); }
        public Iterator<T> iterator() { return Iterables.concat(chain).iterator(); }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/adapters/JavaReflectionAdapter.java
|
package org.reflections.adapters;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import org.reflections.util.Utils;
import org.reflections.vfs.Vfs;
import javax.annotation.Nullable;
import java.lang.annotation.Annotation;
import java.lang.reflect.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.reflections.ReflectionUtils.forName;
/** */
/** MetadataAdapter backed by plain {@code java.lang.reflect} - requires the classes to be loadable. */
public class JavaReflectionAdapter implements MetadataAdapter<Class, Field, Member> {
    public List<Field> getFields(Class cls) {
        return Lists.newArrayList(cls.getDeclaredFields());
    }

    /** returns declared methods and constructors together, as both are indexed by the scanners */
    public List<Member> getMethods(Class cls) {
        List<Member> methods = Lists.newArrayList();
        methods.addAll(Arrays.asList(cls.getDeclaredMethods()));
        methods.addAll(Arrays.asList(cls.getDeclaredConstructors()));
        return methods;
    }

    /** returns "<init>" for constructors, the method name for methods, null otherwise */
    public String getMethodName(Member method) {
        return method instanceof Method ? method.getName() :
                method instanceof Constructor ? "<init>" : null;
    }

    /** returns the parameter type names of the given method/constructor (empty for other members) */
    public List<String> getParameterNames(final Member member) {
        List<String> result = Lists.newArrayList();
        Class<?>[] parameterTypes = member instanceof Method ? ((Method) member).getParameterTypes() :
                member instanceof Constructor ? ((Constructor) member).getParameterTypes() : null;
        if (parameterTypes != null) {
            for (Class<?> paramType : parameterTypes) {
                String name = getName(paramType);
                result.add(name);
            }
        }
        return result;
    }

    public List<String> getClassAnnotationNames(Class aClass) {
        return getAnnotationNames(aClass.getDeclaredAnnotations());
    }

    public List<String> getFieldAnnotationNames(Field field) {
        return getAnnotationNames(field.getDeclaredAnnotations());
    }

    public List<String> getMethodAnnotationNames(Member method) {
        Annotation[] annotations =
                method instanceof Method ? ((Method) method).getDeclaredAnnotations() :
                method instanceof Constructor ? ((Constructor) method).getDeclaredAnnotations() : null;
        return getAnnotationNames(annotations);
    }

    public List<String> getParameterAnnotationNames(Member method, int parameterIndex) {
        Annotation[][] annotations =
                method instanceof Method ? ((Method) method).getParameterAnnotations() :
                method instanceof Constructor ? ((Constructor) method).getParameterAnnotations() : null;
        return getAnnotationNames(annotations != null ? annotations[parameterIndex] : null);
    }

    public String getReturnTypeName(Member method) {
        return ((Method) method).getReturnType().getName();
    }

    public String getFieldName(Field field) {
        return field.getName();
    }

    public Class getOfCreateClassObject(Vfs.File file) throws Exception {
        return getOfCreateClassObject(file, null);
    }

    /** resolves the class named by the file's relative path using the given loaders */
    public Class getOfCreateClassObject(Vfs.File file, @Nullable ClassLoader... loaders) throws Exception {
        String name = file.getRelativePath().replace("/", ".").replace(".class", "");
        return forName(name, loaders);
    }

    public String getMethodModifier(Member method) {
        return Modifier.toString(method.getModifiers());
    }

    public String getMethodKey(Class cls, Member method) {
        return getMethodName(method) + "(" + Joiner.on(", ").join(getParameterNames(method)) + ")";
    }

    public String getMethodFullKey(Class cls, Member method) {
        return getClassName(cls) + "." + getMethodKey(cls, method);
    }

    public boolean isPublic(Object o) {
        Integer mod =
                o instanceof Class ? ((Class) o).getModifiers() :
                o instanceof Member ? ((Member) o).getModifiers() : null;
        return mod != null && Modifier.isPublic(mod);
    }

    public String getClassName(Class cls) {
        return cls.getName();
    }

    public String getSuperclassName(Class cls) {
        Class superclass = cls.getSuperclass();
        return superclass != null ? superclass.getName() : "";
    }

    public List<String> getInterfacesNames(Class cls) {
        Class[] classes = cls.getInterfaces();
        List<String> names = new ArrayList<String>(classes != null ? classes.length : 0);
        if (classes != null) for (Class cls1 : classes) names.add(cls1.getName());
        return names;
    }

    public boolean acceptsInput(String file) {
        return file.endsWith(".class");
    }

    //
    private List<String> getAnnotationNames(Annotation[] annotations) {
        // fix: guard against null - getMethodAnnotationNames/getParameterAnnotationNames
        // pass null when the member is neither a Method nor a Constructor, which
        // previously caused an NPE on annotations.length
        if (annotations == null) {
            return new ArrayList<String>(0);
        }
        List<String> names = new ArrayList<String>(annotations.length);
        for (Annotation annotation : annotations) {
            names.add(annotation.annotationType().getName());
        }
        return names;
    }

    /** array types are rendered as "component[]..." (e.g. "int[][]"), others by Class.getName() */
    public static String getName(Class type) {
        if (type.isArray()) {
            try {
                Class cl = type;
                int dim = 0; while (cl.isArray()) { dim++; cl = cl.getComponentType(); }
                return cl.getName() + Utils.repeat("[]", dim);
            } catch (Throwable e) {
                //
            }
        }
        return type.getName();
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/adapters/JavassistAdapter.java
|
package org.reflections.adapters;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import javassist.bytecode.*;
import javassist.bytecode.annotation.Annotation;
import org.reflections.ReflectionsException;
import org.reflections.util.Utils;
import org.reflections.vfs.Vfs;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import static javassist.bytecode.AccessFlag.isPrivate;
import static javassist.bytecode.AccessFlag.isProtected;
/**
*
*/
public class JavassistAdapter implements MetadataAdapter<ClassFile, FieldInfo, MethodInfo> {
    /**setting this to false will result in returning only visible annotations from the relevant methods here (only {@link java.lang.annotation.RetentionPolicy#RUNTIME})*/
    public static boolean includeInvisibleTag = true;
    public List<FieldInfo> getFields(final ClassFile cls) {
        //noinspection unchecked
        return cls.getFields();
    }
    public List<MethodInfo> getMethods(final ClassFile cls) {
        //noinspection unchecked
        return cls.getMethods();
    }
    public String getMethodName(final MethodInfo method) {
        return method.getName();
    }
    public List<String> getParameterNames(final MethodInfo method) {
        // the portion of the JVM descriptor between '(' and ')' encodes the parameter types
        String descriptor = method.getDescriptor();
        descriptor = descriptor.substring(descriptor.indexOf("(") + 1, descriptor.lastIndexOf(")"));
        return splitDescriptorToTypeNames(descriptor);
    }
    public List<String> getClassAnnotationNames(final ClassFile aClass) {
        return getAnnotationNames((AnnotationsAttribute) aClass.getAttribute(AnnotationsAttribute.visibleTag),
                includeInvisibleTag ? (AnnotationsAttribute) aClass.getAttribute(AnnotationsAttribute.invisibleTag) : null);
    }
    public List<String> getFieldAnnotationNames(final FieldInfo field) {
        return getAnnotationNames((AnnotationsAttribute) field.getAttribute(AnnotationsAttribute.visibleTag),
                includeInvisibleTag ? (AnnotationsAttribute) field.getAttribute(AnnotationsAttribute.invisibleTag) : null);
    }
    public List<String> getMethodAnnotationNames(final MethodInfo method) {
        return getAnnotationNames((AnnotationsAttribute) method.getAttribute(AnnotationsAttribute.visibleTag),
                includeInvisibleTag ? (AnnotationsAttribute) method.getAttribute(AnnotationsAttribute.invisibleTag) : null);
    }
    public List<String> getParameterAnnotationNames(final MethodInfo method, final int parameterIndex) {
        List<String> result = Lists.newArrayList();
        // both runtime-visible and runtime-invisible parameter annotations are collected
        List<ParameterAnnotationsAttribute> parameterAnnotationsAttributes = Lists.newArrayList((ParameterAnnotationsAttribute) method.getAttribute(ParameterAnnotationsAttribute.visibleTag),
                (ParameterAnnotationsAttribute) method.getAttribute(ParameterAnnotationsAttribute.invisibleTag));
        if (parameterAnnotationsAttributes != null) {
            for (ParameterAnnotationsAttribute parameterAnnotationsAttribute : parameterAnnotationsAttributes) {
                if (parameterAnnotationsAttribute != null) {
                    Annotation[][] annotations = parameterAnnotationsAttribute.getAnnotations();
                    if (parameterIndex < annotations.length) {
                        Annotation[] annotation = annotations[parameterIndex];
                        result.addAll(getAnnotationNames(annotation));
                    }
                }
            }
        }
        return result;
    }
    public String getReturnTypeName(final MethodInfo method) {
        // everything after ')' in the JVM descriptor encodes the return type
        String descriptor = method.getDescriptor();
        descriptor = descriptor.substring(descriptor.lastIndexOf(")") + 1);
        return splitDescriptorToTypeNames(descriptor).get(0);
    }
    public String getFieldName(final FieldInfo field) {
        return field.getName();
    }
    // parses the raw .class bytes into a javassist ClassFile; does not load the class
    public ClassFile getOfCreateClassObject(final Vfs.File file) {
        InputStream inputStream = null;
        try {
            inputStream = file.openInputStream();
            DataInputStream dis = new DataInputStream(new BufferedInputStream(inputStream));
            return new ClassFile(dis);
        } catch (IOException e) {
            throw new ReflectionsException("could not create class file from " + file.getName(), e);
        } finally {
            Utils.close(inputStream);
        }
    }
    public String getMethodModifier(MethodInfo method) {
        // package-private access yields the empty string
        int accessFlags = method.getAccessFlags();
        return isPrivate(accessFlags) ? "private" :
               isProtected(accessFlags) ? "protected" :
               isPublic(accessFlags) ? "public" : "";
    }
    public String getMethodKey(ClassFile cls, MethodInfo method) {
        return getMethodName(method) + "(" + Joiner.on(", ").join(getParameterNames(method)) + ")";
    }
    public String getMethodFullKey(ClassFile cls, MethodInfo method) {
        return getClassName(cls) + "." + getMethodKey(cls, method);
    }
    public boolean isPublic(Object o) {
        Integer accessFlags =
                o instanceof ClassFile ? ((ClassFile) o).getAccessFlags() :
                o instanceof FieldInfo ? ((FieldInfo) o).getAccessFlags() :
                o instanceof MethodInfo ? ((MethodInfo) o).getAccessFlags() : null;
        return accessFlags != null && AccessFlag.isPublic(accessFlags);
    }
    //
    public String getClassName(final ClassFile cls) {
        return cls.getName();
    }
    public String getSuperclassName(final ClassFile cls) {
        return cls.getSuperclass();
    }
    public List<String> getInterfacesNames(final ClassFile cls) {
        return Arrays.asList(cls.getInterfaces());
    }
    public boolean acceptsInput(String file) {
        return file.endsWith(".class");
    }
    //
    private List<String> getAnnotationNames(final AnnotationsAttribute... annotationsAttributes) {
        List<String> result = Lists.newArrayList();
        if (annotationsAttributes != null) {
            for (AnnotationsAttribute annotationsAttribute : annotationsAttributes) {
                if (annotationsAttribute != null) {
                    for (Annotation annotation : annotationsAttribute.getAnnotations()) {
                        result.add(annotation.getTypeName());
                    }
                }
            }
        }
        return result;
    }
    private List<String> getAnnotationNames(final Annotation[] annotations) {
        List<String> result = Lists.newArrayList();
        for (Annotation annotation : annotations) {
            result.add(annotation.getTypeName());
        }
        return result;
    }
    // turns a JVM type-descriptor sequence (e.g. "ILjava/lang/String;") into
    // human-readable type names using javassist's Descriptor utilities
    private List<String> splitDescriptorToTypeNames(final String descriptors) {
        List<String> result = Lists.newArrayList();
        if (descriptors != null && descriptors.length() != 0) {
            List<Integer> indices = Lists.newArrayList();
            // Descriptor.Iterator yields the start index of each type in the descriptor
            Descriptor.Iterator iterator = new Descriptor.Iterator(descriptors);
            while (iterator.hasNext()) {
                indices.add(iterator.next());
            }
            indices.add(descriptors.length());
            for (int i = 0; i < indices.size() - 1; i++) {
                String s1 = Descriptor.toString(descriptors.substring(indices.get(i), indices.get(i + 1)));
                result.add(s1);
            }
        }
        return result;
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/adapters/MetadataAdapter.java
|
package org.reflections.adapters;
import org.reflections.vfs.Vfs;
import java.util.List;
/**
*
*/
/**
 * Abstraction over class metadata access: C = class, F = field, M = method/constructor.
 * Implementations either parse bytecode (JavassistAdapter) or use loaded classes
 * (JavaReflectionAdapter); all results are plain strings so scanning never forces class loading.
 */
public interface MetadataAdapter<C,F,M> {
    //
    String getClassName(final C cls);
    String getSuperclassName(final C cls);
    List<String> getInterfacesNames(final C cls);
    //
    List<F> getFields(final C cls);
    List<M> getMethods(final C cls);
    String getMethodName(final M method);
    List<String> getParameterNames(final M method);
    List<String> getClassAnnotationNames(final C aClass);
    List<String> getFieldAnnotationNames(final F field);
    List<String> getMethodAnnotationNames(final M method);
    List<String> getParameterAnnotationNames(final M method, final int parameterIndex);
    String getReturnTypeName(final M method);
    String getFieldName(final F field);
    // builds (or fetches) the class object for the given file; name presumably a typo for "getOrCreate"
    C getOfCreateClassObject(Vfs.File file) throws Exception;
    String getMethodModifier(M method);
    // short key: "name(paramTypes)"; full key prefixes the declaring class name
    String getMethodKey(C cls, M method);
    String getMethodFullKey(C cls, M method);
    boolean isPublic(Object o);
    // quick filename-based filter applied before any parsing
    boolean acceptsInput(String file);
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/AbstractScanner.java
|
package org.reflections.scanners;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Multimap;
import org.reflections.Configuration;
import org.reflections.ReflectionsException;
import org.reflections.adapters.MetadataAdapter;
import org.reflections.vfs.Vfs;
import static org.reflections.Reflections.log;
/**
*
*/
@SuppressWarnings({"RawUseOfParameterizedType", "unchecked"})
public abstract class AbstractScanner implements Scanner {
    private Configuration configuration;
    // per-scanner multimap inside the Store; subclasses write results here
    private Multimap<String, String> store;
    private Predicate<String> resultFilter = Predicates.alwaysTrue(); //accept all by default
    public boolean acceptsInput(String file) {
        return getMetadataAdapter().acceptsInput(file);
    }
    public Object scan(Vfs.File file, Object classObject) {
        // classObject is shared between scanners so the file is parsed at most once per scan
        if (classObject == null) {
            try {
                classObject = configuration.getMetadataAdapter().getOfCreateClassObject(file);
            } catch (Exception e) {
                throw new ReflectionsException("could not create class object from file " + file.getRelativePath(), e);
            }
        }
        scan(classObject);
        return classObject;
    }
    public abstract void scan(Object cls);
    //
    public Configuration getConfiguration() {
        return configuration;
    }
    public void setConfiguration(final Configuration configuration) {
        this.configuration = configuration;
    }
    public Multimap<String, String> getStore() {
        return store;
    }
    public void setStore(final Multimap<String, String> store) {
        this.store = store;
    }
    public Predicate<String> getResultFilter() {
        return resultFilter;
    }
    public void setResultFilter(Predicate<String> resultFilter) {
        this.resultFilter = resultFilter;
    }
    /** fluent variant of {@link #setResultFilter(Predicate)} */
    public Scanner filterResultsBy(Predicate<String> filter) {
        this.setResultFilter(filter); return this;
    }
    //
    // true when the fully qualified name passes the configured result filter
    public boolean acceptResult(final String fqn) {
        return fqn != null && resultFilter.apply(fqn);
    }
    protected MetadataAdapter getMetadataAdapter() {
        return configuration.getMetadataAdapter();
    }
    //
    // scanners are considered equal per class - one instance of each scanner type per configuration
    @Override public boolean equals(Object o) {
        return this == o || o != null && getClass() == o.getClass();
    }
    @Override public int hashCode() {
        return getClass().hashCode();
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/FieldAnnotationsScanner.java
|
package org.reflections.scanners;
import java.util.List;
/** scans for field's annotations */
@SuppressWarnings({"unchecked"})
public class FieldAnnotationsScanner extends AbstractScanner {
    /** indexes each field by the annotations declared on it; values stored as "fqcn.fieldName" */
    public void scan(final Object cls) {
        final String declaringClass = getMetadataAdapter().getClassName(cls);
        for (final Object field : (List<Object>) getMetadataAdapter().getFields(cls)) {
            for (String annotationName : (List<String>) getMetadataAdapter().getFieldAnnotationNames(field)) {
                if (!acceptResult(annotationName)) {
                    continue;
                }
                String fieldName = getMetadataAdapter().getFieldName(field);
                getStore().put(annotationName, String.format("%s.%s", declaringClass, fieldName));
            }
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/MemberUsageScanner.java
|
package org.reflections.scanners;
import com.google.common.base.Joiner;
import javassist.*;
import javassist.bytecode.MethodInfo;
import javassist.expr.*;
import org.reflections.ReflectionsException;
import org.reflections.util.ClasspathHelper;
/** scans methods/constructors/fields usage
* <p><i> depends on {@link org.reflections.adapters.JavassistAdapter} configured </i>*/
@SuppressWarnings("unchecked")
public class MemberUsageScanner extends AbstractScanner {
    // fix: volatile so the double-checked locking in getClassPool() is safe under the JMM
    private volatile ClassPool classPool;
    @Override
    public void scan(Object cls) {
        try {
            CtClass ctClass = getClassPool().get(getMetadataAdapter().getClassName(cls));
            for (CtBehavior member : ctClass.getDeclaredConstructors()) {
                scanMember(member);
            }
            for (CtBehavior member : ctClass.getDeclaredMethods()) {
                scanMember(member);
            }
            // release the CtClass from the pool to keep memory bounded during large scans
            ctClass.detach();
        } catch (Exception e) {
            throw new ReflectionsException("Could not scan method usage for " + getMetadataAdapter().getClassName(cls), e);
        }
    }
    /** instruments the body of {@code member} and records every constructor/method/field access in it */
    void scanMember(CtBehavior member) throws CannotCompileException {
        //key contains this$/val$ means local field/parameter closure
        final String key = member.getDeclaringClass().getName() + "." + member.getMethodInfo().getName() +
                "(" + parameterNames(member.getMethodInfo()) + ")"; //+ " #" + member.getMethodInfo().getLineNumber(0)
        member.instrument(new ExprEditor() {
            @Override
            public void edit(NewExpr e) throws CannotCompileException {
                try {
                    put(e.getConstructor().getDeclaringClass().getName() + "." + "<init>" +
                            "(" + parameterNames(e.getConstructor().getMethodInfo()) + ")", e.getLineNumber(), key);
                } catch (NotFoundException e1) {
                    throw new ReflectionsException("Could not find new instance usage in " + key, e1);
                }
            }
            @Override
            public void edit(MethodCall m) throws CannotCompileException {
                try {
                    put(m.getMethod().getDeclaringClass().getName() + "." + m.getMethodName() +
                            "(" + parameterNames(m.getMethod().getMethodInfo()) + ")", m.getLineNumber(), key);
                } catch (NotFoundException e) {
                    throw new ReflectionsException("Could not find member " + m.getClassName() + " in " + key, e);
                }
            }
            @Override
            public void edit(ConstructorCall c) throws CannotCompileException {
                try {
                    put(c.getConstructor().getDeclaringClass().getName() + "." + "<init>" +
                            "(" + parameterNames(c.getConstructor().getMethodInfo()) + ")", c.getLineNumber(), key);
                } catch (NotFoundException e) {
                    throw new ReflectionsException("Could not find member " + c.getClassName() + " in " + key, e);
                }
            }
            @Override
            public void edit(FieldAccess f) throws CannotCompileException {
                try {
                    put(f.getField().getDeclaringClass().getName() + "." + f.getFieldName(), f.getLineNumber(), key);
                } catch (NotFoundException e) {
                    throw new ReflectionsException("Could not find member " + f.getFieldName() + " in " + key, e);
                }
            }
        });
    }
    // stores "usingMember #lineNumber" under the used member's key, subject to the result filter
    private void put(String key, int lineNumber, String value) {
        if (acceptResult(key)) {
            getStore().put(key, value + " #" + lineNumber);
        }
    }
    String parameterNames(MethodInfo info) {
        return Joiner.on(", ").join(getMetadataAdapter().getParameterNames(info));
    }
    // fix: proper double-checked locking - re-check under the lock (two threads could
    // previously both create a pool) and publish the ClassPool only after its classpath
    // is fully appended, so no thread observes a partially initialized pool
    private ClassPool getClassPool() {
        ClassPool pool = classPool;
        if (pool == null) {
            synchronized (this) {
                pool = classPool;
                if (pool == null) {
                    pool = new ClassPool();
                    ClassLoader[] classLoaders = getConfiguration().getClassLoaders();
                    if (classLoaders == null) {
                        classLoaders = ClasspathHelper.classLoaders();
                    }
                    for (ClassLoader classLoader : classLoaders) {
                        pool.appendClassPath(new LoaderClassPath(classLoader));
                    }
                    classPool = pool;
                }
            }
        }
        return pool;
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/MethodAnnotationsScanner.java
|
package org.reflections.scanners;
import java.util.List;
/** scans for method's annotations */
@SuppressWarnings({"unchecked"})
public class MethodAnnotationsScanner extends AbstractScanner {
    // indexes methods and constructors by each annotation declared on them:
    // annotation fqn -> full method key ("fqcn.method(paramTypes)")
    public void scan(final Object cls) {
        for (Object method : getMetadataAdapter().getMethods(cls)) {
            List<String> annotationNames = getMetadataAdapter().getMethodAnnotationNames(method);
            for (String annotationName : annotationNames) {
                if (!acceptResult(annotationName)) {
                    continue;
                }
                getStore().put(annotationName, getMetadataAdapter().getMethodFullKey(cls, method));
            }
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/MethodParameterNamesScanner.java
|
package org.reflections.scanners;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import javassist.bytecode.LocalVariableAttribute;
import javassist.bytecode.MethodInfo;
import org.reflections.adapters.MetadataAdapter;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/** scans methods/constructors and indexes parameter names */
@SuppressWarnings("unchecked")
public class MethodParameterNamesScanner extends AbstractScanner {
    /**
     * Indexes each method/constructor by its full key, storing the declared parameter
     * names as a single comma-joined value. Names come from the LocalVariableTable,
     * so they are only available for classes compiled with debug info (javac -g).
     */
    @Override
    public void scan(Object cls) {
        final MetadataAdapter md = getMetadataAdapter();
        for (Object method : md.getMethods(cls)) {
            String key = md.getMethodFullKey(cls, method);
            if (acceptResult(key)) {
                MethodInfo methodInfo = (MethodInfo) method;
                // fix: abstract/native methods have no code attribute, and classes compiled
                // without debug info have no local variable table - skip both instead of NPE'ing
                if (methodInfo.getCodeAttribute() == null) {
                    continue;
                }
                LocalVariableAttribute table = (LocalVariableAttribute)
                        methodInfo.getCodeAttribute().getAttribute(LocalVariableAttribute.tag);
                if (table == null) {
                    continue;
                }
                int length = table.tableLength();
                int i = Modifier.isStatic(methodInfo.getAccessFlags()) ? 0 : 1; //skip this
                if (i < length) {
                    List<String> names = new ArrayList<String>(length - i);
                    while (i < length) {
                        names.add(methodInfo.getConstPool().getUtf8Info(table.nameIndex(i++)));
                    }
                    getStore().put(key, Joiner.on(", ").join(names));
                }
            }
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/MethodParameterScanner.java
|
package org.reflections.scanners;
import org.reflections.adapters.MetadataAdapter;
import java.util.List;
/** scans methods/constructors and indexes parameters, return type and parameter annotations */
@SuppressWarnings("unchecked")
public class MethodParameterScanner extends AbstractScanner {
    @Override
    public void scan(Object cls) {
        final MetadataAdapter md = getMetadataAdapter();
        for (Object method : md.getMethods(cls)) {
            // Hoist values the original recomputed on every use: the method key was
            // built up to three times and the parameter-name list fetched twice.
            String methodKey = md.getMethodFullKey(cls, method);
            List<String> parameterNames = md.getParameterNames(method);
            // index by full parameter-list signature, e.g. "[int, java.lang.String]"
            String signature = parameterNames.toString();
            if (acceptResult(signature)) {
                getStore().put(signature, methodKey);
            }
            // index by return type
            String returnTypeName = md.getReturnTypeName(method);
            if (acceptResult(returnTypeName)) {
                getStore().put(returnTypeName, methodKey);
            }
            // index by each parameter annotation
            for (int i = 0; i < parameterNames.size(); i++) {
                for (Object paramAnnotation : md.getParameterAnnotationNames(method, i)) {
                    if (acceptResult((String) paramAnnotation)) {
                        getStore().put((String) paramAnnotation, methodKey);
                    }
                }
            }
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/ResourcesScanner.java
|
package org.reflections.scanners;
import org.reflections.vfs.Vfs;
/** collects all resources that are not classes in a collection
 * <p>key: value - {web.xml: WEB-INF/web.xml} */
public class ResourcesScanner extends AbstractScanner {
    public boolean acceptsInput(String file) {
        // accept everything except compiled classes
        boolean isClassFile = file.endsWith(".class");
        return !isClassFile;
    }

    @Override
    public Object scan(Vfs.File file, Object classObject) {
        // index simple name -> relative path; the class object is passed through untouched
        getStore().put(file.getName(), file.getRelativePath());
        return classObject;
    }

    public void scan(Object cls) {
        // resources are scanned via the Vfs.File overload, never per-class
        throw new UnsupportedOperationException(); //shouldn't get here
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/Scanner.java
|
package org.reflections.scanners;
import com.google.common.base.Predicate;
import com.google.common.collect.Multimap;
import org.reflections.Configuration;
import org.reflections.vfs.Vfs;
import javax.annotation.Nullable;
/**
 * A scanner indexes metadata found in scanned files into a string multimap store
 * (see the sibling scanner implementations for concrete key/value schemes).
 */
public interface Scanner {
    /** injects the configuration the scanner should use. */
    void setConfiguration(Configuration configuration);
    /** returns the multimap this scanner writes its results into. */
    Multimap<String, String> getStore();
    /** sets the multimap this scanner writes its results into. */
    void setStore(Multimap<String, String> store);
    /** restricts indexed results to those accepted by the given predicate; returns this scanner for chaining. */
    Scanner filterResultsBy(Predicate<String> filter);
    /** returns true if this scanner is interested in a file with the given name. */
    boolean acceptsInput(String file);
    /** scans the given file; may reuse and/or return the given class object (nullable). */
    Object scan(Vfs.File file, @Nullable Object classObject);
    /** returns true if the given fully qualified name passes this scanner's result filter. */
    boolean acceptResult(String fqn);
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/SubTypesScanner.java
|
package org.reflections.scanners;
import org.reflections.util.FilterBuilder;
import java.util.List;
/** scans for superclass and interfaces of a class, allowing a reverse lookup for subtypes */
public class SubTypesScanner extends AbstractScanner {
    /** created new SubTypesScanner. will exclude direct Object subtypes */
    public SubTypesScanner() {
        this(true); // by default do not index the ubiquitous Object -> subtype edges
    }

    /** created new SubTypesScanner.
     * @param excludeObjectClass if false, include direct {@link Object} subtypes in results. */
    public SubTypesScanner(boolean excludeObjectClass) {
        if (excludeObjectClass) {
            filterResultsBy(new FilterBuilder().exclude(Object.class.getName())); //exclude direct Object subtypes
        }
    }

    @SuppressWarnings({"unchecked"})
    public void scan(final Object cls) {
        final String className = getMetadataAdapter().getClassName(cls);
        // index superclass -> subtype
        final String superName = getMetadataAdapter().getSuperclassName(cls);
        if (acceptResult(superName)) {
            getStore().put(superName, className);
        }
        // index each implemented interface -> subtype
        List<String> interfaceNames = (List<String>) getMetadataAdapter().getInterfacesNames(cls);
        for (String interfaceName : interfaceNames) {
            if (acceptResult(interfaceName)) {
                getStore().put(interfaceName, className);
            }
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/TypeAnnotationsScanner.java
|
package org.reflections.scanners;
import java.lang.annotation.Inherited;
import java.util.List;
/** scans for class's annotations, where @Retention(RetentionPolicy.RUNTIME) */
@SuppressWarnings({"unchecked"})
public class TypeAnnotationsScanner extends AbstractScanner {
    public void scan(final Object cls) {
        final String className = getMetadataAdapter().getClassName(cls);
        List<String> annotationNames = (List<String>) getMetadataAdapter().getClassAnnotationNames(cls);
        for (String annotationType : annotationNames) {
            // Inherited is always indexed regardless of the filter, so that
            // inherited meta-annotations can be resolved later.
            boolean isInherited = annotationType.equals(Inherited.class.getName());
            if (isInherited || acceptResult(annotationType)) {
                getStore().put(annotationType, className);
            }
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/TypeElementsScanner.java
|
package org.reflections.scanners;
import com.google.common.base.Joiner;
/** scans fields and methods and stores fqn as key and elements as values */
@SuppressWarnings({"unchecked"})
public class TypeElementsScanner extends AbstractScanner {
    // what to index; all on by default, switchable via the fluent setters below
    private boolean includeFields = true;
    private boolean includeMethods = true;
    private boolean includeAnnotations = true;
    private boolean publicOnly = true;

    public void scan(Object cls) {
        String className = getMetadataAdapter().getClassName(cls);
        if (!acceptResult(className)) return;
        // marker entry so the type is recorded even when it has no members
        getStore().put(className, "");
        if (includeFields) {
            for (Object field : getMetadataAdapter().getFields(cls)) {
                getStore().put(className, getMetadataAdapter().getFieldName(field));
            }
        }
        if (includeMethods) {
            for (Object method : getMetadataAdapter().getMethods(cls)) {
                boolean visible = !publicOnly || getMetadataAdapter().isPublic(method);
                if (visible) {
                    // method key format: name(param1, param2)
                    String params = Joiner.on(", ").join(getMetadataAdapter().getParameterNames(method));
                    getStore().put(className, getMetadataAdapter().getMethodName(method) + "(" + params + ")");
                }
            }
        }
        if (includeAnnotations) {
            for (Object annotation : getMetadataAdapter().getClassAnnotationNames(cls)) {
                getStore().put(className, "@" + annotation);
            }
        }
    }

    // fluent configuration
    public TypeElementsScanner includeFields() { return includeFields(true); }
    public TypeElementsScanner includeFields(boolean include) { includeFields = include; return this; }
    public TypeElementsScanner includeMethods() { return includeMethods(true); }
    public TypeElementsScanner includeMethods(boolean include) { includeMethods = include; return this; }
    public TypeElementsScanner includeAnnotations() { return includeAnnotations(true); }
    public TypeElementsScanner includeAnnotations(boolean include) { includeAnnotations = include; return this; }
    public TypeElementsScanner publicOnly(boolean only) { publicOnly = only; return this; }
    public TypeElementsScanner publicOnly() { return publicOnly(true); }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/scanners/TypesScanner.java
|
package org.reflections.scanners;
import org.reflections.ReflectionsException;
import org.reflections.vfs.Vfs;
/** scans classes and stores fqn as key and full path as value.
 * <p>Deprecated. use {@link org.reflections.scanners.TypeElementsScanner} */
@Deprecated
public class TypesScanner extends AbstractScanner {
    @Override
    public Object scan(Vfs.File file, Object classObject) {
        // let the base scanner materialize the class object, then self-index its name
        Object scanned = super.scan(file, classObject);
        String className = getMetadataAdapter().getClassName(scanned);
        getStore().put(className, className);
        return scanned;
    }

    @Override
    public void scan(Object cls) {
        // this scanner only operates through the Vfs.File overload above
        throw new UnsupportedOperationException("should not get here");
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/serializers/JavaCodeSerializer.java
|
package org.reflections.serializers;
import com.google.common.base.Joiner;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import org.reflections.ReflectionUtils;
import org.reflections.Reflections;
import org.reflections.ReflectionsException;
import org.reflections.scanners.TypeElementsScanner;
import org.reflections.util.Utils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.reflections.Reflections.log;
import static org.reflections.util.Utils.prepareFile;
import static org.reflections.util.Utils.repeat;
/** serialization of Reflections to java code
 * <p> serializes types and types elements into interfaces respectively to fully qualified name,
 * <p> for example:
 * <pre>
 * public interface MyTestModelStore {
 *  public interface <b>org</b> extends IPackage {
 *      public interface <b>reflections</b> extends IPackage {
 *          public interface <b>TestModel$AC1</b> extends IClass {}
 *          public interface <b>TestModel$C4</b> extends IClass {
 *              public interface <b>f1</b> extends IField {}
 *              public interface <b>m1</b> extends IMethod {}
 *              public interface <b>m1_int_java$lang$String$$$$</b> extends IMethod {}
 *  ...
 * }
 * </pre>
 * <p> use the different resolve methods to resolve the serialized element into Class, Field or Method. for example:
 * <pre>
 *  Class&lt;? extends IMethod&gt; imethod = MyTestModelStore.org.reflections.TestModel$C4.m1.class;
 *  Method method = JavaCodeSerializer.resolve(imethod);
 * </pre>
 * <p>depends on Reflections configured with {@link org.reflections.scanners.TypeElementsScanner}
 * <p><p>the {@link #save(org.reflections.Reflections, String)} method filename should be in the pattern: path/path/path/package.package.classname
 * */
public class JavaCodeSerializer implements Serializer {
    // encoding tokens used when flattening fqns into legal Java identifiers
    private static final String pathSeparator = "_";
    private static final String doubleSeparator = "__";
    private static final String dotSeparator = ".";
    private static final String arrayDescriptor = "$$";
    private static final String tokenSeparator = "_";

    /** reading back is not supported for the java-code form. */
    public Reflections read(InputStream inputStream) {
        throw new UnsupportedOperationException("read is not implemented on JavaCodeSerializer");
    }

    /**
     * name should be in the pattern: path/path/path/package.package.classname,
     * for example <pre>/data/projects/my/src/main/java/org.my.project.MyStore</pre>
     * would create class MyStore in package org.my.project in the path /data/projects/my/src/main/java
     */
    public File save(Reflections reflections, String name) {
        if (name.endsWith("/")) {
            name = name.substring(0, name.length() - 1); //trim / at the end
        }
        //prepare file
        String filename = name.replace('.', '/').concat(".java");
        File file = prepareFile(filename);
        //get package and class names
        String packageName;
        String className;
        int lastDot = name.lastIndexOf('.');
        if (lastDot == -1) {
            packageName = "";
            className = name.substring(name.lastIndexOf('/') + 1);
        } else {
            packageName = name.substring(name.lastIndexOf('/') + 1, lastDot);
            className = name.substring(lastDot + 1);
        }
        //generate
        try {
            StringBuilder sb = new StringBuilder();
            sb.append("//generated using Reflections JavaCodeSerializer")
                    .append(" [").append(new Date()).append("]")
                    .append("\n");
            if (packageName.length() != 0) {
                sb.append("package ").append(packageName).append(";\n");
                sb.append("\n");
            }
            sb.append("public interface ").append(className).append(" {\n\n");
            sb.append(toString(reflections));
            sb.append("}\n");
            // write to the file prepareFile() already created (parent dirs exist);
            // previously a fresh File(filename) was constructed for the same path
            Files.write(sb.toString(), file, Charset.defaultCharset());
        } catch (IOException e) {
            // previously the cause was silently dropped (bare RuntimeException)
            throw new RuntimeException("could not save to " + filename, e);
        }
        return file;
    }

    /** renders the whole store as the body of the generated top-level interface. */
    public String toString(Reflections reflections) {
        if (reflections.getStore().get(TypeElementsScanner.class.getSimpleName()).isEmpty()) {
            if (log != null) log.warn("JavaCodeSerializer needs TypeElementsScanner configured");
        }
        StringBuilder sb = new StringBuilder();
        List<String> prevPaths = Lists.newArrayList();
        int indent = 1;
        // iterate type fqns in sorted order so shared package prefixes nest correctly
        List<String> keys = Lists.newArrayList(reflections.getStore().get(TypeElementsScanner.class.getSimpleName()).keySet());
        Collections.sort(keys);
        for (String fqn : keys) {
            List<String> typePaths = Lists.newArrayList(fqn.split("\\."));
            //skip indention
            int i = 0;
            while (i < Math.min(typePaths.size(), prevPaths.size()) && typePaths.get(i).equals(prevPaths.get(i))) {
                i++;
            }
            //indent left
            for (int j = prevPaths.size(); j > i; j--) {
                sb.append(repeat("\t", --indent)).append("}\n");
            }
            //indent right - add packages
            for (int j = i; j < typePaths.size() - 1; j++) {
                sb.append(repeat("\t", indent++)).append("public interface ").append(getNonDuplicateName(typePaths.get(j), typePaths, j)).append(" {\n");
            }
            //indent right - add class
            String className = typePaths.get(typePaths.size() - 1);
            //get fields and methods
            List<String> annotations = Lists.newArrayList();
            List<String> fields = Lists.newArrayList();
            final Multimap<String, String> methods = Multimaps.newSetMultimap(new HashMap<String, Collection<String>>(), new Supplier<Set<String>>() {
                public Set<String> get() {
                    return Sets.newHashSet();
                }
            });
            for (String element : reflections.getStore().get(TypeElementsScanner.class.getSimpleName(), fqn)) {
                if (element.startsWith("@")) {
                    annotations.add(element.substring(1));
                } else if (element.contains("(")) {
                    //method
                    if (!element.startsWith("<")) {
                        int i1 = element.indexOf('(');
                        String name = element.substring(0, i1);
                        String params = element.substring(i1 + 1, element.indexOf(")"));
                        String paramsDescriptor = "";
                        if (params.length() != 0) {
                            paramsDescriptor = tokenSeparator + params.replace(dotSeparator, tokenSeparator).replace(", ", doubleSeparator).replace("[]", arrayDescriptor);
                        }
                        String normalized = name + paramsDescriptor;
                        methods.put(name, normalized);
                    }
                } else if (!Utils.isEmpty(element)) {
                    //field
                    fields.add(element);
                }
            }
            //add class and it's fields and methods
            sb.append(repeat("\t", indent++)).append("public interface ").append(getNonDuplicateName(className, typePaths, typePaths.size() - 1)).append(" {\n");
            //add fields
            if (!fields.isEmpty()) {
                sb.append(repeat("\t", indent++)).append("public interface fields {\n");
                for (String field : fields) {
                    sb.append(repeat("\t", indent)).append("public interface ").append(getNonDuplicateName(field, typePaths)).append(" {}\n");
                }
                sb.append(repeat("\t", --indent)).append("}\n");
            }
            //add methods
            if (!methods.isEmpty()) {
                sb.append(repeat("\t", indent++)).append("public interface methods {\n");
                for (Map.Entry<String, String> entry : methods.entries()) {
                    String simpleName = entry.getKey();
                    String normalized = entry.getValue();
                    // a non-overloaded method keeps its simple name; overloads get the
                    // parameter-encoded name to stay unique
                    String methodName = methods.get(simpleName).size() == 1 ? simpleName : normalized;
                    methodName = getNonDuplicateName(methodName, fields);
                    sb.append(repeat("\t", indent)).append("public interface ").append(getNonDuplicateName(methodName, typePaths)).append(" {}\n");
                }
                sb.append(repeat("\t", --indent)).append("}\n");
            }
            //add annotations
            if (!annotations.isEmpty()) {
                sb.append(repeat("\t", indent++)).append("public interface annotations {\n");
                for (String annotation : annotations) {
                    String nonDuplicateName = annotation;
                    nonDuplicateName = getNonDuplicateName(nonDuplicateName, typePaths);
                    sb.append(repeat("\t", indent)).append("public interface ").append(nonDuplicateName).append(" {}\n");
                }
                sb.append(repeat("\t", --indent)).append("}\n");
            }
            prevPaths = typePaths;
        }
        //close indention
        for (int j = prevPaths.size(); j >= 1; j--) {
            sb.append(repeat("\t", j)).append("}\n");
        }
        return sb.toString();
    }

    /** appends tokenSeparator until the candidate differs from all earlier path segments. */
    private String getNonDuplicateName(String candidate, List<String> prev, int offset) {
        String normalized = normalize(candidate);
        for (int i = 0; i < offset; i++) {
            if (normalized.equals(prev.get(i))) {
                return getNonDuplicateName(normalized + tokenSeparator, prev, offset);
            }
        }
        return normalized;
    }

    /** replaces dots with the path separator so the name is a legal identifier. */
    private String normalize(String candidate) {
        return candidate.replace(dotSeparator, pathSeparator);
    }

    private String getNonDuplicateName(String candidate, List<String> prev) {
        return getNonDuplicateName(candidate, prev, prev.size());
    }

    //
    /** walks up the declaring-class chain of a generated marker interface and loads the real class. */
    public static Class<?> resolveClassOf(final Class element) throws ClassNotFoundException {
        Class<?> cursor = element;
        LinkedList<String> ognl = Lists.newLinkedList();
        while (cursor != null) {
            ognl.addFirst(cursor.getSimpleName());
            cursor = cursor.getDeclaringClass();
        }
        // drop the store root, then rebuild the fqn (nested classes use '$')
        String classOgnl = Joiner.on(".").join(ognl.subList(1, ognl.size())).replace(".$", "$");
        return Class.forName(classOgnl);
    }

    public static Class<?> resolveClass(final Class aClass) {
        try {
            return resolveClassOf(aClass);
        } catch (Exception e) {
            throw new ReflectionsException("could not resolve to class " + aClass.getName(), e);
        }
    }

    public static Field resolveField(final Class aField) {
        try {
            String name = aField.getSimpleName();
            Class<?> declaringClass = aField.getDeclaringClass().getDeclaringClass();
            return resolveClassOf(declaringClass).getDeclaredField(name);
        } catch (Exception e) {
            throw new ReflectionsException("could not resolve to field " + aField.getName(), e);
        }
    }

    public static Annotation resolveAnnotation(Class annotation) {
        try {
            String name = annotation.getSimpleName().replace(pathSeparator, dotSeparator);
            Class<?> declaringClass = annotation.getDeclaringClass().getDeclaringClass();
            Class<?> aClass = resolveClassOf(declaringClass);
            Class<? extends Annotation> aClass1 = (Class<? extends Annotation>) ReflectionUtils.forName(name);
            Annotation annotation1 = aClass.getAnnotation(aClass1);
            return annotation1;
        } catch (Exception e) {
            throw new ReflectionsException("could not resolve to annotation " + annotation.getName(), e);
        }
    }

    public static Method resolveMethod(final Class aMethod) {
        String methodOgnl = aMethod.getSimpleName();
        try {
            String methodName;
            Class<?>[] paramTypes;
            if (methodOgnl.contains(tokenSeparator)) {
                // overloaded form: decode parameter types from the encoded suffix
                methodName = methodOgnl.substring(0, methodOgnl.indexOf(tokenSeparator));
                String[] params = methodOgnl.substring(methodOgnl.indexOf(tokenSeparator) + 1).split(doubleSeparator);
                paramTypes = new Class<?>[params.length];
                for (int i = 0; i < params.length; i++) {
                    String typeName = params[i].replace(arrayDescriptor, "[]").replace(pathSeparator, dotSeparator);
                    paramTypes[i] = ReflectionUtils.forName(typeName);
                }
            } else {
                methodName = methodOgnl;
                paramTypes = null;
            }
            Class<?> declaringClass = aMethod.getDeclaringClass().getDeclaringClass();
            return resolveClassOf(declaringClass).getDeclaredMethod(methodName, paramTypes);
        } catch (Exception e) {
            throw new ReflectionsException("could not resolve to method " + aMethod.getName(), e);
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/serializers/JsonSerializer.java
|
package org.reflections.serializers;
import com.google.common.base.Supplier;
import com.google.common.collect.*;
import com.google.common.io.Files;
import com.google.gson.*;
import org.reflections.Reflections;
import org.reflections.util.Utils;
import java.io.*;
import java.lang.reflect.Type;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/** serialization of Reflections to json
 *
 * <p>an example of produced json:
 * <pre>
 * {"store":{"storeMap":
 *    {"org.reflections.scanners.TypeAnnotationsScanner":{
 *       "org.reflections.TestModel$AC1":["org.reflections.TestModel$C1"],
 *       "org.reflections.TestModel$AC2":["org.reflections.TestModel$I3",
 * ...
 * </pre>
 * */
public class JsonSerializer implements Serializer {
    // lazily built, cached gson instance
    private Gson gson;

    public Reflections read(InputStream inputStream) {
        return getGson().fromJson(new InputStreamReader(inputStream), Reflections.class);
    }

    public File save(Reflections reflections, String filename) {
        final File file;
        try {
            file = Utils.prepareFile(filename);
            Files.write(toString(reflections), file, Charset.defaultCharset());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return file;
    }

    public String toString(Reflections reflections) {
        return getGson().toJson(reflections);
    }

    private Gson getGson() {
        if (gson != null) {
            return gson;
        }
        // Multimap has no default gson representation: serialize it as its
        // Map<K, Collection<V>> view and deserialize back into a set-multimap.
        com.google.gson.JsonSerializer<Multimap> multimapSerializer =
                new com.google.gson.JsonSerializer<Multimap>() {
                    public JsonElement serialize(Multimap multimap, Type type, JsonSerializationContext jsonSerializationContext) {
                        return jsonSerializationContext.serialize(multimap.asMap());
                    }
                };
        JsonDeserializer<Multimap> multimapDeserializer =
                new JsonDeserializer<Multimap>() {
                    public Multimap deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
                        final SetMultimap<String, String> map = Multimaps.newSetMultimap(new HashMap<String, Collection<String>>(), new Supplier<Set<String>>() {
                            public Set<String> get() {
                                return Sets.newHashSet();
                            }
                        });
                        for (Map.Entry<String, JsonElement> entry : ((JsonObject) jsonElement).entrySet()) {
                            for (JsonElement element : (JsonArray) entry.getValue()) {
                                map.get(entry.getKey()).add(element.getAsString());
                            }
                        }
                        return map;
                    }
                };
        gson = new GsonBuilder()
                .registerTypeAdapter(Multimap.class, multimapSerializer)
                .registerTypeAdapter(Multimap.class, multimapDeserializer)
                .setPrettyPrinting()
                .create();
        return gson;
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/serializers/Serializer.java
|
package org.reflections.serializers;
import org.reflections.Reflections;
import java.io.File;
import java.io.InputStream;
/** Serializer of a {@link org.reflections.Reflections} instance */
public interface Serializer {
    /** reads the input stream into a new Reflections instance, populating its store */
    Reflections read(InputStream inputStream);
    /** saves a Reflections instance into the given filename */
    File save(Reflections reflections, String filename);
    /** returns a string serialization of the given Reflections instance */
    String toString(Reflections reflections);
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/serializers/XmlSerializer.java
|
package org.reflections.serializers;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.SAXReader;
import org.dom4j.io.XMLWriter;
import org.reflections.Reflections;
import org.reflections.ReflectionsException;
import org.reflections.Store;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.util.Utils;
import java.io.*;
import java.lang.reflect.Constructor;
/** serialization of Reflections to xml
*
* <p>an example of produced xml:
* <pre>
* <?xml version="1.0" encoding="UTF-8"?>
*
* <Reflections>
* <SubTypesScanner>
* <entry>
* <key>com.google.inject.Module</key>
* <values>
* <value>fully.qualified.name.1</value>
* <value>fully.qualified.name.2</value>
* ...
* </pre>
* */
public class XmlSerializer implements Serializer {
public Reflections read(InputStream inputStream) {
Reflections reflections;
try {
Constructor<Reflections> constructor = Reflections.class.getDeclaredConstructor();
constructor.setAccessible(true);
reflections = constructor.newInstance();
} catch (Exception e) {
reflections = new Reflections(new ConfigurationBuilder());
}
try {
Document document = new SAXReader().read(inputStream);
for (Object e1 : document.getRootElement().elements()) {
Element index = (Element) e1;
for (Object e2 : index.elements()) {
Element entry = (Element) e2;
Element key = entry.element("key");
Element values = entry.element("values");
for (Object o3 : values.elements()) {
Element value = (Element) o3;
reflections.getStore().getOrCreate(index.getName()).put(key.getText(), value.getText());
}
}
}
} catch (DocumentException e) {
throw new ReflectionsException("could not read.", e);
} catch (Throwable e) {
throw new RuntimeException("Could not read. Make sure relevant dependencies exist on classpath.", e);
}
return reflections;
}
public File save(final Reflections reflections, final String filename) {
File file = Utils.prepareFile(filename);
try {
Document document = createDocument(reflections);
XMLWriter xmlWriter = new XMLWriter(new FileOutputStream(file), OutputFormat.createPrettyPrint());
xmlWriter.write(document);
xmlWriter.close();
} catch (IOException e) {
throw new ReflectionsException("could not save to file " + filename, e);
} catch (Throwable e) {
throw new RuntimeException("Could not save to file " + filename + ". Make sure relevant dependencies exist on classpath.", e);
}
return file;
}
public String toString(final Reflections reflections) {
Document document = createDocument(reflections);
try {
StringWriter writer = new StringWriter();
XMLWriter xmlWriter = new XMLWriter(writer, OutputFormat.createPrettyPrint());
xmlWriter.write(document);
xmlWriter.close();
return writer.toString();
} catch (IOException e) {
throw new RuntimeException();
}
}
private Document createDocument(final Reflections reflections) {
Store map = reflections.getStore();
Document document = DocumentFactory.getInstance().createDocument();
Element root = document.addElement("Reflections");
for (String indexName : map.keySet()) {
Element indexElement = root.addElement(indexName);
for (String key : map.get(indexName).keySet()) {
Element entryElement = indexElement.addElement("entry");
entryElement.addElement("key").setText(key);
Element valuesElement = entryElement.addElement("values");
for (String value : map.get(indexName).get(key)) {
valuesElement.addElement("value").setText(value);
}
}
}
return document;
}
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/util/ClasspathHelper.java
|
package org.reflections.util;
import com.google.common.collect.Sets;
import org.reflections.Reflections;
import javax.servlet.ServletContext;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLDecoder;
import java.util.*;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
/**
* Helper methods for working with the classpath.
*/
public abstract class ClasspathHelper {
/**
* Gets the current thread context class loader.
* {@code Thread.currentThread().getContextClassLoader()}.
*
* @return the context class loader, may be null
*/
public static ClassLoader contextClassLoader() {
return Thread.currentThread().getContextClassLoader();
}
/**
* Gets the class loader of this library.
* {@code Reflections.class.getClassLoader()}.
*
* @return the static library class loader, may be null
*/
public static ClassLoader staticClassLoader() {
return Reflections.class.getClassLoader();
}
/**
* Returns an array of class Loaders initialized from the specified array.
* <p>
* If the input is null or empty, it defaults to both {@link #contextClassLoader()} and {@link #staticClassLoader()}
*
* @return the array of class loaders, not null
*/
public static ClassLoader[] classLoaders(ClassLoader... classLoaders) {
if (classLoaders != null && classLoaders.length != 0) {
return classLoaders;
} else {
ClassLoader contextClassLoader = contextClassLoader(), staticClassLoader = staticClassLoader();
return contextClassLoader != null ?
staticClassLoader != null && contextClassLoader != staticClassLoader ?
new ClassLoader[]{contextClassLoader, staticClassLoader} :
new ClassLoader[]{contextClassLoader} :
new ClassLoader[] {};
}
}
    /**
     * Returns a distinct collection of URLs based on a package name.
     * <p>
     * This searches for the package name as a resource, using {@link ClassLoader#getResources(String)}.
     * For example, {@code forPackage(org.reflections)} effectively returns URLs from the
     * classpath containing packages starting with {@code org.reflections}.
     * <p>
     * If the optional {@link ClassLoader}s are not specified, then both {@link #contextClassLoader()}
     * and {@link #staticClassLoader()} are used for {@link ClassLoader#getResources(String)}.
     * <p>
     * The returned URLs retains the order of the given {@code classLoaders}.
     *
     * @return the collection of URLs, not null
     */
    public static Collection<URL> forPackage(String name, ClassLoader... classLoaders) {
        // delegates to forResource; resourceName(name) presumably converts the package
        // name to a resource path (dots to slashes) — helper defined elsewhere in this class
        return forResource(resourceName(name), classLoaders);
    }
/**
* Returns a distinct collection of URLs based on a resource.
* <p>
* This searches for the resource name, using {@link ClassLoader#getResources(String)}.
* For example, {@code forResource(test.properties)} effectively returns URLs from the
* classpath containing files of that name.
* <p>
* If the optional {@link ClassLoader}s are not specified, then both {@link #contextClassLoader()}
* and {@link #staticClassLoader()} are used for {@link ClassLoader#getResources(String)}.
* <p>
* The returned URLs retains the order of the given {@code classLoaders}.
*
* @return the collection of URLs, not null
*/
public static Collection<URL> forResource(String resourceName, ClassLoader... classLoaders) {
final List<URL> result = new ArrayList<URL>();
final ClassLoader[] loaders = classLoaders(classLoaders);
for (ClassLoader classLoader : loaders) {
try {
final Enumeration<URL> urls = classLoader.getResources(resourceName);
while (urls.hasMoreElements()) {
final URL url = urls.nextElement();
int index = url.toExternalForm().lastIndexOf(resourceName);
if (index != -1) {
result.add(new URL(url.toExternalForm().substring(0, index)));
} else {
result.add(url);
}
}
} catch (IOException e) {
if (Reflections.log != null) {
Reflections.log.error("error getting resources for " + resourceName, e);
}
}
}
return distinctUrls(result);
}
/**
* Returns the URL that contains a {@code Class}.
* <p>
* This searches for the class using {@link ClassLoader#getResource(String)}.
* <p>
* If the optional {@link ClassLoader}s are not specified, then both {@link #contextClassLoader()}
* and {@link #staticClassLoader()} are used for {@link ClassLoader#getResources(String)}.
*
* @return the URL containing the class, null if not found
*/
public static URL forClass(Class<?> aClass, ClassLoader... classLoaders) {
final ClassLoader[] loaders = classLoaders(classLoaders);
final String resourceName = aClass.getName().replace(".", "/") + ".class";
for (ClassLoader classLoader : loaders) {
try {
final URL url = classLoader.getResource(resourceName);
if (url != null) {
final String normalizedUrl = url.toExternalForm().substring(0, url.toExternalForm().lastIndexOf(aClass.getPackage().getName().replace(".", "/")));
return new URL(normalizedUrl);
}
} catch (MalformedURLException e) {
if (Reflections.log != null) {
Reflections.log.warn("Could not get URL", e);
}
}
}
return null;
}
    /**
     * Returns a distinct collection of URLs based on URLs derived from class loaders.
     * <p>
     * This finds the URLs using {@link URLClassLoader#getURLs()} using both
     * {@link #contextClassLoader()} and {@link #staticClassLoader()}.
     * <p>
     * The returned URLs retains the order of the given {@code classLoaders}.
     *
     * @return the collection of URLs, not null
     */
    public static Collection<URL> forClassLoader() {
        // convenience overload: uses the default context + static class loaders
        return forClassLoader(classLoaders());
    }
/**
 * Returns a distinct collection of URLs derived from the given class loaders.
 * <p>
 * For every loader (or, when none are given, for {@link #contextClassLoader()} and
 * {@link #staticClassLoader()}), walks up the parent chain and collects
 * {@link URLClassLoader#getURLs()} from each {@link URLClassLoader} encountered.
 * <p>
 * The returned URLs retain the order of the given {@code classLoaders}.
 *
 * @return the collection of URLs, not null
 */
public static Collection<URL> forClassLoader(ClassLoader... classLoaders) {
    final Collection<URL> collected = new ArrayList<URL>();
    for (ClassLoader loader : classLoaders(classLoaders)) {
        // climb the parent hierarchy, harvesting URLs from every URLClassLoader
        for (ClassLoader current = loader; current != null; current = current.getParent()) {
            if (current instanceof URLClassLoader) {
                final URL[] loaderUrls = ((URLClassLoader) current).getURLs();
                if (loaderUrls != null) {
                    collected.addAll(Arrays.asList(loaderUrls));
                }
            }
        }
    }
    return distinctUrls(collected);
}
/**
 * Returns a distinct collection of URLs built from the {@code java.class.path}
 * system property.
 * <p>
 * Each classpath entry is converted to a {@code file:} URL; entries that cannot be
 * converted are logged (when a logger is bound) and skipped.
 * <p>
 * The returned collection of URLs retains the classpath order.
 *
 * @return the collection of URLs, not null
 */
public static Collection<URL> forJavaClassPath() {
    final Collection<URL> found = new ArrayList<URL>();
    final String classPath = System.getProperty("java.class.path");
    if (classPath != null) {
        for (String entry : classPath.split(File.pathSeparator)) {
            try {
                found.add(new File(entry).toURI().toURL());
            } catch (Exception e) {
                if (Reflections.log != null) {
                    Reflections.log.warn("Could not get URL", e);
                }
            }
        }
    }
    return distinctUrls(found);
}
/**
 * Returns a distinct collection of URLs based on the {@code WEB-INF/lib} folder.
 * <p>
 * This finds the URLs using the {@link ServletContext}. If the folder does not
 * exist, an empty collection is returned.
 *
 * @return the collection of URLs, not null
 */
public static Collection<URL> forWebInfLib(final ServletContext servletContext) {
    final Collection<URL> urls = new ArrayList<URL>();
    // ServletContext.getResourcePaths is specified to return null when the
    // directory does not exist; the previous version then threw a
    // NullPointerException in the for-each below.
    final java.util.Set<?> resourcePaths = servletContext.getResourcePaths("/WEB-INF/lib");
    if (resourcePaths == null) {
        return urls;
    }
    for (Object urlString : resourcePaths) {
        try {
            urls.add(servletContext.getResource((String) urlString));
        } catch (MalformedURLException e) {
            // skip entries that cannot be expressed as a URL
        }
    }
    return distinctUrls(urls);
}
/**
 * Returns the URL of the {@code WEB-INF/classes} folder.
 * <p>
 * This finds the URL using the {@link ServletContext}: the real filesystem path
 * when available, otherwise the context resource.
 *
 * @return the URL of the folder, or null if it cannot be resolved
 */
public static URL forWebInfClasses(final ServletContext servletContext) {
    try {
        final String path = servletContext.getRealPath("/WEB-INF/classes");
        if (path != null) {
            final File file = new File(path);
            if (file.exists()) {
                // toURI().toURL() instead of the deprecated File.toURL(), which does
                // not escape characters that are illegal in URLs (e.g. spaces)
                return file.toURI().toURL();
            }
        } else {
            return servletContext.getResource("/WEB-INF/classes");
        }
    } catch (MalformedURLException e) {
        // fall through and report the folder as unresolvable
    }
    return null;
}
/**
 * Returns a distinct collection of URLs based on URLs derived from class loaders expanded with Manifest information.
 * <p>
 * The {@code MANIFEST.MF} file can contain a {@code Class-Path} entry that defines
 * additional jar files to be included on the classpath. This method finds the jar files
 * using the {@link #contextClassLoader()} and {@link #staticClassLoader()}, before
 * searching for any additional manifest classpaths.
 *
 * @return the collection of URLs, not null
 */
public static Collection<URL> forManifest() {
    // expand the default class-loader URLs with any manifest Class-Path entries
    return forManifest(forClassLoader());
}
/**
 * Returns a distinct collection of URLs from a single URL based on the Manifest information.
 * <p>
 * The {@code MANIFEST.MF} file can contain a {@code Class-Path} entry that defines additional
 * jar files to be included on the classpath. This method takes a single URL, tries to
 * resolve it as a jar file, and if so, adds any additional manifest classpaths.
 * The returned collection of URLs will always contain the input URL.
 *
 * @return the collection of URLs, not null
 */
public static Collection<URL> forManifest(final URL url) {
    final Collection<URL> result = new ArrayList<URL>();
    result.add(url);
    JarFile jarFile = null; // closed in finally; the previous version leaked it
    try {
        final String part = cleanPath(url);
        final File file = new File(part);
        jarFile = new JarFile(part);
        URL validUrl = tryToGetValidUrl(file.getPath(), file.getParent(), part);
        if (validUrl != null) {
            result.add(validUrl);
        }
        final Manifest manifest = jarFile.getManifest();
        if (manifest != null) {
            // Class-Path is a space-separated list of (usually relative) jar names
            final String classPath = manifest.getMainAttributes().getValue(Attributes.Name.CLASS_PATH);
            if (classPath != null) {
                for (String jar : classPath.split(" ")) {
                    validUrl = tryToGetValidUrl(file.getPath(), file.getParent(), jar);
                    if (validUrl != null) {
                        result.add(validUrl);
                    }
                }
            }
        }
    } catch (IOException e) {
        // not a jar (or unreadable) — return just the original URL
    } finally {
        if (jarFile != null) {
            try {
                jarFile.close();
            } catch (IOException ignored) {
                // nothing useful to do on close failure
            }
        }
    }
    return distinctUrls(result);
}
/**
 * Returns a distinct collection of URLs by expanding the specified URLs with Manifest information.
 * <p>
 * Each input URL is expanded via {@link #forManifest(URL)}, so jar files contribute
 * any additional {@code Class-Path} entries from their {@code MANIFEST.MF}.
 * The returned collection always contains all the input URLs, in input order.
 *
 * @return the collection of URLs, not null
 */
public static Collection<URL> forManifest(final Iterable<URL> urls) {
    final Collection<URL> expanded = new ArrayList<URL>();
    for (final URL url : urls) {
        expanded.addAll(forManifest(url));
    }
    return distinctUrls(expanded);
}
// Resolves a manifest Class-Path entry to a URL by probing, in order: the entry as
// given, the entry relative to the jar's parent directory, the entry relative to the
// working directory, and finally the entry interpreted as a URL itself.
// Returns null when none of the candidates exists (or the entry is malformed).
static URL tryToGetValidUrl(String workingDir, String path, String filename) {
    final File[] candidates = {
            new File(filename),
            new File(path + File.separator + filename),
            new File(workingDir + File.separator + filename)
    };
    try {
        for (File candidate : candidates) {
            if (candidate.exists()) {
                return candidate.toURI().toURL();
            }
        }
        final File fromUrl = new File(new URL(filename).getFile());
        if (fromUrl.exists()) {
            return fromUrl.toURI().toURL();
        }
    } catch (MalformedURLException e) {
        // treat unresolvable entries as absent
    }
    return null;
}
/**
 * Cleans the URL into a plain filesystem-style path: URL-decodes it, strips any
 * {@code jar:} / {@code file:} prefixes, and turns a trailing {@code !/} jar
 * separator into a directory slash.
 *
 * @param url the URL to clean, not null
 * @return the path, not null
 */
public static String cleanPath(final URL url) {
    String path = url.getPath();
    try {
        path = URLDecoder.decode(path, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported; keep the raw path if it somehow is not
    }
    for (String prefix : new String[]{"jar:", "file:"}) {
        if (path.startsWith(prefix)) {
            path = path.substring(prefix.length());
        }
    }
    if (path.endsWith("!/")) {
        path = path.substring(0, path.lastIndexOf("!/")) + "/";
    }
    return path;
}
// Converts a dotted or backslashed name into a forward-slash resource path with no
// leading slash; returns null for null input.
private static String resourceName(String name) {
    if (name == null) {
        return null;
    }
    final String slashed = name.replace(".", "/").replace("\\", "/");
    return slashed.startsWith("/") ? slashed.substring(1) : slashed;
}
//http://michaelscharf.blogspot.co.il/2006/11/javaneturlequals-and-hashcode-make.html
// Deduplicates URLs by their external (string) form. java.net.URL#equals/hashCode
// may trigger blocking DNS resolution (see link above), so a LinkedHashMap keyed
// on the string form is used instead; insertion order is preserved.
private static Collection<URL> distinctUrls(Collection<URL> urls) {
    Map<String, URL> distinct = new LinkedHashMap<String, URL>(urls.size());
    for (URL url : urls) {
        distinct.put(url.toExternalForm(), url);
    }
    return distinct.values();
}
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/util/ConfigurationBuilder.java
|
package org.reflections.util;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import com.google.common.collect.ObjectArrays;
import com.google.common.collect.Sets;
import org.reflections.Configuration;
import org.reflections.Reflections;
import org.reflections.ReflectionsException;
import org.reflections.adapters.JavaReflectionAdapter;
import org.reflections.adapters.JavassistAdapter;
import org.reflections.adapters.MetadataAdapter;
import org.reflections.scanners.Scanner;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.TypeAnnotationsScanner;
import org.reflections.serializers.Serializer;
import org.reflections.serializers.XmlSerializer;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.net.URL;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* a fluent builder for {@link org.reflections.Configuration}, to be used for constructing a {@link org.reflections.Reflections} instance
* <p>usage:
* <pre>
* new Reflections(
* new ConfigurationBuilder()
* .filterInputsBy(new FilterBuilder().include("your project's common package prefix here..."))
* .setUrls(ClasspathHelper.forClassLoader())
* .setScanners(new SubTypesScanner(), new TypeAnnotationsScanner().filterResultsBy(myClassAnnotationsFilter)));
* </pre>
* <br>{@link #executorService} is used optionally used for parallel scanning. if value is null then scanning is done in a simple for loop
* <p>defaults: accept all for {@link #inputsFilter},
* {@link #executorService} is null,
* {@link #serializer} is {@link org.reflections.serializers.XmlSerializer}
*/
public class ConfigurationBuilder implements Configuration {
    // scanners to run; defaults set in the constructor
    @Nonnull private Set<Scanner> scanners;
    // urls to be scanned
    @Nonnull private Set<URL> urls;
    /*lazy*/ protected MetadataAdapter metadataAdapter;
    // optional filter applied to every scanned resource/class name
    @Nullable private Predicate<String> inputsFilter;
    /*lazy*/ private Serializer serializer;
    // when null, scanning runs sequentially on the calling thread
    @Nullable private ExecutorService executorService;
    // class loaders used for resolving methods/fields; null means defaults
    @Nullable private ClassLoader[] classLoaders;

    public ConfigurationBuilder() {
        scanners = Sets.<Scanner>newHashSet(new TypeAnnotationsScanner(), new SubTypesScanner());
        urls = Sets.newHashSet();
    }

    /** constructs a {@link ConfigurationBuilder} using the given parameters, in a non statically typed way. that is, each element in {@code params} is
     * guessed by its type and populated into the configuration.
     * <ul>
     *     <li>{@link String} - add urls using {@link ClasspathHelper#forPackage(String, ClassLoader...)} ()}</li>
     *     <li>{@link Class} - add urls using {@link ClasspathHelper#forClass(Class, ClassLoader...)} </li>
     *     <li>{@link ClassLoader} - use these classloaders in order to find urls in ClasspathHelper.forPackage(), ClasspathHelper.forClass() and for resolving types</li>
     *     <li>{@link Scanner} - use given scanner, overriding the default scanners</li>
     *     <li>{@link URL} - add the given url for scanning</li>
     *     <li>{@code Object[]} - flatten and use each element as above</li>
     * </ul>
     *
     * use any parameter type in any order. this constructor uses instanceof on each param and instantiate a {@link ConfigurationBuilder} appropriately.
     * */
    @SuppressWarnings("unchecked")
    public static ConfigurationBuilder build(final @Nullable Object... params) {
        ConfigurationBuilder builder = new ConfigurationBuilder();
        //flatten: expand one level of arrays/iterables, dropping nulls
        List<Object> parameters = Lists.newArrayList();
        if (params != null) {
            for (Object param : params) {
                if (param != null) {
                    if (param.getClass().isArray()) { for (Object p : (Object[]) param) if (p != null) parameters.add(p); }
                    else if (param instanceof Iterable) { for (Object p : (Iterable) param) if (p != null) parameters.add(p); }
                    else parameters.add(param);
                }
            }
        }
        // collect class loaders first — they are needed when resolving the
        // String/Class params in the dispatch loop below
        List<ClassLoader> loaders = Lists.newArrayList();
        for (Object param : parameters) if (param instanceof ClassLoader) loaders.add((ClassLoader) param);
        ClassLoader[] classLoaders = loaders.isEmpty() ? null : loaders.toArray(new ClassLoader[loaders.size()]);
        FilterBuilder filter = new FilterBuilder();
        List<Scanner> scanners = Lists.newArrayList();
        // dispatch each remaining param by its runtime type
        for (Object param : parameters) {
            if (param instanceof String) {
                builder.addUrls(ClasspathHelper.forPackage((String) param, classLoaders));
                filter.includePackage((String) param);
            }
            else if (param instanceof Class) {
                // a Class param that is itself a Scanner implementation is instantiated and used as one
                if (Scanner.class.isAssignableFrom((Class) param)) {
                    try { builder.addScanners(((Scanner) ((Class) param).newInstance())); } catch (Exception e) { /*fallback*/ }
                }
                builder.addUrls(ClasspathHelper.forClass((Class) param, classLoaders));
                filter.includePackage(((Class) param));
            }
            else if (param instanceof Scanner) { scanners.add((Scanner) param); }
            else if (param instanceof URL) { builder.addUrls((URL) param); }
            else if (param instanceof ClassLoader) { /* already taken care */ }
            else if (param instanceof Predicate) { filter.add((Predicate<String>) param); }
            else if (param instanceof ExecutorService) { builder.setExecutorService((ExecutorService) param); }
            // NOTE(review): unrecognized params are only rejected when a logger is
            // bound — otherwise they are silently ignored; confirm this is intended
            else if (Reflections.log != null) { throw new ReflectionsException("could not use param " + param); }
        }
        // no urls resolved from the params — fall back to scanning the whole classpath
        if (builder.getUrls().isEmpty()) {
            if (classLoaders != null) {
                builder.addUrls(ClasspathHelper.forClassLoader(classLoaders)); //default urls getResources("")
            } else {
                builder.addUrls(ClasspathHelper.forClassLoader()); //default urls getResources("")
            }
        }
        builder.filterInputsBy(filter);
        if (!scanners.isEmpty()) { builder.setScanners(scanners.toArray(new Scanner[scanners.size()])); }
        if (!loaders.isEmpty()) { builder.addClassLoaders(loaders); }
        return builder;
    }

    /** adds the urls of the given packages (resolved via the default class loaders) */
    public ConfigurationBuilder forPackages(String... packages) {
        for (String pkg : packages) {
            addUrls(ClasspathHelper.forPackage(pkg));
        }
        return this;
    }

    @Nonnull
    public Set<Scanner> getScanners() {
        return scanners;
    }

    /** set the scanners instances for scanning different metadata */
    public ConfigurationBuilder setScanners(@Nonnull final Scanner... scanners) {
        this.scanners.clear();
        return addScanners(scanners);
    }

    /** set the scanners instances for scanning different metadata */
    public ConfigurationBuilder addScanners(final Scanner... scanners) {
        this.scanners.addAll(Sets.newHashSet(scanners));
        return this;
    }

    @Nonnull
    public Set<URL> getUrls() {
        return urls;
    }

    /** set the urls to be scanned
     * <p>use {@link org.reflections.util.ClasspathHelper} convenient methods to get the relevant urls
     * */
    public ConfigurationBuilder setUrls(@Nonnull final Collection<URL> urls) {
        this.urls = Sets.newHashSet(urls);
        return this;
    }

    /** set the urls to be scanned
     * <p>use {@link org.reflections.util.ClasspathHelper} convenient methods to get the relevant urls
     * */
    public ConfigurationBuilder setUrls(final URL... urls) {
        this.urls = Sets.newHashSet(urls);
        return this;
    }

    /** add urls to be scanned
     * <p>use {@link org.reflections.util.ClasspathHelper} convenient methods to get the relevant urls
     * */
    public ConfigurationBuilder addUrls(final Collection<URL> urls) {
        this.urls.addAll(urls);
        return this;
    }

    /** add urls to be scanned
     * <p>use {@link org.reflections.util.ClasspathHelper} convenient methods to get the relevant urls
     * */
    public ConfigurationBuilder addUrls(final URL... urls) {
        this.urls.addAll(Sets.newHashSet(urls));
        return this;
    }

    /** returns the metadata adapter.
     * if javassist library exists in the classpath, this method returns {@link JavassistAdapter} otherwise defaults to {@link JavaReflectionAdapter}.
     * <p>the {@link JavassistAdapter} is preferred in terms of performance and class loading. */
    public MetadataAdapter getMetadataAdapter() {
        if (metadataAdapter != null) return metadataAdapter;
        else {
            try {
                // lazily created; cached for subsequent calls
                return (metadataAdapter = new JavassistAdapter());
            } catch (Throwable e) {
                if (Reflections.log != null)
                    Reflections.log.warn("could not create JavassistAdapter, using JavaReflectionAdapter", e);
                return (metadataAdapter = new JavaReflectionAdapter());
            }
        }
    }

    /** sets the metadata adapter used to fetch metadata from classes */
    public ConfigurationBuilder setMetadataAdapter(final MetadataAdapter metadataAdapter) {
        this.metadataAdapter = metadataAdapter;
        return this;
    }

    @Nullable
    public Predicate<String> getInputsFilter() {
        return inputsFilter;
    }

    /** sets the input filter for all resources to be scanned.
     * <p> supply a {@link com.google.common.base.Predicate} or use the {@link FilterBuilder}*/
    public void setInputsFilter(@Nullable Predicate<String> inputsFilter) {
        this.inputsFilter = inputsFilter;
    }

    /** sets the input filter for all resources to be scanned.
     * <p> supply a {@link com.google.common.base.Predicate} or use the {@link FilterBuilder}*/
    public ConfigurationBuilder filterInputsBy(Predicate<String> inputsFilter) {
        this.inputsFilter = inputsFilter;
        return this;
    }

    @Nullable
    public ExecutorService getExecutorService() {
        return executorService;
    }

    /** sets the executor service used for scanning. */
    public ConfigurationBuilder setExecutorService(@Nullable ExecutorService executorService) {
        this.executorService = executorService;
        return this;
    }

    /** sets the executor service used for scanning to ThreadPoolExecutor with core size as {@link java.lang.Runtime#availableProcessors()}
     * <p>default is ThreadPoolExecutor with a single core */
    public ConfigurationBuilder useParallelExecutor() {
        return useParallelExecutor(Runtime.getRuntime().availableProcessors());
    }

    /** sets the executor service used for scanning to ThreadPoolExecutor with core size as the given availableProcessors parameter
     * <p>default is ThreadPoolExecutor with a single core */
    public ConfigurationBuilder useParallelExecutor(final int availableProcessors) {
        setExecutorService(Executors.newFixedThreadPool(availableProcessors));
        return this;
    }

    public Serializer getSerializer() {
        return serializer != null ? serializer : (serializer = new XmlSerializer()); //lazily defaults to XmlSerializer
    }

    /** sets the serializer used when issuing {@link org.reflections.Reflections#save} */
    public ConfigurationBuilder setSerializer(Serializer serializer) {
        this.serializer = serializer;
        return this;
    }

    /** get class loader, might be used for scanning or resolving methods/fields */
    @Nullable
    public ClassLoader[] getClassLoaders() {
        return classLoaders;
    }

    /** set class loader, might be used for resolving methods/fields */
    public void setClassLoaders(@Nullable ClassLoader[] classLoaders) {
        this.classLoaders = classLoaders;
    }

    /** add class loader, might be used for resolving methods/fields */
    public ConfigurationBuilder addClassLoader(ClassLoader classLoader) {
        return addClassLoaders(classLoader);
    }

    /** add class loader, might be used for resolving methods/fields */
    public ConfigurationBuilder addClassLoaders(ClassLoader... classLoaders) {
        // appends to any previously configured loaders rather than replacing them
        this.classLoaders = this.classLoaders == null ? classLoaders : ObjectArrays.concat(this.classLoaders, classLoaders, ClassLoader.class);
        return this;
    }

    /** add class loader, might be used for resolving methods/fields */
    public ConfigurationBuilder addClassLoaders(Collection<ClassLoader> classLoaders) {
        return addClassLoaders(classLoaders.toArray(new ClassLoader[classLoaders.size()]));
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/util/FilterBuilder.java
|
package org.reflections.util;
import com.google.common.base.Predicate;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import org.reflections.ReflectionsException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
* Builds include/exclude filters for Reflections.
* <p>
* For example:
* <pre>
* Predicate&lt;String&gt; filter1 = FilterBuilder.parsePackages("-java, -javax");
* Predicate<String> filter2 = new FilterBuilder().include(".*").exclude("java.*");
* </pre>
*/
public class FilterBuilder implements Predicate<String> {
    // ordered chain of Include/Exclude predicates, evaluated by apply()
    private final List<Predicate<String>> chain;

    /** Creates an empty filter chain (accepts everything). */
    public FilterBuilder() {
        chain = Lists.newArrayList();
    }

    private FilterBuilder(final Iterable<Predicate<String>> filters) {
        chain = Lists.newArrayList(filters);
    }

    /** include a regular expression */
    public FilterBuilder include(final String regex) {
        return add(new Include(regex));
    }

    /** exclude a regular expression */
    public FilterBuilder exclude(final String regex) {
        return add(new Exclude(regex));
    }

    /** add a Predicate to the chain of predicates */
    public FilterBuilder add(Predicate<String> filter) {
        chain.add(filter);
        return this;
    }

    /** include a package of a given class */
    public FilterBuilder includePackage(final Class<?> aClass) {
        return add(new Include(packageNameRegex(aClass)));
    }

    /** exclude a package of a given class */
    public FilterBuilder excludePackage(final Class<?> aClass) {
        return add(new Exclude(packageNameRegex(aClass)));
    }

    /** include packages of given prefixes */
    public FilterBuilder includePackage(final String... prefixes) {
        for (String prefix : prefixes) {
            add(new Include(prefix(prefix)));
        }
        return this;
    }

    /** exclude a package of a given prefix */
    public FilterBuilder excludePackage(final String prefix) {
        return add(new Exclude(prefix(prefix)));
    }

    private static String packageNameRegex(Class<?> aClass) {
        return prefix(aClass.getPackage().getName() + ".");
    }

    /** escapes dots in the qualified name and appends {@code .*}, yielding a prefix regex */
    public static String prefix(String qualifiedName) {
        return qualifiedName.replace(".", "\\.") + ".*";
    }

    @Override
    public String toString() {
        return Joiner.on(", ").join(chain);
    }

    /**
     * Evaluates the chain against the given input.
     * <p>
     * The initial verdict is "accept" when the chain is empty or starts with an
     * {@link Exclude}; each filter may then flip the verdict, and the first
     * matching exclusion short-circuits the evaluation.
     */
    public boolean apply(String regex) {
        boolean accept = chain == null || chain.isEmpty() || chain.get(0) instanceof Exclude;
        if (chain != null) {
            for (Predicate<String> filter : chain) {
                if (accept && filter instanceof Include) {continue;} //skip if this filter won't change
                if (!accept && filter instanceof Exclude) {continue;}
                accept = filter.apply(regex);
                if (!accept && filter instanceof Exclude) {break;} //break on first exclusion
            }
        }
        return accept;
    }

    /** Base for regex-backed filters; the pattern is compiled once at construction. */
    public abstract static class Matcher implements Predicate<String> {
        final Pattern pattern;

        public Matcher(final String regex) {
            pattern = Pattern.compile(regex);
        }

        public abstract boolean apply(String regex);

        @Override
        public String toString() {
            return pattern.pattern();
        }
    }

    /** Accepts inputs that match the pattern. */
    public static class Include extends Matcher {
        public Include(final String patternString) {
            super(patternString);
        }

        @Override
        public boolean apply(final String regex) {
            return pattern.matcher(regex).matches();
        }

        @Override
        public String toString() {
            return "+" + super.toString();
        }
    }

    /** Accepts inputs that do NOT match the pattern. */
    public static class Exclude extends Matcher {
        public Exclude(final String patternString) {
            super(patternString);
        }

        @Override
        public boolean apply(final String regex) {
            return !pattern.matcher(regex).matches();
        }

        @Override
        public String toString() {
            return "-" + super.toString();
        }
    }

    /**
     * Parses a string representation of an include/exclude filter.
     * <p>
     * The given includeExcludeString is a comma separated list of regexes,
     * each starting with either + or - to indicate include/exclude.
     * <p>
     * For example parsePackages("-java\\..*, -javax\\..*, -sun\\..*, -com\\.sun\\..*")
     * or parse("+com\\.myn\\..*,-com\\.myn\\.excluded\\..*").
     * Note that "-java\\..*" will block "java.foo" but not "javax.foo".
     * <p>
     * See also the more useful {@link FilterBuilder#parsePackages(String)} method.
     */
    public static FilterBuilder parse(String includeExcludeString) {
        if (Utils.isEmpty(includeExcludeString)) {
            return new FilterBuilder();
        }
        List<Predicate<String>> filters = new ArrayList<Predicate<String>>();
        for (String string : includeExcludeString.split(",")) {
            String trimmed = string.trim();
            filters.add(createFilter(trimmed.charAt(0), trimmed.substring(1)));
        }
        return new FilterBuilder(filters);
    }

    /**
     * Parses a string representation of an include/exclude filter.
     * <p>
     * The given includeExcludeString is a comma separated list of package name segments,
     * each starting with either + or - to indicate include/exclude.
     * <p>
     * For example parsePackages("-java, -javax, -sun, -com.sun") or parse("+com.myn,-com.myn.excluded").
     * Note that "-java" will block "java.foo" but not "javax.foo".
     * <p>
     * The input strings "-java" and "-java." are equivalent.
     */
    public static FilterBuilder parsePackages(String includeExcludeString) {
        if (Utils.isEmpty(includeExcludeString)) {
            return new FilterBuilder();
        }
        List<Predicate<String>> filters = new ArrayList<Predicate<String>>();
        for (String string : includeExcludeString.split(",")) {
            String trimmed = string.trim();
            char prefix = trimmed.charAt(0);
            String pattern = trimmed.substring(1);
            // normalize "-java" to "-java." so sibling packages (javax) don't match
            if (!pattern.endsWith(".")) {
                pattern += ".";
            }
            filters.add(createFilter(prefix, prefix(pattern)));
        }
        return new FilterBuilder(filters);
    }

    /** Maps a '+'/'-' prefix character to an Include/Exclude filter for the given pattern. */
    private static Predicate<String> createFilter(char prefix, String pattern) {
        switch (prefix) {
            case '+':
                return new Include(pattern);
            case '-':
                return new Exclude(pattern);
            default:
                throw new ReflectionsException("includeExclude should start with either + or -");
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/util/Utils.java
|
package org.reflections.util;
import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import org.reflections.Reflections;
import org.reflections.ReflectionsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import static org.reflections.ReflectionUtils.forName;
/**
* a garbage can of convenient methods
*/
public abstract class Utils {
    /** returns {@code string} concatenated to itself {@code times} times */
    public static String repeat(String string, int times) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < times; i++) {
            sb.append(string);
        }
        return sb.toString();
    }

    /**
     * isEmpty compatible with Java 5
     */
    public static boolean isEmpty(String s) {
        return s == null || s.length() == 0;
    }

    /** true when the array is null or has no elements */
    public static boolean isEmpty(Object[] objects) {
        return objects == null || objects.length == 0;
    }

    /** returns a File for the given name, creating any missing parent directories */
    public static File prepareFile(String filename) {
        File file = new File(filename);
        File parent = file.getAbsoluteFile().getParentFile();
        if (!parent.exists()) {
            //noinspection ResultOfMethodCallIgnored
            parent.mkdirs();
        }
        return file;
    }

    /**
     * Resolves a member descriptor (e.g. {@code "com.Foo.bar(java.lang.String)"}) to the
     * corresponding {@link Field}, {@link Constructor} or {@link Method}, searching up
    * the superclass chain when the member is not declared on the class itself.
     *
     * @throws ReflectionsException when the member cannot be resolved
     */
    public static Member getMemberFromDescriptor(String descriptor, ClassLoader... classLoaders) throws ReflectionsException {
        // split "<class>.<member>(<params>)" at the last '(' — fields have no '('
        int p0 = descriptor.lastIndexOf('(');
        String memberKey = p0 != -1 ? descriptor.substring(0, p0) : descriptor;
        String methodParameters = p0 != -1 ? descriptor.substring(p0 + 1, descriptor.lastIndexOf(')')) : "";
        // the member name follows the last '.' or '$' (inner classes / <init>)
        int p1 = Math.max(memberKey.lastIndexOf('.'), memberKey.lastIndexOf("$"));
        // strip any leading modifiers before a space (e.g. "static com.Foo.bar")
        String className = memberKey.substring(memberKey.lastIndexOf(' ') + 1, p1);
        String memberName = memberKey.substring(p1 + 1);
        Class<?>[] parameterTypes = null;
        if (!isEmpty(methodParameters)) {
            String[] parameterNames = methodParameters.split(",");
            List<Class<?>> result = new ArrayList<Class<?>>(parameterNames.length);
            for (String name : parameterNames) {
                result.add(forName(name.trim(), classLoaders));
            }
            parameterTypes = result.toArray(new Class<?>[result.size()]);
        }
        Class<?> aClass = forName(className, classLoaders);
        // walk up the superclass chain until the member is found
        while (aClass != null) {
            try {
                if (!descriptor.contains("(")) {
                    return aClass.isInterface() ? aClass.getField(memberName) : aClass.getDeclaredField(memberName);
                } else if (isConstructor(descriptor)) {
                    return aClass.isInterface() ? aClass.getConstructor(parameterTypes) : aClass.getDeclaredConstructor(parameterTypes);
                } else {
                    return aClass.isInterface() ? aClass.getMethod(memberName, parameterTypes) : aClass.getDeclaredMethod(memberName, parameterTypes);
                }
            } catch (Exception e) {
                // not declared here — try the superclass
                aClass = aClass.getSuperclass();
            }
        }
        throw new ReflectionsException("Can't resolve member named " + memberName + " for class " + className);
    }

    /** resolves the method descriptors (ignoring constructor descriptors) to {@link Method}s */
    public static Set<Method> getMethodsFromDescriptors(Iterable<String> annotatedWith, ClassLoader... classLoaders) {
        Set<Method> result = Sets.newHashSet();
        for (String annotated : annotatedWith) {
            if (!isConstructor(annotated)) {
                Method member = (Method) getMemberFromDescriptor(annotated, classLoaders);
                if (member != null) result.add(member);
            }
        }
        return result;
    }

    /** resolves the constructor descriptors (ignoring method descriptors) to {@link Constructor}s */
    public static Set<Constructor> getConstructorsFromDescriptors(Iterable<String> annotatedWith, ClassLoader... classLoaders) {
        Set<Constructor> result = Sets.newHashSet();
        for (String annotated : annotatedWith) {
            if (isConstructor(annotated)) {
                Constructor member = (Constructor) getMemberFromDescriptor(annotated, classLoaders);
                if (member != null) result.add(member);
            }
        }
        return result;
    }

    /** resolves each descriptor to a {@link Member}, wrapping resolution failures with context */
    public static Set<Member> getMembersFromDescriptors(Iterable<String> values, ClassLoader... classLoaders) {
        Set<Member> result = Sets.newHashSet();
        for (String value : values) {
            try {
                result.add(Utils.getMemberFromDescriptor(value, classLoaders));
            } catch (ReflectionsException e) {
                throw new ReflectionsException("Can't resolve member named " + value, e);
            }
        }
        return result;
    }

    /** resolves a fully qualified field name ({@code "com.Foo.bar"}) to a {@link Field} */
    public static Field getFieldFromString(String field, ClassLoader... classLoaders) {
        String className = field.substring(0, field.lastIndexOf('.'));
        String fieldName = field.substring(field.lastIndexOf('.') + 1);
        try {
            return forName(className, classLoaders).getDeclaredField(fieldName);
        } catch (NoSuchFieldException e) {
            throw new ReflectionsException("Can't resolve field named " + fieldName, e);
        }
    }

    /** closes the stream quietly, logging (when a logger is bound) instead of throwing */
    public static void close(InputStream closeable) {
        try { if (closeable != null) closeable.close(); }
        catch (IOException e) {
            if (Reflections.log != null) {
                Reflections.log.warn("Could not close InputStream", e);
            }
        }
    }

    /** returns an slf4j logger for the class, or null when no slf4j binding is present */
    @Nullable
    public static Logger findLogger(Class<?> aClass) {
        try {
            // probe for a concrete slf4j binding before handing out a logger
            Class.forName("org.slf4j.impl.StaticLoggerBinder");
            return LoggerFactory.getLogger(aClass);
        } catch (Throwable e) {
            return null;
        }
    }

    /** true when the descriptor names a constructor (contains {@code "init>"}) */
    public static boolean isConstructor(String fqn) {
        return fqn.contains("init>");
    }

    /** returns the readable name of the type, rendering arrays as {@code Type[]...} */
    public static String name(Class type) {
        if (!type.isArray()) {
            return type.getName();
        } else {
            int dim = 0;
            while (type.isArray()) {
                dim++;
                type = type.getComponentType();
            }
            return type.getName() + repeat("[]", dim);
        }
    }

    /** readable names of the given types, in iteration order */
    public static List<String> names(Iterable<Class<?>> types) {
        List<String> result = new ArrayList<String>();
        for (Class<?> type : types) result.add(name(type));
        return result;
    }

    /** readable names of the given types */
    public static List<String> names(Class<?>... types) {
        return names(Arrays.asList(types));
    }

    /** descriptor-style name of a constructor: {@code com.Foo.<init>(params)} */
    public static String name(Constructor constructor) {
        return constructor.getName() + "." + "<init>" + "(" + Joiner.on(",").join(names(constructor.getParameterTypes())) + ")";
    }

    /** descriptor-style name of a method: {@code com.Foo.bar(params)} */
    public static String name(Method method) {
        return method.getDeclaringClass().getName() + "." + method.getName() + "(" + Joiner.on(", ").join(names(method.getParameterTypes())) + ")";
    }

    /** descriptor-style name of a field: {@code com.Foo.bar} */
    public static String name(Field field) {
        return field.getDeclaringClass().getName() + "." + field.getName();
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/CommonsVfs2UrlType.java
|
package org.reflections.vfs;
import com.google.common.collect.AbstractIterator;
import org.apache.commons.vfs2.*;
import org.reflections.Reflections;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.*;
/**
* A {@link org.reflections.vfs.Vfs.UrlType} using the commons vfs 2 library, for creating {@link org.reflections.vfs.Vfs.Dir}.
* <p>It can be plugged into using {@link Vfs#addDefaultURLTypes(Vfs.UrlType)}, and requires the library provided in classpath
*/
public class CommonsVfs2UrlType implements Vfs.UrlType {

    @Override
    public boolean matches(URL url) throws Exception {
        try {
            final FileSystemManager manager = VFS.getManager();
            final FileObject fileObject = manager.resolveFile(url.toExternalForm());
            return fileObject.exists() && fileObject.getType() == FileType.FOLDER;
        } catch (FileSystemException e) {
            // Reflections.log is nullable (null when no slf4j binding is present);
            // the previous version dereferenced it unconditionally and could NPE here.
            if (Reflections.log != null) {
                Reflections.log.warn("Could not create CommonsVfs2UrlType from url " + url.toExternalForm(), e);
            }
            return false;
        }
    }

    @Override
    public Vfs.Dir createDir(URL url) throws Exception {
        final FileSystemManager manager = VFS.getManager();
        final FileObject fileObject = manager.resolveFile(url.toExternalForm());
        return new CommonsVfs2UrlType.Dir(fileObject);
    }

    /** A {@link Vfs.Dir} backed by a commons-vfs2 {@link FileObject} folder. */
    public static class Dir implements Vfs.Dir {
        private final FileObject file;

        public Dir(FileObject file) {
            this.file = file;
        }

        public String getPath() {
            try {
                return file.getURL().getPath();
            } catch (FileSystemException e) {
                throw new RuntimeException(e);
            }
        }

        /** lazily walks the folder tree, yielding leaf files */
        public Iterable<Vfs.File> getFiles() {
            return new Iterable<Vfs.File>() {
                public Iterator<Vfs.File> iterator() {
                    return new FileAbstractIterator();
                }
            };
        }

        public void close() {
            try {
                file.close();
            } catch (FileSystemException e) {
                // previously swallowed silently (//todo log); log when a logger is bound
                if (Reflections.log != null) {
                    Reflections.log.warn("Could not close file object " + file, e);
                }
            }
        }

        /** depth-first traversal over the folder tree using an explicit stack */
        private class FileAbstractIterator extends AbstractIterator<Vfs.File> {
            final Stack<FileObject> stack = new Stack<FileObject>();

            {
                listDir(file);
            }

            protected Vfs.File computeNext() {
                while (!stack.isEmpty()) {
                    final FileObject file = stack.pop();
                    try {
                        // folders are expanded in place; regular files are yielded
                        if (isDir(file)) listDir(file); else return getFile(file);
                    } catch (FileSystemException e) {
                        throw new RuntimeException(e);
                    }
                }
                return endOfData();
            }

            private File getFile(FileObject file) {
                return new File(Dir.this.file, file);
            }

            private boolean listDir(FileObject file) {
                return stack.addAll(listFiles(file));
            }

            private boolean isDir(FileObject file) throws FileSystemException {
                return file.getType() == FileType.FOLDER;
            }

            protected List<FileObject> listFiles(final FileObject file) {
                try {
                    FileObject[] files = file.getType().hasChildren() ? file.getChildren() : null;
                    return files != null ? Arrays.asList(files) : new ArrayList<FileObject>();
                } catch (FileSystemException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    }

    /** A {@link Vfs.File} backed by a commons-vfs2 {@link FileObject}. */
    public static class File implements Vfs.File {
        private final FileObject root;
        private final FileObject file;

        public File(FileObject root, FileObject file) {
            this.root = root;
            this.file = file;
        }

        public String getName() {
            return file.getName().getBaseName();
        }

        public String getRelativePath() {
            String filepath = file.getName().getPath().replace("\\", "/");
            if (filepath.startsWith(root.getName().getPath())) {
                return filepath.substring(root.getName().getPath().length() + 1);
            }
            return null; //should not get here
        }

        public InputStream openInputStream() throws IOException {
            return file.getContent().getInputStream();
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/JarInputDir.java
|
package org.reflections.vfs;
import com.google.common.collect.AbstractIterator;
import org.reflections.ReflectionsException;
import org.reflections.util.Utils;
import java.io.IOException;
import java.net.URL;
import java.util.Iterator;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
/**
 * A {@link Vfs.Dir} over a jar that is read as a stream (no random access), e.g. a jar nested
 * inside another jar or served from a remote url.
 * <p>The single {@link JarInputStream} is shared with the {@link JarInputFile}s it hands out;
 * {@code cursor}/{@code nextCursor} track byte positions so each file reads only its own slice.
 * Files must therefore be consumed in iteration order.
 */
public class JarInputDir implements Vfs.Dir {
private final URL url;
// shared stream; opened lazily on first iteration, advanced by JarInputFile reads
JarInputStream jarInputStream;
// current read position within the stream (advanced by JarInputFile)
long cursor = 0;
// position at which the most recently returned entry's data ends
long nextCursor = 0;
public JarInputDir(URL url) {
this.url = url;
}
public String getPath() {
return url.getPath();
}
/** Single-pass iteration over the jar's non-directory entries, in stream order. */
public Iterable<Vfs.File> getFiles() {
return new Iterable<Vfs.File>() {
public Iterator<Vfs.File> iterator() {
return new AbstractIterator<Vfs.File>() {
{
// open the shared stream once, when iteration starts
try { jarInputStream = new JarInputStream(url.openConnection().getInputStream()); }
catch (Exception e) { throw new ReflectionsException("Could not open url connection", e); }
}
protected Vfs.File computeNext() {
while (true) {
try {
ZipEntry entry = jarInputStream.getNextJarEntry();
if (entry == null) {
return endOfData();
}
long size = entry.getSize();
// a negative size means the 32-bit field wrapped; undo the wrap
if (size < 0) size = 0xffffffffl + size; //JDK-6916399
nextCursor += size;
if (!entry.isDirectory()) {
// hand the file the [cursor, nextCursor] window it may read from the shared stream
return new JarInputFile(entry, JarInputDir.this, cursor, nextCursor);
}
} catch (IOException e) {
throw new ReflectionsException("could not get next zip entry", e);
}
}
}
};
}
};
}
public void close() {
Utils.close(jarInputStream);
}
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/JarInputFile.java
|
package org.reflections.vfs;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
/**
 * A {@link Vfs.File} over one entry of a {@link JarInputDir}.
 * <p>Reads come from the owning dir's single shared {@link java.util.jar.JarInputStream};
 * {@code fromIndex}/{@code endIndex} bound the byte window this entry is allowed to consume,
 * so files must be read in the order the dir yields them.
 */
public class JarInputFile implements Vfs.File {
private final ZipEntry entry;
// owning dir; supplies the shared stream and the current read cursor
private final JarInputDir jarInputDir;
// stream position where this entry's data starts
private final long fromIndex;
// stream position where this entry's data ends
private final long endIndex;
public JarInputFile(ZipEntry entry, JarInputDir jarInputDir, long cursor, long nextCursor) {
this.entry = entry;
this.jarInputDir = jarInputDir;
fromIndex = cursor;
endIndex = nextCursor;
}
/** Base name of the entry (text after the last '/'). */
public String getName() {
String name = entry.getName();
return name.substring(name.lastIndexOf("/") + 1);
}
public String getRelativePath() {
return entry.getName();
}
/**
 * A view over the shared stream, clamped to this entry's window: reads advance the owner's
 * cursor and report end-of-stream (-1) once the cursor leaves [fromIndex, endIndex].
 */
public InputStream openInputStream() throws IOException {
return new InputStream() {
@Override
public int read() throws IOException {
if (jarInputDir.cursor >= fromIndex && jarInputDir.cursor <= endIndex) {
int read = jarInputDir.jarInputStream.read();
jarInputDir.cursor++;
return read;
} else {
return -1;
}
}
};
}
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/SystemDir.java
|
package org.reflections.vfs;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Lists;
import java.util.Collections;
import java.util.Iterator;
import java.util.Stack;
import java.util.List;
import java.io.File;
/*
 * An implementation of {@link org.reflections.vfs.Vfs.Dir} over a file system directory {@link java.io.File}.
 * A null file is tolerated and behaves as an empty directory.
 */
public class SystemDir implements Vfs.Dir {
    private final File file;

    /** Wraps the given directory; rejects files that exist but are not readable directories. */
    public SystemDir(File file) {
        boolean unusable = file != null && (!file.isDirectory() || !file.canRead());
        if (unusable) {
            throw new RuntimeException("cannot use dir " + file);
        }
        this.file = file;
    }

    /** Forward-slash path of the directory, or a placeholder when none was given. */
    public String getPath() {
        return file == null ? "/NO-SUCH-DIRECTORY/" : file.getPath().replace("\\", "/");
    }

    /** Lazy depth-first walk over the tree, yielding regular files only. */
    public Iterable<Vfs.File> getFiles() {
        if (file == null || !file.exists()) {
            return Collections.emptyList();
        }
        return new Iterable<Vfs.File>() {
            public Iterator<Vfs.File> iterator() {
                return new AbstractIterator<Vfs.File>() {
                    final Stack<File> pending = new Stack<File>();
                    { pending.addAll(listFiles(file)); }

                    protected Vfs.File computeNext() {
                        for (; !pending.isEmpty(); ) {
                            final File next = pending.pop();
                            if (!next.isDirectory()) {
                                return new SystemFile(SystemDir.this, next);
                            }
                            // expand subdirectory in place and keep scanning
                            pending.addAll(listFiles(next));
                        }
                        return endOfData();
                    }
                };
            }
        };
    }

    /** Children of {@code file}, or an empty list when listing fails (listFiles returned null). */
    private static List<File> listFiles(final File file) {
        File[] children = file.listFiles();
        return children == null ? Lists.<File>newArrayList() : Lists.newArrayList(children);
    }

    public void close() {
        // nothing to release for a plain directory
    }

    @Override
    public String toString() {
        return getPath();
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/SystemFile.java
|
package org.reflections.vfs;
import java.io.InputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
/** an implementation of {@link org.reflections.vfs.Vfs.File} for a directory {@link java.io.File} */
public class SystemFile implements Vfs.File {
    private final SystemDir root;
    private final java.io.File file;

    public SystemFile(final SystemDir root, java.io.File file) {
        this.root = root;
        this.file = file;
    }

    public String getName() {
        return file.getName();
    }

    /** Path relative to the owning {@link SystemDir}, with forward slashes; null if outside the root. */
    public String getRelativePath() {
        String normalized = file.getPath().replace("\\", "/");
        String rootPath = root.getPath();
        return normalized.startsWith(rootPath)
                ? normalized.substring(rootPath.length() + 1)
                : null; //should not get here
    }

    public InputStream openInputStream() {
        try {
            return new FileInputStream(file);
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public String toString() {
        return file.toString();
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/UrlTypeVFS.java
|
package org.reflections.vfs;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.reflections.Reflections;
import org.reflections.ReflectionsException;
import org.reflections.vfs.Vfs.Dir;
import org.reflections.vfs.Vfs.UrlType;
import com.google.common.base.Predicate;
/**
 * UrlType to be used by Reflections library.
 * This class handles the vfszip and vfsfile protocol of JBOSS files.
 * <p>
 * <p>to use it, register it in Vfs via {@link org.reflections.vfs.Vfs#addDefaultURLTypes(org.reflections.vfs.Vfs.UrlType)} or {@link org.reflections.vfs.Vfs#setDefaultURLTypes(java.util.List)}.
 * @author Sergio Pola
 *
 */
public class UrlTypeVFS implements UrlType {
    public final static String[] REPLACE_EXTENSION = new String[]{".ear/", ".jar/", ".war/", ".sar/", ".har/", ".par/"};

    /**
     * Matches a deployable-archive extension followed by '/', e.g. ".jar/".
     * Hoisted to a constant: the original recompiled this pattern on every call to
     * {@link #findFirstMatchOfDeployableExtention}, which is invoked in a loop.
     */
    private static final Pattern DEPLOYABLE_EXTENSION = Pattern.compile("\\.[ejprw]ar/");

    final String VFSZIP = "vfszip";
    final String VFSFILE = "vfsfile";

    /** Matches only the JBoss vfszip/vfsfile protocols. */
    public boolean matches(URL url) {
        return VFSZIP.equals(url.getProtocol()) || VFSFILE.equals(url.getProtocol());
    }

    /**
     * Opens the url's underlying archive as a {@link ZipDir}, first rewriting the JBoss-style
     * url to a plain zip/file url. Falls back to the raw url path; returns null on total failure.
     */
    public Dir createDir(final URL url) {
        try {
            URL adaptedUrl = adaptURL(url);
            return new ZipDir(new JarFile(adaptedUrl.getFile()));
        } catch (Exception e) {
            if (Reflections.log != null) {
                Reflections.log.warn("Could not get URL", e);
            }
            // fallback: try the unadapted url path directly
            try {
                return new ZipDir(new JarFile(url.getFile()));
            } catch (IOException e1) {
                if (Reflections.log != null) {
                    Reflections.log.warn("Could not get URL", e1);
                }
            }
        }
        return null;
    }

    /**
     * Rewrites a vfszip url into nested zip: urls, and a vfsfile url into a file: url.
     * Other protocols pass through unchanged.
     */
    public URL adaptURL(URL url) throws MalformedURLException {
        if (VFSZIP.equals(url.getProtocol())) {
            return replaceZipSeparators(url.getPath(), realFile);
        } else if (VFSFILE.equals(url.getProtocol())) {
            return new URL(url.toString().replace(VFSFILE, "file"));
        } else {
            return url;
        }
    }

    /**
     * Scans the path for the first deployable-archive segment (".jar/" etc.) whose prefix is an
     * actual file on disk (per {@code acceptFile}), then rewrites the remainder with zip separators.
     * @throws ReflectionsException when no segment of the path is a real archive file
     */
    URL replaceZipSeparators(String path, Predicate<File> acceptFile)
            throws MalformedURLException {
        int pos = 0;
        while (pos != -1) {
            pos = findFirstMatchOfDeployableExtention(path, pos);
            if (pos > 0) {
                File file = new File(path.substring(0, pos - 1));
                if (acceptFile.apply(file)) { return replaceZipSeparatorStartingFrom(path, pos); }
            }
        }
        throw new ReflectionsException("Unable to identify the real zip file in path '" + path + "'.");
    }

    /**
     * Returns the end index (exclusive) of the first archive-extension match at or after {@code pos},
     * or -1 when there is none.
     */
    int findFirstMatchOfDeployableExtention(String path, int pos) {
        Matcher m = DEPLOYABLE_EXTENSION.matcher(path);
        if (m.find(pos)) {
            return m.end();
        } else {
            return -1;
        }
    }

    /** Accepts only paths that exist as regular files on disk. */
    Predicate<File> realFile = new Predicate<File>() {
        public boolean apply(File file) {
            return file.exists() && file.isFile();
        }
    };

    /**
     * Builds a nested "zip:...!..." url: the prefix up to {@code pos} is the outermost archive;
     * each further archive extension inside the remainder becomes another nesting level
     * (one leading "zip:" per level, "/" separators turned into "!").
     */
    URL replaceZipSeparatorStartingFrom(String path, int pos)
            throws MalformedURLException {
        String zipFile = path.substring(0, pos - 1);
        String zipPath = path.substring(pos);
        int numSubs = 1;
        for (String ext : REPLACE_EXTENSION) {
            while (zipPath.contains(ext)) {
                zipPath = zipPath.replace(ext, ext.substring(0, 4) + "!");
                numSubs++;
            }
        }
        String prefix = "";
        for (int i = 0; i < numSubs; i++) {
            prefix += "zip:";
        }
        if (zipPath.trim().length() == 0) {
            return new URL(prefix + "/" + zipFile);
        } else {
            return new URL(prefix + "/" + zipFile + "!" + zipPath);
        }
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/Vfs.java
|
package org.reflections.vfs;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.commons.vfs2.*;
import org.reflections.Reflections;
import org.reflections.ReflectionsException;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.Utils;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.*;
import java.util.*;
import java.util.jar.JarFile;
/**
* a simple virtual file system bridge
* <p>use the {@link org.reflections.vfs.Vfs#fromURL(java.net.URL)} to get a {@link org.reflections.vfs.Vfs.Dir},
* then use {@link org.reflections.vfs.Vfs.Dir#getFiles()} to iterate over the {@link org.reflections.vfs.Vfs.File}
* <p>for example:
* <pre>
* Vfs.Dir dir = Vfs.fromURL(url);
* Iterable<Vfs.File> files = dir.getFiles();
* for (Vfs.File file : files) {
* InputStream is = file.openInputStream();
* }
* </pre>
* <p>{@link org.reflections.vfs.Vfs#fromURL(java.net.URL)} uses static {@link org.reflections.vfs.Vfs.DefaultUrlTypes} to resolve URLs.
* It contains VfsTypes for handling for common resources such as local jar file, local directory, jar url, jar input stream and more.
* <p>It can be plugged in with other {@link org.reflections.vfs.Vfs.UrlType} using {@link org.reflections.vfs.Vfs#addDefaultURLTypes(org.reflections.vfs.Vfs.UrlType)} or {@link org.reflections.vfs.Vfs#setDefaultURLTypes(java.util.List)}.
* <p>for example:
* <pre>
* Vfs.addDefaultURLTypes(new Vfs.UrlType() {
* public boolean matches(URL url) {
* return url.getProtocol().equals("http");
* }
* public Vfs.Dir createDir(final URL url) {
* return new HttpDir(url); //implement this type... (check out a naive implementation on VfsTest)
* }
* });
*
* Vfs.Dir dir = Vfs.fromURL(new URL("http://mirrors.ibiblio.org/pub/mirrors/maven2/org/slf4j/slf4j-api/1.5.6/slf4j-api-1.5.6.jar"));
* </pre>
* <p>use {@link org.reflections.vfs.Vfs#findFiles(java.util.Collection, com.google.common.base.Predicate)} to get an
* iteration of files matching given name predicate over given list of urls
*/
public abstract class Vfs {
// statically shared, mutable registry consulted by fromURL; ordered - the first matching type wins
private static List<UrlType> defaultUrlTypes = Lists.<UrlType>newArrayList(DefaultUrlTypes.values());
/** an abstract vfs dir */
public interface Dir {
// path of this dir, mainly for diagnostics
String getPath();
// lazily iterates the files contained in this dir
Iterable<File> getFiles();
// releases any underlying resource (archive handle, stream, ...)
void close();
}
/** an abstract vfs file */
public interface File {
// base file name without any directory part
String getName();
// path relative to the containing Dir, using '/' separators
String getRelativePath();
InputStream openInputStream() throws IOException;
}
/** a matcher and factory for a url */
public interface UrlType {
boolean matches(URL url) throws Exception;
Dir createDir(URL url) throws Exception;
}
/** the default url types that will be used when issuing {@link org.reflections.vfs.Vfs#fromURL(java.net.URL)} */
public static List<UrlType> getDefaultUrlTypes() {
return defaultUrlTypes;
}
/** sets the static default url types. can be used to statically plug in urlTypes */
public static void setDefaultURLTypes(final List<UrlType> urlTypes) {
defaultUrlTypes = urlTypes;
}
/** add a static default url types to the beginning of the default url types list. can be used to statically plug in urlTypes */
public static void addDefaultURLTypes(UrlType urlType) {
defaultUrlTypes.add(0, urlType);
}
/** tries to create a Dir from the given url, using the defaultUrlTypes */
public static Dir fromURL(final URL url) {
return fromURL(url, defaultUrlTypes);
}
/** tries to create a Dir from the given url, using the given urlTypes.
 * the first type that matches and produces a non-null Dir wins; a failing type is logged and skipped. */
public static Dir fromURL(final URL url, final List<UrlType> urlTypes) {
for (UrlType type : urlTypes) {
try {
if (type.matches(url)) {
Dir dir = type.createDir(url);
if (dir != null) return dir;
}
} catch (Throwable e) {
// deliberately broad: a broken UrlType must not prevent the remaining types from being tried
if (Reflections.log != null) {
Reflections.log.warn("could not create Dir using " + type + " from url " + url.toExternalForm() + ". skipping.", e);
}
}
}
throw new ReflectionsException("could not create Vfs.Dir from url, no matching UrlType was found [" + url.toExternalForm() + "]\n" +
"either use fromURL(final URL url, final List<UrlType> urlTypes) or " +
"use the static setDefaultURLTypes(final List<UrlType> urlTypes) or addDefaultURLTypes(UrlType urlType) " +
"with your specialized UrlType.");
}
/** tries to create a Dir from the given url, using the given urlTypes*/
public static Dir fromURL(final URL url, final UrlType... urlTypes) {
return fromURL(url, Lists.<UrlType>newArrayList(urlTypes));
}
/** return an iterable of all {@link org.reflections.vfs.Vfs.File} in given urls, starting with given packagePrefix and matching nameFilter */
public static Iterable<File> findFiles(final Collection<URL> inUrls, final String packagePrefix, final Predicate<String> nameFilter) {
Predicate<File> fileNamePredicate = new Predicate<File>() {
public boolean apply(File file) {
String path = file.getRelativePath();
if (path.startsWith(packagePrefix)) {
// substring(1) drops the '/' separating the package prefix from the file name
String filename = path.substring(path.indexOf(packagePrefix) + packagePrefix.length());
return !Utils.isEmpty(filename) && nameFilter.apply(filename.substring(1));
} else {
return false;
}
}
};
return findFiles(inUrls, fileNamePredicate);
}
/** return an iterable of all {@link org.reflections.vfs.Vfs.File} in given urls, matching filePredicate.
 * urls that fail to resolve are logged and skipped, never propagated. */
public static Iterable<File> findFiles(final Collection<URL> inUrls, final Predicate<File> filePredicate) {
Iterable<File> result = new ArrayList<File>();
for (final URL url : inUrls) {
try {
// lazily concatenated: each url's Dir is only opened when the result is iterated
result = Iterables.concat(result,
Iterables.filter(new Iterable<File>() {
public Iterator<File> iterator() {
return fromURL(url).getFiles().iterator();
}
}, filePredicate));
} catch (Throwable e) {
if (Reflections.log != null) {
Reflections.log.error("could not findFiles for url. continuing. [" + url + "]", e);
}
}
}
return result;
}
/** try to get {@link java.io.File} from url.
 * tries several decoding strategies in turn and returns the first that yields an existing file,
 * or null when none does. the empty catch blocks are intentional fall-throughs to the next strategy. */
public static @Nullable java.io.File getFile(URL url) {
java.io.File file;
String path;
// strategy 1: the raw scheme-specific part of the URI
try {
path = url.toURI().getSchemeSpecificPart();
if ((file = new java.io.File(path)).exists()) return file;
} catch (URISyntaxException e) {
}
// strategy 2: url-decoded path, truncated at an embedded ".jar!" marker
try {
path = URLDecoder.decode(url.getPath(), "UTF-8");
if (path.contains(".jar!")) path = path.substring(0, path.lastIndexOf(".jar!") + ".jar".length());
if ((file = new java.io.File(path)).exists()) return file;
} catch (UnsupportedEncodingException e) {
}
// strategy 3: the external form with known protocol prefixes stripped, with and without "%20" decoding
try {
path = url.toExternalForm();
if (path.startsWith("jar:")) path = path.substring("jar:".length());
if (path.startsWith("wsjar:")) path = path.substring("wsjar:".length());
if (path.startsWith("file:")) path = path.substring("file:".length());
if (path.contains(".jar!")) path = path.substring(0, path.indexOf(".jar!") + ".jar".length());
if ((file = new java.io.File(path)).exists()) return file;
path = path.replace("%20", " ");
if ((file = new java.io.File(path)).exists()) return file;
} catch (Exception e) {
}
return null;
}
/** default url types used by {@link org.reflections.vfs.Vfs#fromURL(java.net.URL)}
 * <p>
 * <p>jarFile - creates a {@link org.reflections.vfs.ZipDir} over jar file
 * <p>jarUrl - creates a {@link org.reflections.vfs.ZipDir} over a jar url (contains ".jar!/" in it's name), using Java's {@link JarURLConnection}
 * <p>directory - creates a {@link org.reflections.vfs.SystemDir} over a file system directory
 * <p>jboss vfs - for protocols vfs, using jboss vfs (should be provided in classpath)
 * <p>jboss vfsfile - creates a {@link UrlTypeVFS} for protocols vfszip and vfsfile.
 * <p>bundle - for bundle protocol, using eclipse FileLocator (should be provided in classpath)
 * <p>jarInputStream - creates a {@link JarInputDir} over jar files, using Java's JarInputStream
 * */
public static enum DefaultUrlTypes implements UrlType {
jarFile {
public boolean matches(URL url) {
return url.getProtocol().equals("file") && url.toExternalForm().contains(".jar");
}
public Dir createDir(final URL url) throws Exception {
return new ZipDir(new JarFile(getFile(url)));
}
},
jarUrl {
public boolean matches(URL url) {
return "jar".equals(url.getProtocol()) || "zip".equals(url.getProtocol()) || "wsjar".equals(url.getProtocol());
}
public Dir createDir(URL url) throws Exception {
// prefer the JarURLConnection's own JarFile; fall back to resolving a local file
try {
URLConnection urlConnection = url.openConnection();
if (urlConnection instanceof JarURLConnection) {
return new ZipDir(((JarURLConnection) urlConnection).getJarFile());
}
} catch (Throwable e) { /*fallback*/ }
java.io.File file = getFile(url);
if (file != null) {
return new ZipDir(new JarFile(file));
}
return null;
}
},
directory {
public boolean matches(URL url) {
return url.getProtocol().equals("file") && !url.toExternalForm().contains(".jar") &&
getFile(url).isDirectory();
}
public Dir createDir(final URL url) throws Exception {
return new SystemDir(getFile(url));
}
},
jboss_vfs {
public boolean matches(URL url) {
return url.getProtocol().equals("vfs");
}
// resolves the jboss VirtualFile reflectively so jboss-vfs is only needed at runtime for vfs urls
public Vfs.Dir createDir(URL url) throws Exception {
Object content = url.openConnection().getContent();
Class<?> virtualFile = ClasspathHelper.contextClassLoader().loadClass("org.jboss.vfs.VirtualFile");
java.io.File physicalFile = (java.io.File) virtualFile.getMethod("getPhysicalFile").invoke(content);
String name = (String) virtualFile.getMethod("getName").invoke(content);
java.io.File file = new java.io.File(physicalFile.getParentFile(), name);
if (!file.exists() || !file.canRead()) file = physicalFile;
return file.isDirectory() ? new SystemDir(file) : new ZipDir(new JarFile(file));
}
},
jboss_vfsfile {
public boolean matches(URL url) throws Exception {
return "vfszip".equals(url.getProtocol()) || "vfsfile".equals(url.getProtocol());
}
public Dir createDir(URL url) throws Exception {
return new UrlTypeVFS().createDir(url);
}
},
bundle {
public boolean matches(URL url) throws Exception {
return url.getProtocol().startsWith("bundle");
}
// resolves the bundle url to a concrete url via eclipse's FileLocator, loaded reflectively
public Dir createDir(URL url) throws Exception {
return fromURL((URL) ClasspathHelper.contextClassLoader().
loadClass("org.eclipse.core.runtime.FileLocator").getMethod("resolve", URL.class).invoke(null, url));
}
},
jarInputStream {
public boolean matches(URL url) throws Exception {
return url.toExternalForm().contains(".jar");
}
public Dir createDir(final URL url) throws Exception {
return new JarInputDir(url);
}
}
}
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/ZipDir.java
|
package org.reflections.vfs;
import com.google.common.collect.AbstractIterator;
import org.reflections.Reflections;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
/** an implementation of {@link org.reflections.vfs.Vfs.Dir} for {@link java.util.zip.ZipFile} */
public class ZipDir implements Vfs.Dir {
    final java.util.zip.ZipFile jarFile;

    public ZipDir(JarFile jarFile) {
        this.jarFile = jarFile;
    }

    public String getPath() {
        return jarFile.getName();
    }

    /** Lazily iterates the archive's entries, skipping directory entries. */
    public Iterable<Vfs.File> getFiles() {
        return new Iterable<Vfs.File>() {
            public Iterator<Vfs.File> iterator() {
                return new AbstractIterator<Vfs.File>() {
                    final Enumeration<? extends ZipEntry> entries = jarFile.entries();

                    protected Vfs.File computeNext() {
                        for (; entries.hasMoreElements(); ) {
                            final ZipEntry candidate = entries.nextElement();
                            if (candidate.isDirectory()) {
                                continue;
                            }
                            return new ZipFile(ZipDir.this, candidate);
                        }
                        return endOfData();
                    }
                };
            }
        };
    }

    /** Closes the underlying archive; a failure is logged (best effort), never thrown. */
    public void close() {
        try {
            jarFile.close();
        } catch (IOException e) {
            if (Reflections.log != null) {
                Reflections.log.warn("Could not close JarFile", e);
            }
        }
    }

    @Override
    public String toString() {
        return jarFile.getName();
    }
}
|
0
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections
|
java-sources/ai/h2o/reflections/0.9.11-h2o-custom/org/reflections/vfs/ZipFile.java
|
package org.reflections.vfs;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
/** an implementation of {@link org.reflections.vfs.Vfs.File} for {@link java.util.zip.ZipEntry} */
public class ZipFile implements Vfs.File {
    private final ZipDir root;
    private final ZipEntry entry;

    public ZipFile(final ZipDir root, ZipEntry entry) {
        this.root = root;
        this.entry = entry;
    }

    /** Base name of the entry (text after the last '/'). */
    public String getName() {
        String path = entry.getName();
        int lastSlash = path.lastIndexOf("/");
        return path.substring(lastSlash + 1);
    }

    public String getRelativePath() {
        return entry.getName();
    }

    public InputStream openInputStream() throws IOException {
        return root.jarFile.getInputStream(entry);
    }

    @Override
    public String toString() {
        return root.getPath() + "!" + java.io.File.separatorChar + entry.toString();
    }
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/org/apache/spark
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/org/apache/spark/h2o/JavaH2OContext.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.h2o;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.h2o.converters.SupportedRDD$;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import scala.Option;
import water.Key;
import water.fvec.Frame;
import water.fvec.H2OFrame;
/**
* A Java-friendly version of [[org.apache.spark.h2o.H2OContext]]
*
*
* Sparkling Water can run in two modes. External cluster mode and internal cluster mode. When using external cluster
* mode, it tries to connect to existing H2O cluster using the provided spark
* configuration properties. In the case of internal cluster mode,it creates H2O cluster living in Spark - that means
* that each Spark executor will have one h2o instance running in it. This mode is not
* recommended for big clusters and clusters where Spark executors are not stable.
*
* Cluster mode can be set using the spark configuration
* property spark.ext.h2o.mode which can be set in script starting sparkling-water or
* can be set in H2O configuration class H2OConf
*
*/
public class JavaH2OContext {
/*
Note for developers: This class is not written in scala intentionally as we want to have static method getOrCreate on
the class itself and not on class generated from the object ( like H2OContext$). This way the functionality and API
remains the same as in H2OContext, but we need to write a few pass-through functions.
If we write this class in scala the Java users would have to call getOrCreate method on generated class ending with $
which is not nice.
*/
transient private H2OContext hc;
public H2OContext h2oContext(){
return hc;
}
public SparkContext sparkContext(){
return hc.sparkContext();
}
/**
* Create new JavaH2OContext based on existing H2O Context
*
* @param hc H2O Context
*/
private JavaH2OContext(H2OContext hc){
this.hc = hc;
}
/**
* Get Java H2O Context based on existing H2O Context
* @param hc H2O Context
* @return Java H2O Context
*/
public static JavaH2OContext getFromExisting(H2OContext hc){
return new JavaH2OContext(hc);
}
/**
* Pass-through to H2OContext.asH2OFrame. For API support only.
* @param df data frame to pass for building an H2OFrame
* @return a new H2O frame
*/ public H2OFrame asH2OFrame(DataFrame df){
return hc.asH2OFrame(df);
}
/**
* Pass-through to H2OContext.asH2OFrame. For API support only.
* @param df data frame to pass for building an H2OFrame
* @param frameName name of the new frame
* @return a new H2O frame
*/ public H2OFrame asH2OFrame(DataFrame df, String frameName){
return hc.asH2OFrame(df, frameName);
}
/**
* Create a new H2OFrame based on existing Frame referenced by its key.
* @param s the key
* @return a new H2O frame
*/
public H2OFrame asH2OFrame(String s){
return hc.asH2OFrame(s);
}
/**
* Create a new H2OFrame based on existing Frame
* @param fr the frame to be used
* @return Java H2O Context
*/
public H2OFrame asH2OFrame(Frame fr){
return hc.asH2OFrame(fr);
}
/**
* Convert given H2O frame into a Product RDD type
* @param fr the frame to be used
* @param <A> type of data being handled
* @return a new RDD
*/
public <A> JavaRDD<A> asRDD(H2OFrame fr){
//TODO: Implement this conversion
//return hc.asRDD(fr, (RDD<A>)JavaSparkContext.fakeClassTag())
return null;
}
/**
* Convert given H2O frame into DataFrame type
* @param fr the frame to be used
* @param sqlContext sql context to be used for creating a frame
* @return a new data frame
*/ public DataFrame asDataFrame(Frame fr, SQLContext sqlContext){
return asDataFrame(fr, true, sqlContext);
}
/** Convert given H2O frame into DataFrame type */
public DataFrame asDataFrame(Frame fr, boolean copyMetadata, SQLContext sqlContext){
return hc.asDataFrame(fr, copyMetadata, sqlContext);
}
/** Convert given H2O frame into DataFrame type */
public DataFrame asDataFrame(String key, SQLContext sqlContext){
return asDataFrame(key, true, sqlContext);
}
/** Convert given H2O frame into DataFrame type */
public DataFrame asDataFrame(String key, boolean copyMetadata, SQLContext sqlContext){
return hc.asDataFrame(key, copyMetadata, sqlContext);
}
/** Pass-through to H2OContext.toH2OFrameKey. For API support only.*/
public Key<Frame> toH2OFrameKey(DataFrame df){
return hc.toH2OFrameKey(df);
}
/** Pass-through to H2OContext.toH2OFrameKey. For API support only.*/
public Key<Frame> toH2OFrameKey(DataFrame df, Option<String> frameName){
return hc.toH2OFrameKey(df, frameName);
}
/** Pass-through to H2OContext.toH2OFrameKey. For API support only.*/
public Key<Frame> toH2OFrameKey(DataFrame df, String frameName){
return hc.toH2OFrameKey(df, frameName);
}
/**
* Get existing or create new JavaH2OContext based on provided H2O configuration. It searches the configuration
* properties passed to Sparkling Water and based on them starts H2O Context. If the values are not found, the default
* values are used in most of the cases. The default cluster mode is internal, ie. spark.ext.h2o.external.cluster.mode=false
*
* @param jsc Java Spark Context
* @return Java H2O Context
*/
public static JavaH2OContext getOrCreate(JavaSparkContext jsc){
H2OConf conf = new H2OConf(jsc.sc());
return getOrCreate(jsc, conf);
}
/**
* Get existing or create new JavaH2OContext based on provided H2O configuration
*
* @param jsc Java Spark Context
* @param conf H2O configuration
* @return Java H2O Context
*/
public static JavaH2OContext getOrCreate(JavaSparkContext jsc, H2OConf conf){
return new JavaH2OContext(H2OContext.getOrCreate(jsc.sc(), conf));
}
public String toString(){
return hc.toString();
}
public String h2oLocalClient(){
return hc.h2oLocalClient();
}
public String h2oLocalClientIp(){
return hc.h2oLocalClientIp();
}
public int h2oLocalClientPort(){
return hc.h2oLocalClientPort();
}
public void stop(boolean stopSparkContext){
hc.stop(stopSparkContext);
}
public void openFlow(){
hc.openFlow();
}
/**
* Return a copy of this JavaH2OContext's configuration. The configuration ''cannot'' be changed at runtime.
*/
public H2OConf getConf(){
return hc.getConf();
}
/** Conversion from RDD[String] to H2O's DataFrame */
public H2OFrame asH2OFrameFromRDDString(JavaRDD<String> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDString(rdd.rdd()), Option.apply(frameName));
}
/** Conversion from RDD[Boolean] to H2O's DataFrame */
public H2OFrame asH2OFrameFromRDDBool(JavaRDD<Boolean> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaBool(rdd.rdd()), Option.apply(frameName));
}
/** Conversion from RDD[Integer] to H2O's DataFrame */
public H2OFrame asH2OFrameFromRDDInt(JavaRDD<Integer> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaInt(rdd.rdd()), Option.apply(frameName));
}
/** Conversion from RDD[Byte] to H2O's DataFrame */
public H2OFrame asH2OFrameFromRDDByte(JavaRDD<Byte> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaByte(rdd.rdd()), Option.apply(frameName));
}
/** Conversion from RDD[Short] to H2O's DataFrame */
public H2OFrame asH2OFrameFromRDDShort(JavaRDD<Short> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaShort(rdd.rdd()), Option.apply(frameName));
}
/** Conversion from RDD[Float] to H2O's DataFrame */
public H2OFrame asH2OFrameFromRDDFloat(JavaRDD<Float> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaFloat(rdd.rdd()), Option.apply(frameName));
}
/** Conversion from RDD[Double] to H2O's DataFrame */
public H2OFrame asH2OFrameFromRDDDouble(JavaRDD<Double> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaDouble(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts an RDD coming from the Python client to an H2OFrame of doubles.
 * Even though the RDD is logically of type double, some elements may arrive
 * as integers, so every element is first normalized to {@code Double} (via
 * {@link RDDDoubleConversionFunc}) to avoid breaking the backend.
 * @param rdd input RDD of mixed {@code Number} elements
 * @param frameName key name for the resulting frame
 * @return the converted H2OFrame
 */
public H2OFrame asH2OFrameFromPythonRDDDouble(JavaRDD<Number> rdd, String frameName){
JavaRDD<Double> casted = rdd.map(new RDDDoubleConversionFunc());
return asH2OFrameFromRDDDouble(casted, frameName);
}
/**
 * Converts a {@code JavaRDD<Long>} to an H2OFrame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the converted H2OFrame
 */
public H2OFrame asH2OFrameFromRDDLong(JavaRDD<Long> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaLong(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts an RDD coming from the Python client to an H2OFrame of longs.
 * Even though the RDD is logically of type long, some elements may arrive
 * as integers, so every element is first normalized to {@code Long} (via
 * {@link RDDLongConversionFunc}) to avoid breaking the backend.
 * @param rdd input RDD of mixed {@code Number} elements
 * @param frameName key name for the resulting frame
 * @return the converted H2OFrame
 */
public H2OFrame asH2OFrameFromPythonRDDLong(JavaRDD<Number> rdd, String frameName){
JavaRDD<Long> casted = rdd.map(new RDDLongConversionFunc());
return asH2OFrameFromRDDLong(casted, frameName);
}
/**
 * Converts a {@code JavaRDD<LabeledPoint>} to an H2OFrame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the converted H2OFrame
 */
public H2OFrame asH2OFrameFromRDDLabeledPoint(JavaRDD<LabeledPoint> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDLabeledPoint(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<java.sql.Timestamp>} to an H2OFrame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the converted H2OFrame
 */
public H2OFrame asH2OFrameFromRDDTimeStamp(JavaRDD<java.sql.Timestamp> rdd, String frameName){
return hc.asH2OFrame(SupportedRDD$.MODULE$.toH2OFrameFromRDDTimeStamp(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<String>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDStringKey(JavaRDD<String> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDString(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<Boolean>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDBoolKey(JavaRDD<Boolean> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaBool(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<Integer>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDIntKey(JavaRDD<Integer> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaInt(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<Byte>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDByteKey(JavaRDD<Byte> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaByte(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<Short>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDShortKey(JavaRDD<Short> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaShort(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<Float>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDFloatKey(JavaRDD<Float> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaFloat(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<Double>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDDoubleKey(JavaRDD<Double> rdd, String frameName) {
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaDouble(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<Long>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDLongKey(JavaRDD<Long> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDJavaLong(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<LabeledPoint>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDLabeledPointKey(JavaRDD<LabeledPoint> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDLabeledPoint(rdd.rdd()), Option.apply(frameName));
}
/**
 * Converts a {@code JavaRDD<java.sql.Timestamp>} and returns only the DKV key of the resulting frame.
 * @param rdd input RDD
 * @param frameName key name for the resulting frame
 * @return the key of the converted frame
 */
@SuppressWarnings("unchecked")
public Key<Frame> asH2OFrameFromRDDTimeStampKey(JavaRDD<java.sql.Timestamp> rdd, String frameName){
return (Key<Frame>) hc.toH2OFrameKey(SupportedRDD$.MODULE$.toH2OFrameFromRDDTimeStamp(rdd.rdd()), Option.apply(frameName));
}
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/org/apache/spark
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/org/apache/spark/h2o/RDDDoubleConversionFunc.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.h2o;
import org.apache.spark.api.java.function.Function;
/**
 * Spark {@code Function} that normalizes any {@code Number} element to its
 * {@code double} value. Used by {@code asH2OFrameFromPythonRDDDouble}, where
 * Python-originated RDDs may mix integer and double elements.
 */
public class RDDDoubleConversionFunc implements Function<Number, Double> {
  @Override
  public Double call(Number element) throws Exception {
    final double normalized = element.doubleValue();
    return normalized;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/org/apache/spark
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/org/apache/spark/h2o/RDDLongConversionFunc.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.h2o;
import org.apache.spark.api.java.function.Function;
/**
 * Spark {@code Function} that normalizes any {@code Number} element to its
 * {@code long} value. Used by {@code asH2OFrameFromPythonRDDLong}, where
 * Python-originated RDDs may mix integer and long elements.
 */
public class RDDLongConversionFunc implements Function<Number, Long> {
  @Override
  public Long call(Number element) throws Exception {
    final long normalized = element.longValue();
    return normalized;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api/DataFrames/DataFrameV3.java
|
package water.api.DataFrames;
import water.api.API;
import water.api.Schema;
/**
 * REST schema for the {@code /3/dataframes/<dataframe_id>} endpoint:
 * basic information about a Spark DataFrame known to the backend.
 */
public class DataFrameV3 extends Schema<IcedDataFrameInfo, DataFrameV3> {
@API(help = "Data frame ID", direction = API.Direction.INOUT)
public String dataframe_id;
@API(help = "Number of partitions", direction = API.Direction.OUTPUT)
public int partitions;
@API(help = "Schema of this DataFrame.", direction = API.Direction.OUTPUT)
public String schema;
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api/DataFrames/H2OFrameIDV3.java
|
package water.api.DataFrames;
import water.api.API;
import water.api.Schema;
/**
 * REST schema for the {@code /3/dataframes/<dataframe_id>/h2oframe} endpoint:
 * maps a Spark DataFrame ID to the H2OFrame produced from it.
 */
public class H2OFrameIDV3 extends Schema<IcedH2OFrameID, H2OFrameIDV3> {
@API(help = "ID of Spark's DataFrame to be transformed", direction = API.Direction.INPUT)
public String dataframe_id;
@API(help = "ID of generated transformed H2OFrame", direction = API.Direction.INOUT)
public String h2oframe_id;
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api/H2OFrames/DataFrameIDV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package water.api.H2OFrames;
import water.api.API;
import water.api.Schema;
/**
 * REST schema for the {@code /3/h2oframes/<h2oframe_id>/dataframe} endpoint:
 * maps an H2OFrame ID to the Spark DataFrame generated from it.
 */
public class DataFrameIDV3 extends Schema<IcedDataFrameID, DataFrameIDV3> {
@API(help = "ID of H2OFrame to be transformed", direction = API.Direction.INPUT)
public String h2oframe_id;
@API(help = "ID of generated Spark's DataFrame", direction = API.Direction.INOUT)
public String dataframe_id;
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api/RDDs/RDD2H2OFrameIDV3.java
|
package water.api.RDDs;
import water.api.API;
import water.api.DataFrames.IcedH2OFrameID;
import water.api.Schema;
/**
 * REST schema for the {@code /3/RDDs/<rdd_id>/h2oframe} endpoint:
 * maps a (numeric) RDD ID to the H2OFrame produced from it.
 */
public class RDD2H2OFrameIDV3 extends Schema<IcedRDD2H2OFrameID, RDD2H2OFrameIDV3> {
@API(help = "Id of RDD to be transformed", direction = API.Direction.INPUT)
public int rdd_id;
@API(help = "Id of transformed H2OFrame", direction = API.Direction.INOUT)
public String h2oframe_id;
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api/RDDs/RDDV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package water.api.RDDs;
import water.api.API;
import water.api.Schema;
/**
 * REST schema for the {@code [POST] /3/RDDs/<rdd_id>} endpoint:
 * basic information about a cached Spark RDD.
 */
public class RDDV3 extends Schema<IcedRDDInfo, RDDV3> {
@API(help = "RDD ID", direction = API.Direction.INOUT)
public int rdd_id;
@API(help = "RDD Name", direction = API.Direction.OUTPUT)
public String name;
@API(help = "Number of partitions", direction = API.Direction.OUTPUT)
public int partitions;
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api/scalaInt/ScalaCodeV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package water.api.scalaInt;
import water.api.API;
import water.api.Schema;
/**
 * REST schema for the {@code [POST] /3/scalaint/<session_id>} endpoint:
 * submits Scala code to an interpreter session and carries back the
 * execution status, interpreter response, and captured console output.
 */
public class ScalaCodeV3 extends Schema<IcedCode, ScalaCodeV3> {
@API(help = "Session id identifying the correct interpreter", direction = API.Direction.INPUT)
public int session_id;
@API(help = "Scala code to interpret", direction = API.Direction.INPUT)
public String code;
@API(help = "Status of the code execution", direction = API.Direction.OUTPUT)
public String status;
@API(help = "Response of the interpreter", direction = API.Direction.OUTPUT)
public String response;
@API(help = "Redirected console output, for example output of println is stored to this field",
direction = API.Direction.OUTPUT)
public String output;
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/api/scalaInt/ScalaSessionIdV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package water.api.scalaInt;
import water.api.API;
import water.api.Schema;
/**
 * REST schema for the {@code [POST]} and {@code [DELETE] /3/scalaint} endpoint:
 * identifies a Scala interpreter session.
 */
public class ScalaSessionIdV3 extends Schema<IcedSessionId, ScalaSessionIdV3> {
@API(help = "Session id identifying the correct interpreter", direction = API.Direction.INOUT)
public int session_id;
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water
|
java-sources/ai/h2o/sparkling-water-core_2.10/1.6.13/water/munging/JoinMethod.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package water.munging;
// FIXME: we need to put this into H2O !!!
/**
 * Join strategies supported when merging frames.
 * {@link #name} carries the lower-case identifier string — note that this
 * field intentionally shadows the enum's built-in {@code name()} value
 * only in casing.
 */
public enum JoinMethod {
AUTO("auto"),
RADIX("radix"),
HASH("hash");
/** Lower-case identifier for this join method. */
public final String name;
JoinMethod(String name) {
this.name = name;
}
}
|
0
|
java-sources/ai/h2o/sparkling-water-core_2.11/3.46.0.6-1-2.4/ai/h2o
|
java-sources/ai/h2o/sparkling-water-core_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/H2OCredentials.java
|
package ai.h2o.sparkling;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import org.apache.commons.codec.binary.Base64;
/**
 * A serializable version of {@code water.webserver.iface.Credentials}.
 * Holds a user/password pair and renders it in the formats H2O's web server
 * expects (HTTP Basic auth header, hash-file entry). The MD5 hashing below is
 * copied from Jetty to stay wire-compatible with Jetty-generated hash files;
 * it is NOT a general-purpose password-hashing scheme.
 */
public class H2OCredentials implements Serializable {
private final String _user;
private final String _password;
/** No-arg constructor (required for serialization); both fields stay null. */
public H2OCredentials() {
_user = null;
_password = null;
}
public H2OCredentials(String user, String password) {
this._user = user;
this._password = password;
}
/** @return an HTTP {@code Authorization} header value ("Basic " + base64(user:password)). */
public String toBasicAuth() {
return "Basic " + base64EncodeToString(_user + ":" + _password);
}
/** @return one hash-file line in Jetty's "user: MD5:digest\n" format. */
public String toHashFileEntry() {
return _user + ": " + credentialMD5digest(_password) + "\n";
}
// NOTE(review): this exposes the plaintext password — must never reach regular logs.
public String toDebugString() {
return "Credentials[_user='" + _user + "', _password='" + _password + "']";
}
/** This replaces Jetty's B64Code.encode(). */
private static String base64EncodeToString(String s) {
// ISO-8859-1 matches Jetty's historical Basic-auth encoding.
final byte[] bytes = s.getBytes(StandardCharsets.ISO_8859_1);
return Base64.encodeBase64String(bytes);
}
// following part is copied out of Jetty's class org.eclipse.jetty.util.security.Credential$MD5,
// because we cannot depend on the library
private static final String __TYPE = "MD5:";
private static final Object __md5Lock = new Object();
private static MessageDigest __md;
/** This replaces Jetty's Credential.MD5.digest(). Thread-safe via __md5Lock. */
private static String credentialMD5digest(String password) {
try {
byte[] digest;
synchronized (__md5Lock) {
if (__md == null) {
try {
__md = MessageDigest.getInstance("MD5");
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
__md.reset();
__md.update(password.getBytes(StandardCharsets.ISO_8859_1));
digest = __md.digest();
}
return __TYPE + toString(digest, 16);
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
// Jetty's TypeUtil.toString: renders each byte as two base-`base` digits
// (hex for base 16), matching Jetty's hash-file format exactly.
private static String toString(byte[] bytes, int base) {
StringBuilder buf = new StringBuilder();
for (byte b : bytes) {
int bi = 0xff & b;
int c = '0' + (bi / base) % base;
if (c > '9') c = 'a' + (c - '0' - 10);
buf.append((char) c);
c = '0' + bi % base;
if (c > '9') c = 'a' + (c - '0' - 10);
buf.append((char) c);
}
return buf.toString();
}
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/internals/CategoricalConstants.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.internals;
import water.parser.Categorical;
/**
 * Access to H2O's maximum-categorical-levels limit, overridable for tests via
 * a system property.
 */
public final class CategoricalConstants {
  /** System property name tests may set to override the categorical-level limit. */
  public static final String TESTING_MAXIMUM_CATEGORICAL_LEVELS_PROPERTY_NAME =
      "testing.maximumCategoricalLevels";

  /**
   * Returns the maximum number of levels a categorical column may have.
   *
   * @return the testing override when the property is set, otherwise
   *     {@code Categorical.MAX_CATEGORICAL_COUNT}
   * @throws NumberFormatException if the override property is set but not a valid int
   */
  public static int getMaximumCategoricalLevels() {
    String testingThreshold = System.getProperty(TESTING_MAXIMUM_CATEGORICAL_LEVELS_PROPERTY_NAME);
    if (testingThreshold == null) {
      return Categorical.MAX_CATEGORICAL_COUNT;
    } else {
      // Integer.parseInt replaces the deprecated `new Integer(String).intValue()`
      // which allocated a boxed Integer only to unbox it immediately.
      return Integer.parseInt(testingThreshold);
    }
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/internals/CollectCategoricalDomainsTask.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.internals;
import java.util.Arrays;
import jsr166y.ForkJoinTask;
import water.H2O;
import water.Key;
import water.MRTask;
import water.parser.BufferedString;
import water.parser.PackedDomains;
import water.util.Log;
/**
 * MRTask that merges per-chunk categorical domains (stashed on each node via
 * {@link LocalNodeDomains}) into one global, sorted domain per column.
 * A column's merged domain becomes {@code null} as soon as its level count
 * exceeds the configured maximum, marking it as not representable as a
 * categorical column.
 */
public class CollectCategoricalDomainsTask extends MRTask<CollectCategoricalDomainsTask> {
  private final Key frameKey;
  // One packed, sorted domain per column; null means "too many levels".
  private byte[][] packedDomains;
  private int maximumCategoricalLevels;

  public CollectCategoricalDomainsTask(Key frameKey) {
    this.frameKey = frameKey;
    this.maximumCategoricalLevels = CategoricalConstants.getMaximumCategoricalLevels();
  }

  /** Merges the domains of all chunks residing on this node. */
  @Override
  public void setupLocal() {
    if (!LocalNodeDomains.containsDomains(frameKey)) return;
    final String[][][] localDomains = LocalNodeDomains.getDomains(frameKey);
    if (localDomains.length == 0) return;
    packedDomains = chunkDomainsToPackedDomains(localDomains[0]);
    for (int i = 1; i < localDomains.length; i++) {
      byte[][] anotherPackedDomains = chunkDomainsToPackedDomains(localDomains[i]);
      mergePackedDomains(packedDomains, anotherPackedDomains);
    }
    Log.trace("Done locally collecting domains on each node.");
  }

  /** Sorts and packs one chunk's per-column domains; oversized domains become null. */
  private byte[][] chunkDomainsToPackedDomains(String[][] domains) {
    byte[][] result = new byte[domains.length][];
    for (int i = 0; i < domains.length; i++) {
      String[] columnDomain = domains[i];
      if (columnDomain.length > this.maximumCategoricalLevels) {
        result[i] = null;
      } else {
        BufferedString[] values = BufferedString.toBufferedString(columnDomain);
        Arrays.sort(values);
        result[i] = PackedDomains.pack(values);
      }
    }
    return result;
  }

  /** Merges {@code source} into {@code target} in place, column by column. */
  private void mergePackedDomains(byte[][] target, byte[][] source) {
    for (int i = 0; i < target.length; i++) {
      if (target[i] == null || source[i] == null) {
        target[i] = null;
      } else if (PackedDomains.sizeOf(target[i]) + PackedDomains.sizeOf(source[i])
          > this.maximumCategoricalLevels) {
        // BUGFIX: compare domain cardinalities via PackedDomains.sizeOf — the
        // same upper-bound check reduce() uses — instead of raw packed byte
        // lengths, which overestimate the level count and dropped valid
        // domains too eagerly.
        target[i] = null;
      } else {
        target[i] = PackedDomains.merge(target[i], source[i]);
      }
    }
  }

  /** Merges the locally-collected domains of two nodes, one column per subtask. */
  @Override
  public void reduce(final CollectCategoricalDomainsTask other) {
    if (packedDomains == null) {
      packedDomains = other.packedDomains;
    } else if (other.packedDomains != null) { // merge two packed domains
      H2O.H2OCountedCompleter[] tasks = new H2O.H2OCountedCompleter[packedDomains.length];
      for (int i = 0; i < packedDomains.length; i++) {
        final int fi = i;
        tasks[i] =
            new H2O.H2OCountedCompleter(currThrPriority()) {
              @Override
              public void compute2() {
                if (packedDomains[fi] == null || other.packedDomains[fi] == null) {
                  packedDomains[fi] = null;
                } else if (PackedDomains.sizeOf(packedDomains[fi])
                        + PackedDomains.sizeOf(other.packedDomains[fi])
                    > maximumCategoricalLevels) {
                  packedDomains[fi] = null;
                } else {
                  packedDomains[fi] =
                      PackedDomains.merge(packedDomains[fi], other.packedDomains[fi]);
                }
                tryComplete();
              }
            };
      }
      ForkJoinTask.invokeAll(tasks);
    }
    Log.trace("Done merging domains.");
  }

  /**
   * @return per-column merged domains (null entries mark columns whose level
   *     count exceeded the maximum), or null when no domains were collected
   */
  public String[][] getDomains() {
    if (packedDomains == null) return null;
    String[][] result = new String[packedDomains.length][];
    for (int i = 0; i < packedDomains.length; i++) {
      if (packedDomains[i] == null) {
        result[i] = null;
      } else {
        result[i] = PackedDomains.unpackToStrings(packedDomains[i]);
      }
    }
    return result;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/internals/ConvertCategoricalToStringColumnsTask.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.internals;
import water.H2O;
import water.Key;
import water.MRTask;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Chunk;
import water.fvec.NewChunk;
/**
 * MRTask that rewrites categorical columns as string columns: each stored
 * category index is looked up in the node-local per-chunk domain (from
 * {@link LocalNodeDomains}) and emitted as its string value into the
 * corresponding NewChunk. The consumed local domains are released afterwards.
 */
public class ConvertCategoricalToStringColumnsTask
extends MRTask<ConvertCategoricalToStringColumnsTask> {
private final Key frameKey;
// Maps position in the processed chunk array -> index into the local domains.
private final int[] domainIndices;
public ConvertCategoricalToStringColumnsTask(Key frameKey, int[] domainIndices) {
this.frameKey = frameKey;
this.domainIndices = domainIndices;
}
@Override
public void map(Chunk[] chunks, NewChunk[] newChunks) {
int chunkId = chunks[0].cidx();
// The local domain for this chunk must have been registered earlier on this node.
if (!LocalNodeDomains.containsDomains(frameKey, chunkId)) {
throw new H2OIllegalArgumentException(
String.format(
"No local domain found for the chunk '%d' on the node '%s'.",
chunkId, H2O.SELF.getIpPortString()));
}
String[][] localDomains = LocalNodeDomains.getDomains(frameKey, chunkId);
for (int colIdx = 0; colIdx < chunks.length; colIdx++) {
int domainIdx = domainIndices[colIdx];
Chunk chunk = chunks[colIdx];
NewChunk newChunk = newChunks[colIdx];
String[] localDomain = localDomains[domainIdx];
for (int valIdx = 0; valIdx < chunk._len; ++valIdx) {
if (chunk.isNA(valIdx)) {
newChunk.addNA();
} else {
// Stored value is an index into the chunk-local domain.
final int oldValue = (int) chunk.at8(valIdx);
final String category = localDomain[oldValue];
newChunk.addStr(category);
}
}
}
}
@Override
public void closeLocal() {
super.closeLocal();
// Local domains for the converted columns are no longer needed on this node.
LocalNodeDomains.remove(frameKey, domainIndices);
}
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/internals/LocalNodeDomains.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.internals;
import java.util.ArrayList;
import water.Key;
import water.nbhm.NonBlockingHashMap;
/**
 * Node-local registry of categorical domains keyed by frame and by
 * (frame, chunk). All public methods are class-synchronized; the maps
 * themselves are concurrent but the compound updates below are not.
 */
public final class LocalNodeDomains {
  // All chunk domains registered for a frame on this node (insertion order).
  private static NonBlockingHashMap<Key, ArrayList<String[][]>> domainsMap =
      new NonBlockingHashMap<>();
  // Per-chunk view: "frameKey_chunkId" -> that chunk's per-column domains.
  private static NonBlockingHashMap<String, String[][]> domainsMapByChunk =
      new NonBlockingHashMap<>();
  // Reverse index so remove(frameKey) can purge the per-chunk entries.
  private static NonBlockingHashMap<Key, ArrayList<String>> frameKeyToChunkKeys =
      new NonBlockingHashMap<>();

  /** Registers the per-column domains of one chunk for the given frame. */
  public static synchronized void addDomains(Key frameKey, int chunkId, String[][] domains) {
    ArrayList<String[][]> nodeDomains = domainsMap.get(frameKey);
    if (nodeDomains == null) {
      nodeDomains = new ArrayList<>();
      domainsMap.put(frameKey, nodeDomains);
    }
    nodeDomains.add(domains);
    ArrayList<String> chunkKeys = frameKeyToChunkKeys.get(frameKey);
    if (chunkKeys == null) {
      chunkKeys = new ArrayList<>();
      frameKeyToChunkKeys.put(frameKey, chunkKeys);
    }
    String chunkKey = createChunkKey(frameKey, chunkId);
    chunkKeys.add(chunkKey);
    domainsMapByChunk.putIfAbsent(chunkKey, domains);
  }

  public static synchronized boolean containsDomains(Key frameKey) {
    return domainsMap.containsKey(frameKey);
  }

  public static synchronized boolean containsDomains(Key frameKey, int chunkId) {
    String chunkKey = createChunkKey(frameKey, chunkId);
    return domainsMapByChunk.containsKey(chunkKey);
  }

  /**
   * The method returns domains for all chunks on the H2O node. The first array level identifies
   * chunks, the second columns, the third column values.
   *
   * @param frameKey A key of a frame containing categorical domains.
   * @return Domains for all chunks on the H2O node.
   */
  public static synchronized String[][][] getDomains(Key frameKey) {
    return domainsMap.get(frameKey).toArray(new String[0][][]);
  }

  public static synchronized String[][] getDomains(Key frameKey, int chunkId) {
    String chunkKey = createChunkKey(frameKey, chunkId);
    return domainsMapByChunk.get(chunkKey);
  }

  /** Removes everything registered for the given frame on this node. */
  public static synchronized void remove(Key frameKey) {
    if (domainsMap.remove(frameKey) != null) {
      // remove() both fetches and deletes the reverse index; guard against an
      // inconsistent state where only domainsMap had an entry.
      ArrayList<String> chunkKeys = frameKeyToChunkKeys.remove(frameKey);
      if (chunkKeys != null) {
        for (String chunkKey : chunkKeys) {
          domainsMapByChunk.remove(chunkKey);
        }
      }
      // (A second frameKeyToChunkKeys.remove(frameKey) call here was redundant
      // and has been dropped.)
    }
  }

  /**
   * Removes local domains associated with the column indices
   *
   * @param frameKey a frame key
   * @param domainIndices a sorted list of domain indices
   */
  public static synchronized void remove(Key frameKey, int[] domainIndices) {
    if (domainIndices.length > 0) {
      ArrayList<String> chunkKeys = frameKeyToChunkKeys.get(frameKey);
      if (chunkKeys != null) {
        ArrayList<String[][]> newFrameDomains = new ArrayList<>();
        for (int i = 0; i < chunkKeys.size(); i++) {
          String chunkKey = chunkKeys.get(i);
          String[][] oldDomains = domainsMapByChunk.get(chunkKey);
          String[][] newDomains = removeDomains(oldDomains, domainIndices);
          domainsMapByChunk.replace(chunkKey, newDomains);
          newFrameDomains.add(newDomains);
        }
        domainsMap.replace(frameKey, newFrameDomains);
      }
    }
  }

  /** Copies {@code originalDomains} minus the (sorted) indices in {@code domainsToRemove}. */
  private static String[][] removeDomains(String[][] originalDomains, int[] domainsToRemove) {
    int newSize = originalDomains.length - domainsToRemove.length;
    String[][] result = new String[newSize][];
    int removedDomainIndex = 0;
    for (int originalIndex = 0; originalIndex < originalDomains.length; originalIndex++) {
      if (removedDomainIndex >= domainsToRemove.length
          || originalIndex != domainsToRemove[removedDomainIndex]) {
        result[originalIndex - removedDomainIndex] = originalDomains[originalIndex];
      } else {
        removedDomainIndex++;
      }
    }
    return result;
  }

  private static String createChunkKey(Key frameKey, int chunkId) {
    return frameKey.toString() + "_" + chunkId;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/internals/UpdateCategoricalIndicesTask.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.internals;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.CStrChunk;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.parser.BufferedString;
import water.util.IcedHashMap;
/**
 * MRTask that rewrites the stored categorical indices of selected columns so that the
 * per-node "local" domain indices are translated into indices of the frame's merged
 * (global) domain. Local domains are looked up via {@link LocalNodeDomains} per chunk
 * and removed once the task finishes on each node.
 */
public class UpdateCategoricalIndicesTask extends MRTask<UpdateCategoricalIndicesTask> {
  /** Key of the frame whose chunks are being re-indexed. */
  private final Key frameKey;
  /** Column indices (into the chunk array) of the categorical columns to update. */
  private final int[] categoricalColumns;

  public UpdateCategoricalIndicesTask(Key frameKey, int[] categoricalColumns) {
    this.frameKey = frameKey;
    this.categoricalColumns = categoricalColumns;
  }

  /** Builds a lookup from category string to its position within {@code domain}. */
  private static IcedHashMap<BufferedString, Integer> domainToCategoricalMap(String[] domain) {
    IcedHashMap<BufferedString, Integer> categoricalMap = new IcedHashMap<>();
    for (int j = 0; j < domain.length; j++) {
      categoricalMap.put(new BufferedString(domain[j]), j);
    }
    return categoricalMap;
  }

  @Override
  public void map(Chunk[] chunks) {
    Frame frame = DKV.getGet(frameKey);
    int chunkId = chunks[0].cidx();
    if (!LocalNodeDomains.containsDomains(frameKey, chunkId)) {
      throw new H2OIllegalArgumentException(
          String.format(
              "No local domain found for the chunk '%d' on the node '%s'.",
              chunkId, H2O.SELF.getIpPortString()));
    }
    String[][] localDomains = LocalNodeDomains.getDomains(frameKey, chunkId);
    for (int catColIdx = 0; catColIdx < categoricalColumns.length; catColIdx++) {
      int colId = categoricalColumns[catColIdx];
      Chunk chunk = chunks[colId];
      // Skip string chunks *before* building the global-domain lookup map; the
      // original code built (and then discarded) the map for skipped chunks.
      if (chunk instanceof CStrChunk) continue;
      String[] localDomain = localDomains[catColIdx];
      IcedHashMap<BufferedString, Integer> categoricalMap =
          domainToCategoricalMap(frame.vec(colId).domain());
      for (int valIdx = 0; valIdx < chunk._len; ++valIdx) {
        if (chunk.isNA(valIdx)) continue;
        final int oldValue = (int) chunk.at8(valIdx);
        final BufferedString category = new BufferedString(localDomain[oldValue]);
        // NOTE(review): assumes every local category is present in the frame's global
        // domain; a missing entry would NPE on unboxing — confirm upstream guarantees.
        final int newValue = categoricalMap.get(category);
        chunk.set(valIdx, newValue);
      }
      chunk.close(chunkId, _fs);
    }
  }

  @Override
  public void closeLocal() {
    super.closeLocal();
    // The local domains are no longer needed once this node's chunks are rewritten.
    LocalNodeDomains.remove(frameKey);
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api/schema/FinalizeFrameV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.rest.api.schema;
import water.Iced;
import water.api.API;
import water.api.schemas3.RequestSchemaV3;
/**
 * Request schema for finalizing a frame upload: identifies the frame and carries the
 * per-chunk row counts and column types as base64-encoded arrays.
 */
public class FinalizeFrameV3 extends RequestSchemaV3<Iced, FinalizeFrameV3> {
  @API(help = "Frame name", direction = API.Direction.INPUT)
  public String key = null;

  // long[] encoded with base64 — one entry per chunk.
  @API(
      help =
          "Number of rows represented by individual chunks. The type is long[] encoded with base64 encoding.",
      direction = API.Direction.INPUT)
  public String rows_per_chunk = null;

  // byte[] encoded with base64 — one H2O type code per column.
  @API(
      help = "H2O types of individual columns. The type is byte[] encoded with base64 encoding.",
      direction = API.Direction.INPUT)
  public String column_types = null;
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api/schema/InitializeFrameV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.rest.api.schema;
import water.Iced;
import water.api.API;
import water.api.schemas3.RequestSchemaV3;
/** Request schema for initializing a new frame identified by name, with its column names. */
public class InitializeFrameV3 extends RequestSchemaV3<Iced, InitializeFrameV3> {
  @API(help = "Frame Name", direction = API.Direction.INPUT)
  public String key = null;

  @API(help = "Column Names", direction = API.Direction.INPUT)
  public String[] columns = null;
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api/schema/LogLevelV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.rest.api.schema;
import water.Iced;
import water.api.API;
import water.api.schemas3.RequestSchemaV3;
/** Request/response schema carrying a log level; defaults to {@code "INFO"}. */
public class LogLevelV3 extends RequestSchemaV3<Iced, LogLevelV3> {
  @API(help = "Log Level", direction = API.Direction.INOUT)
  public String log_level = "INFO";
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api/schema/SWAvailableV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.rest.api.schema;
import water.Iced;
import water.api.schemas3.RequestSchemaV3;
/**
 * Empty request schema (no fields) — name suggests it is used to probe whether the
 * Sparkling Water REST extension is available; verify against the endpoint registration.
 */
public class SWAvailableV3 extends RequestSchemaV3<Iced, SWAvailableV3> {}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api/schema/UploadPlanV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.rest.api.schema;
import water.H2ONode;
import water.Iced;
import water.api.API;
import water.api.schemas3.RequestSchemaV3;
import water.api.schemas3.SchemaV3;
/**
 * Request/response schema for an upload plan: given a required number of chunks, returns
 * for each chunk the H2O node (index, ip, port) it should be uploaded to.
 */
public class UploadPlanV3 extends RequestSchemaV3<Iced, UploadPlanV3> {
  @API(help = "Required number of chunks", direction = API.Direction.INPUT)
  public int number_of_chunks = -1;

  @API(help = "Column Names", direction = API.Direction.OUTPUT)
  public ChunkAssigmentV3[] layout = null;

  /** Assignment of one chunk to the H2O node that should receive it. */
  public static class ChunkAssigmentV3 extends SchemaV3<Iced, UploadPlanV3.ChunkAssigmentV3> {
    @API(help = "An identifier unique in scope of a given frame", direction = API.Direction.OUTPUT)
    public int chunk_id;

    @API(
        help = "Index of H2O node where the chunk should be uploaded to",
        direction = API.Direction.OUTPUT)
    public int node_idx;

    @API(
        help = "IP address of H2O node where the chunk should be uploaded to",
        direction = API.Direction.OUTPUT)
    public String ip;

    @API(
        help = "Port of H2O node where the chunk should be uploaded to",
        direction = API.Direction.OUTPUT)
    public int port;

    public ChunkAssigmentV3() {}

    public ChunkAssigmentV3(int id, H2ONode node) {
      this.chunk_id = id;
      this.node_idx = node.index();
      String ipPort = node.getIpPortString();
      // Split on the LAST ':' so an address that itself contains colons keeps its
      // port intact; the original split(":") would mis-handle such addresses.
      int separator = ipPort.lastIndexOf(':');
      this.ip = ipPort.substring(0, separator);
      // Integer.parseInt avoids the deprecated Integer(String) boxing constructor.
      this.port = Integer.parseInt(ipPort.substring(separator + 1));
    }
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api/schema/VerifyVersionV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.rest.api.schema;
import water.Iced;
import water.api.API;
import water.api.schemas3.RequestSchemaV3;
/**
 * Request/response schema for cluster version verification: input is the reference
 * version, output lists the nodes whose version does not match.
 */
public class VerifyVersionV3 extends RequestSchemaV3<Iced, VerifyVersionV3> {
  @API(help = "Reference version to validate against", direction = API.Direction.INPUT)
  public String referenced_version;

  @API(help = "Nodes with wrong versions", direction = API.Direction.OUTPUT)
  public NodeWithVersionV3[] nodes_wrong_version;

  /** One offending node: its address and the version it reported. */
  public static class NodeWithVersionV3 extends RequestSchemaV3<Iced, NodeWithVersionV3> {
    @API(help = "Node address", direction = API.Direction.OUTPUT)
    public String ip_port;

    @API(help = "Node version", direction = API.Direction.OUTPUT)
    public String version;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/extensions/rest/api/schema/VerifyWebOpenV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.extensions.rest.api.schema;
import water.Iced;
import water.api.API;
import water.api.schemas3.RequestSchemaV3;
/** Response schema listing the addresses of cluster nodes whose web interface is disabled. */
public class VerifyWebOpenV3 extends RequestSchemaV3<Iced, VerifyWebOpenV3> {
  @API(help = "Nodes with disabled web", direction = API.Direction.OUTPUT)
  public String[] nodes_web_disabled;
}
|
0
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/water
|
java-sources/ai/h2o/sparkling-water-extensions_2.11/3.46.0.6-1-2.4/water/parser/CategoricalPreviewParseWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package water.parser;
/**
 * Helper that delegates to {@link PreviewParseWriter#guessType} for a column that is
 * known to be categorical: all non-empty lines are counted as strings and the given
 * domain is exposed through the {@code IDomain} wrapper.
 */
public class CategoricalPreviewParseWriter {

  /** Utility class — not instantiable. */
  private CategoricalPreviewParseWriter() {}

  /**
   * Guesses the H2O column type for a categorical column.
   *
   * @param domain distinct category values observed
   * @param nLines total number of lines previewed
   * @param nEmpty number of empty lines among them
   * @return the H2O type code chosen by {@link PreviewParseWriter#guessType}
   */
  public static byte guessType(String[] domain, int nLines, int nEmpty) {
    final int nStrings = nLines - nEmpty;
    // Categorical preview: no numeric, date, UUID or zero tallies by construction.
    final int nNums = 0;
    final int nDates = 0;
    final int nUUID = 0;
    final int nZeros = 0;
    PreviewParseWriter.IDomain domainWrapper =
        new PreviewParseWriter.IDomain() {
          public int size() {
            return domain.length;
          }

          public boolean contains(String value) {
            // Linear scan is fine — preview domains are small.
            for (String domainValue : domain) {
              if (value.equals(domainValue)) return true;
            }
            return false;
          }
        };
    return PreviewParseWriter.guessType(
        nLines, nNums, nStrings, nDates, nUUID, nZeros, nEmpty, domainWrapper);
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/hex
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/hex/schemas/SVMV3.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hex.schemas;
import org.apache.spark.ml.spark.models.MissingValuesHandling;
import org.apache.spark.ml.spark.models.svm.*;
import water.DKV;
import water.Key;
import water.Value;
import water.api.API;
import water.api.schemas3.KeyV3;
import water.api.schemas3.ModelParametersSchemaV3;
import water.fvec.Frame;
// Seems like this has to be in Java since H2O's frameworks uses reflection's getFields...
// I probably could mix Java and Scala here, leave SVMParametersV3 with fields as Java
// and then make the same Scala class SVMParametersV3 which extends it but not sure if it's worth it...
/**
 * REST schema for the Spark-backed SVM model builder. Field names and the static
 * {@code fields} array are consumed reflectively by H2O's schema framework, so the
 * member layout must stay as-is.
 */
public class SVMV3 extends ModelBuilderSchema<SVM, SVMV3, SVMV3.SVMParametersV3> {
  public static final class SVMParametersV3 extends
      ModelParametersSchemaV3<SVMParameters, SVMParametersV3> {
    // Ordered list of parameter names exposed through the REST API / Flow UI.
    public static String[] fields = new String[]{
        "model_id",
        "training_frame",
        "response_column",
        "initial_weights_frame",
        "validation_frame",
        "nfolds",
        "add_intercept",
        "step_size",
        "reg_param",
        "convergence_tol",
        "mini_batch_fraction",
        "threshold",
        "updater",
        "gradient",
        "ignored_columns",
        "ignore_const_cols",
        "missing_values_handling"
    };

    @API(help="Initial model weights.", direction=API.Direction.INOUT, gridable = true)
    public KeyV3.FrameKeyV3 initial_weights_frame;

    @API(help="Add intercept.", direction=API.Direction.INOUT, gridable = true, level = API.Level.expert)
    public boolean add_intercept = false;

    @API(help="Set step size", direction=API.Direction.INPUT, gridable = true, level = API.Level.expert)
    public double step_size = 1.0;

    @API(help="Set regularization parameter", direction=API.Direction.INPUT, gridable = true, level = API.Level.expert)
    public double reg_param = 0.01;

    @API(help="Set convergence tolerance", direction=API.Direction.INPUT, gridable = true, level = API.Level.expert)
    public double convergence_tol = 0.001;

    @API(help="Set mini batch fraction", direction=API.Direction.INPUT, gridable = true, level = API.Level.expert)
    public double mini_batch_fraction = 1.0;

    // TODO what exactly does INOUT do?? Should this be only INPUT?
    @API(help="Set threshold that separates positive predictions from negative ones. NaN for raw prediction.", direction=API.Direction.INOUT, gridable = true, level = API.Level.expert)
    public double threshold = 0.0;

    @API(help="Set the updater for SGD.", direction=API.Direction.INPUT, values = {"L2", "L1", "Simple"}, required = true, gridable = true, level = API.Level.expert)
    public Updater updater = Updater.L2;

    @API(help="Set the gradient computation type for SGD.", direction=API.Direction.INPUT, values = {"Hinge", "LeastSquares", "Logistic"}, required = true, gridable = true, level = API.Level.expert)
    public Gradient gradient = Gradient.Hinge;

    @API(level = API.Level.expert, direction = API.Direction.INOUT, gridable = true,
        values = {"NotAllowed", "Skip", "MeanImputation"},
        help = "Handling of missing values. Either NotAllowed, Skip or MeanImputation.")
    public MissingValuesHandling missing_values_handling;

    /**
     * Copies values from the implementation object into this schema; additionally
     * resolves the initial-weights key into a FrameKeyV3 when the frame exists in DKV.
     */
    @Override
    public SVMParametersV3 fillFromImpl(SVMParameters impl) {
      super.fillFromImpl(impl);
      if (null != impl._initial_weights) {
        Value v = DKV.get(impl._initial_weights);
        if (null != v) {
          initial_weights_frame = new KeyV3.FrameKeyV3(((Frame) v.get())._key);
        }
      }
      return this;
    }

    /**
     * Copies values from this schema into the implementation object; translates the
     * initial-weights frame name back into a typed Key (or null when unset).
     */
    @Override
    public SVMParameters fillImpl(SVMParameters impl) {
      super.fillImpl(impl);
      impl._initial_weights =
          null == this.initial_weights_frame ? null : Key.<Frame>make(this.initial_weights_frame.name);
      return impl;
    }
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models/MissingValuesHandling.java
|
package org.apache.spark.ml.spark.models;
/** Strategies for handling missing values: reject them, skip the rows, or impute with the mean. */
public enum MissingValuesHandling {
  NotAllowed, Skip, MeanImputation
}
|
0
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models/svm/Gradient.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.spark.models.svm;
import org.apache.spark.mllib.optimization.HingeGradient;
import org.apache.spark.mllib.optimization.LeastSquaresGradient;
import org.apache.spark.mllib.optimization.LogisticGradient;
/**
 * Gradient functions selectable for SGD training, each wrapping the corresponding
 * Spark MLlib implementation.
 */
public enum Gradient {
  Hinge(new HingeGradient()),
  LeastSquares(new LeastSquaresGradient()),
  Logistic(new LogisticGradient());

  // final: the wrapped Spark gradient never changes after construction.
  private final org.apache.spark.mllib.optimization.Gradient sparkGradient;

  Gradient(org.apache.spark.mllib.optimization.Gradient sparkGradient) {
    this.sparkGradient = sparkGradient;
  }

  /** Returns the underlying Spark MLlib gradient implementation. */
  public org.apache.spark.mllib.optimization.Gradient get() {
    return sparkGradient;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models/svm/SVM.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.spark.models.svm;
import hex.*;
import org.apache.spark.SparkContext;
import org.apache.spark.h2o.H2OContext;
import org.apache.spark.ml.spark.ProgressListener;
import org.apache.spark.ml.FrameMLUtils;
import org.apache.spark.ml.spark.models.MissingValuesHandling;
import org.apache.spark.ml.spark.models.svm.SVMModel.SVMOutput;
import org.apache.spark.mllib.classification.SVMWithSGD;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.mllib.linalg.Vectors;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.storage.RDDInfo;
import scala.Tuple2;
import water.DKV;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.Log;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import static scala.collection.JavaConversions.*;
/**
 * H2O ModelBuilder that trains a linear SVM by delegating to Spark MLlib's
 * {@code SVMWithSGD}: the H2O frame is converted to an RDD of LabeledPoints,
 * trained on Spark, and the resulting weights/intercept are copied back into
 * an {@link SVMModel}.
 */
public class SVM extends ModelBuilder<SVMModel, SVMParameters, SVMOutput> {
  // Spark H2O context; transient so it is not carried along with H2O serialization.
  transient private final H2OContext hc;

  public SVM(boolean startup_once, H2OContext hc) {
    super(new SVMParameters(), startup_once);
    this.hc = hc;
  }

  public SVM(SVMParameters parms, H2OContext hc) {
    super(parms);
    init(false);
    this.hc = hc;
  }

  @Override
  protected Driver trainModelImpl() {
    return new SVMDriver();
  }

  @Override
  public ModelCategory[] can_build() {
    return new ModelCategory[]{
        ModelCategory.Binomial,
        ModelCategory.Regression
    };
  }

  @Override
  public boolean isSupervised() {
    return true;
  }

  /**
   * Validates parameters and the training frame. Checks performed (each failure is
   * reported via error(), not thrown): initial weights shape and NA-freedom, NA policy
   * when NotAllowed, column types (numeric/categorical only), response column presence,
   * and the threshold/response-domain consistency (NaN threshold = regression; a
   * 2-level categorical response with a numeric threshold = binomial).
   */
  @Override
  public void init(boolean expensive) {
    super.init(expensive);
    _parms.validate(this);
    if (_train == null) return;
    if (null != _parms._initial_weights) {
      Frame user_points = _parms._initial_weights.get();
      if (user_points.numCols() != _train.numCols() - numSpecialCols()) {
        error("_initial_weights",
            "The user-specified initial weights must have the same number of columns " +
                "(" + (_train.numCols() - numSpecialCols()) + ") as the training observations");
      }
      if (user_points.hasNAs()) {
        error("_initial_weights", "Initial weights cannot contain missing values.");
      }
    }
    if(MissingValuesHandling.NotAllowed == _parms._missing_values_handling) {
      // Every non-ignored column must be NA-free when NAs are not allowed.
      for (int i = 0; i < _train.numCols(); i++) {
        Vec vec = _train.vec(i);
        String vecName = _train.name(i);
        // NOTE(review): binarySearch assumes _ignored_columns is sorted — confirm upstream.
        if (vec.naCnt() > 0 && (null == _parms._ignored_columns || Arrays.binarySearch(_parms._ignored_columns, vecName) < 0)) {
          error("_train", "Training frame cannot contain any missing values [" + vecName + "].");
        }
      }
    }
    Set<String> ignoredCols = null != _parms._ignored_columns ?
        new HashSet<String>(Arrays.asList(_parms._ignored_columns)) :
        new HashSet<String>();
    for (int i = 0; i < _train.vecs().length; i++) {
      Vec vec = _train.vec(i);
      if (!ignoredCols.contains(_train.name(i)) && !(vec.isNumeric() || vec.isCategorical())) {
        error("_train", "SVM supports only frames with numeric/categorical values (except for result column). But a " + vec.get_type_str() + " was found.");
      }
    }
    if (null != _parms._response_column && null == _train.vec(_parms._response_column)) {
      error("_train", "Training frame has to contain the response column.");
    }
    if (_train != null && _parms._response_column != null) {
      String[] responseDomains = responseDomains();
      if (null == responseDomains) {
        // Numeric (domain-less) response => regression mode; threshold must be NaN.
        if (!(Double.isNaN(_parms._threshold))) {
          error("_threshold", "Threshold cannot be set for regression SVM. Set the threshold to NaN or modify the response column to an enum.");
        }
        if (!_train.vec(_parms._response_column).isNumeric()) {
          error("_response_column", "Regression SVM requires the response column type to be numeric.");
        }
      } else {
        // Categorical response => binomial mode; threshold required, domain size must be 2.
        if (Double.isNaN(_parms._threshold)) {
          error("_threshold", "Threshold has to be set for binomial SVM. Set the threshold to a numeric value or change the response column type.");
        }
        if (responseDomains.length != 2) {
          error("_response_column", "SVM requires the response column's domain to be of size 2.");
        }
      }
    }
  }

  /** Returns the response column's domain, or null when absent or the column is not found. */
  private String[] responseDomains() {
    int idx = _parms.train().find(_parms._response_column);
    if (idx == -1) {
      return null;
    }
    return _parms.train().domains()[idx];
  }

  /** Offset/weight/fold columns plus the response column itself. */
  @Override
  public int numSpecialCols() {
    return (hasOffsetCol() ? 1 : 0) +
        (hasWeightCol() ? 1 : 0) +
        (hasFoldCol() ? 1 : 0) + 1;
  }

  /** Driver that performs the actual Spark-side SGD training. */
  private final class SVMDriver extends Driver {
    transient private SparkContext sc = hc.sparkContext();
    transient private H2OContext h2oContext = hc;
    transient private SQLContext sqlContext = SQLContext.getOrCreate(sc);

    @Override
    public void computeImpl() {
      init(true);
      // The model to be built
      SVMModel model = new SVMModel(dest(), _parms, new SVMModel.SVMOutput(SVM.this));
      try {
        model.delete_and_lock(_job);
        // Convert H2O frame -> RDD<LabeledPoint>; points._2() carries the column means
        // used for imputation.
        Tuple2<RDD<LabeledPoint>, double[]> points = FrameMLUtils.toLabeledPoints(
            _train,
            _parms._response_column,
            model._output.nfeatures(),
            _parms._missing_values_handling,
            h2oContext,
            sqlContext
        );
        RDD<LabeledPoint> training = points._1();
        training.cache();
        if(training.count() == 0 &&
            MissingValuesHandling.Skip == _parms._missing_values_handling) {
          throw new H2OIllegalArgumentException("No rows left in the dataset after filtering out rows with missing values. Ignore columns with many NAs or set missing_values_handling to 'MeanImputation'.");
        }
        SVMWithSGD svm = new SVMWithSGD();
        svm.setIntercept(_parms._add_intercept);
        svm.optimizer().setNumIterations(_parms._max_iterations);
        svm.optimizer().setStepSize(_parms._step_size);
        svm.optimizer().setRegParam(_parms._reg_param);
        svm.optimizer().setMiniBatchFraction(_parms._mini_batch_fraction);
        svm.optimizer().setConvergenceTol(_parms._convergence_tol);
        svm.optimizer().setGradient(_parms._gradient.get());
        svm.optimizer().setUpdater(_parms._updater.get());
        // Surfaces Spark job progress ("treeAggregate" stages) in the H2O job bar.
        ProgressListener progressBar = new ProgressListener(sc,
            _job,
            RDDInfo.fromRdd(training),
            iterableAsScalaIterable(Arrays.asList("treeAggregate")));
        sc.addSparkListener(progressBar);
        final org.apache.spark.mllib.classification.SVMModel trainedModel =
            (null == _parms._initial_weights) ?
                svm.run(training) :
                svm.run(training, vec2vec(_parms.initialWeights().vecs()));
        training.unpersist(false);
        sc.listenerBus().listeners().remove(progressBar);
        // Copy the trained coefficients back into the H2O model output.
        model._output.weights_$eq(trainedModel.weights().toArray());
        model._output.iterations_$eq(_parms._max_iterations);
        model._output.interceptor_$eq(trainedModel.intercept());
        model._output.numMeans_$eq(points._2());
        // Score on training (and optionally validation) data to populate metrics.
        Frame train = DKV.<Frame>getGet(_parms._train);
        model.score(train).delete();
        model._output._training_metrics = ModelMetrics.getFromDKV(model, train);
        model.update(_job);
        if (_valid != null) {
          model.score(_parms.valid()).delete();
          model._output._validation_metrics =
              ModelMetrics.getFromDKV(model, _parms.valid());
          model.update(_job);
        }
        model._output.interceptor_$eq(trainedModel.intercept());
        Log.info(model._output._model_summary);
      } finally {
        model.unlock(_job);
      }
    }

    /** Reads the first row of each Vec into a dense Spark Vector (initial weights). */
    private Vector vec2vec(Vec[] vals) {
      double[] dense = new double[vals.length];
      for (int i = 0; i < vals.length; i++) {
        dense[i] = vals[i].at(0);
      }
      return Vectors.dense(dense);
    }
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models/svm/SVMParameters.java
|
package org.apache.spark.ml.spark.models.svm;
import hex.Model;
import org.apache.spark.ml.spark.models.MissingValuesHandling;
import water.Key;
import water.fvec.Frame;
/**
 * Parameters for the Spark-backed SVM model builder. Public fields are read
 * reflectively by H2O's parameter framework.
 */
public class SVMParameters extends Model.Parameters {

  @Override
  public String algoName() {
    return "SVM";
  }

  @Override
  public String fullName() {
    return "Support Vector Machine (*Spark*)";
  }

  @Override
  public String javaName() {
    return SVMModel.class.getName();
  }

  @Override
  public long progressUnits() {
    return _max_iterations;
  }

  /** Resolves the initial-weights frame from DKV, or null when no key is set. */
  public final Frame initialWeights() {
    return _initial_weights == null ? null : _initial_weights.get();
  }

  public int _max_iterations = 1000;
  public boolean _add_intercept = false;
  public double _step_size = 1.0;
  public double _reg_param = 0.01;
  public double _convergence_tol = 0.001;
  public double _mini_batch_fraction = 1.0;
  public double _threshold = 0.0;
  public Updater _updater = Updater.L2;
  public Gradient _gradient = Gradient.Hinge;
  public Key<Frame> _initial_weights = null;
  public MissingValuesHandling _missing_values_handling = MissingValuesHandling.MeanImputation;

  /** Reports parameter violations through the builder's error() mechanism. */
  public void validate(SVM svm) {
    if (_max_iterations < 0 || _max_iterations > 1e6) {
      svm.error("_max_iterations", " max_iterations must be between 0 and 1e6");
    }
    if (_step_size <= 0) {
      svm.error("_step_size", "The step size has to be positive.");
    }
    if (_reg_param <= 0) {
      svm.error("_reg_param", "The regularization parameter has to be positive.");
    }
    if (_convergence_tol <= 0) {
      svm.error("_convergence_tol", "The convergence tolerance has to be positive.");
    }
    if (_mini_batch_fraction <= 0) {
      svm.error("_mini_batch_fraction", "The minimum batch fraction has to be positive.");
    }
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models
|
java-sources/ai/h2o/sparkling-water-ml_2.10/1.6.13/org/apache/spark/ml/spark/models/svm/Updater.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.spark.models.svm;
import org.apache.spark.mllib.optimization.L1Updater;
import org.apache.spark.mllib.optimization.SimpleUpdater;
import org.apache.spark.mllib.optimization.SquaredL2Updater;
/**
 * Supported regularization updaters, each wrapping the corresponding Spark
 * MLlib {@code org.apache.spark.mllib.optimization.Updater} instance.
 */
public enum Updater {
  /** Squared L2 (ridge) regularization via Spark's {@code SquaredL2Updater}. */
  L2(new SquaredL2Updater()),
  /** L1 (lasso) regularization via Spark's {@code L1Updater}. */
  L1(new L1Updater()),
  /** Plain gradient step without regularization via Spark's {@code SimpleUpdater}. */
  Simple(new SimpleUpdater());

  // final: each constant holds exactly one immutable Spark updater instance
  // (the original field was mutable for no reason).
  private final org.apache.spark.mllib.optimization.Updater sparkUpdater;

  Updater(org.apache.spark.mllib.optimization.Updater sparkUpdater) {
    this.sparkUpdater = sparkUpdater;
  }

  /**
   * @return the wrapped Spark MLlib updater
   */
  public org.apache.spark.mllib.optimization.Updater get() {
    return sparkUpdater;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-scoring_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml
|
java-sources/ai/h2o/sparkling-water-scoring_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml/internals/H2OMetric.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.internals;
/**
 * Metrics reported by H2O models, each tagged with its optimization direction
 * (whether a higher value indicates a better model).
 */
public enum H2OMetric {
  AUTO(true),
  MeanResidualDeviance(false),
  MAE(false),
  RMSLE(false),
  R2(true),
  ResidualDeviance(false),
  ResidualDegreesOfFreedom(false),
  NullDeviance(false),
  NullDegreesOfFreedom(false),
  AIC(true),
  AUC(true),
  PRAUC(true),
  Gini(true),
  F1(true),
  F2(true),
  F0point5(true),
  Precision(true),
  Recall(true),
  MCC(true),
  Logloss(false),
  Error(false),
  MaxPerClassError(false),
  Accuracy(true),
  MSE(false),
  RMSE(false),
  Withinss(false),
  Betweenss(true),
  TotWithinss(false),
  Totss(false),
  MeanPerClassError(false),
  ScoringTime(false),
  Nobs(true),
  MeanNormalizedScore(false),
  MeanScore(false),
  Concordance(true),
  Concordant(true),
  Discordant(false),
  TiedY(true),
  NumErr(false),
  NumCnt(true),
  CatErr(false),
  CatCnt(true),
  Loglikelihood(true);

  // final: the direction flag is fixed per constant (the original field was
  // mutable, which is an anti-pattern for enum state).
  private final boolean higherTheBetter;

  H2OMetric(boolean higherTheBetter) {
    this.higherTheBetter = higherTheBetter;
  }

  /**
   * @return {@code true} if larger values of this metric indicate a better model
   */
  public boolean higherTheBetter() {
    return higherTheBetter;
  }
}
|
0
|
java-sources/ai/h2o/sparkling-water-scoring_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml
|
java-sources/ai/h2o/sparkling-water-scoring_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml/metrics/MetricsDescription.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.metrics;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Attaches a human-readable description to a metrics class.
 * <p>
 * Retained at runtime so the description can be read reflectively; applicable
 * to types only.
 * </p>
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface MetricsDescription {
  /** Human-readable description of the annotated metrics class; empty by default. */
  // Dropped the redundant "public" modifier — annotation members are implicitly public.
  String description() default "";
}
|
0
|
java-sources/ai/h2o/sparkling-water-scoring_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml
|
java-sources/ai/h2o/sparkling-water-scoring_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml/params/H2OTargetEncoderProblemType.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.params;
/**
 * The kind of machine-learning problem a target encoder is applied to.
 */
public enum H2OTargetEncoderProblemType {
  /** Infer the problem type automatically. */
  Auto,
  /** Categorical (classification) response column. */
  Classification,
  /** Numeric (regression) response column. */
  Regression
}
|
0
|
java-sources/ai/h2o/sparkling-water-utils_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml
|
java-sources/ai/h2o/sparkling-water-utils_2.11/3.46.0.6-1-2.4/ai/h2o/sparkling/ml/utils/H2OAutoMLSortMetric.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.utils;
/**
 * Metrics by which the H2O AutoML leaderboard can be sorted.
 * <p>
 * Constant names intentionally mirror the identifiers H2O's backend expects,
 * including the lowercase ones.
 * </p>
 */
public enum H2OAutoMLSortMetric {
  /** Let AutoML pick the sort metric. */
  AUTO,
  deviance,
  logloss,
  MSE,
  RMSE,
  MAE,
  RMSLE,
  AUC,
  mean_per_class_error
}
|
0
|
java-sources/ai/h2o/xgboost-predictor/0.3.20/biz/k11i
|
java-sources/ai/h2o/xgboost-predictor/0.3.20/biz/k11i/xgboost/Predictor.java
|
package biz.k11i.xgboost;
import biz.k11i.xgboost.config.PredictorConfiguration;
import biz.k11i.xgboost.gbm.GradBooster;
import biz.k11i.xgboost.learner.ObjFunction;
import biz.k11i.xgboost.spark.SparkModelParam;
import biz.k11i.xgboost.util.FVec;
import biz.k11i.xgboost.util.ModelReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.Arrays;
/**
 * Predicts using the Xgboost model.
 * <p>
 * Loads a serialized XGBoost model (the native binary format, including models
 * written by xgboost4j-spark) from an {@link InputStream} and evaluates it on
 * {@link FVec} feature vectors in pure Java.
 * </p>
 */
public class Predictor implements Serializable {
// Global model parameters read from the model header.
private ModelParam mparam;
// Extra metadata present only for models saved by xgboost4j-spark; null otherwise.
private SparkModelParam sparkModelParam;
// Name of the objective function as stored in the model file.
private String name_obj;
// Name of the gradient booster implementation as stored in the model file.
private String name_gbm;
// Objective function used to transform raw margins into predictions.
private ObjFunction obj;
// The gradient booster holding the actual model (trees/weights).
private GradBooster gbm;
// Global bias added to raw predictions; converted to margin space for
// models saved with XGBoost >= 1.0 (see the constructor).
private float base_score;
/**
 * Instantiates with the Xgboost model using the default configuration.
 *
 * @param in input stream with the serialized model
 * @throws IOException If an I/O error occurs
 */
public Predictor(InputStream in) throws IOException {
this(in, null);
}
/**
 * Instantiates with the Xgboost model
 *
 * @param in input stream
 * @param configuration configuration; {@code null} selects {@link PredictorConfiguration#DEFAULT}
 * @throws IOException If an I/O error occurs
 */
public Predictor(InputStream in, PredictorConfiguration configuration) throws IOException {
if (configuration == null) {
configuration = PredictorConfiguration.DEFAULT;
}
ModelReader reader = new ModelReader(in);
readParam(reader);
initObjFunction(configuration);
initObjGbm();
gbm.loadModel(configuration, reader, mparam.saved_with_pbuffer != 0);
// Since XGBoost 1.0 the stored base_score is in probability space and must be
// mapped back to margin space before it can be added to raw predictions.
if (mparam.major_version >= 1) {
base_score = obj.probToMargin(mparam.base_score);
} else {
base_score = mparam.base_score;
}
}
// Reads the model header: base score and feature count, optionally preceded by
// xgboost4j-spark metadata, followed by the objective and booster names.
void readParam(ModelReader reader) throws IOException {
byte[] first4Bytes = reader.readByteArray(4);
byte[] next4Bytes = reader.readByteArray(4);
float base_score;
int num_feature;
if (first4Bytes[0] == 0x62 &&
first4Bytes[1] == 0x69 &&
first4Bytes[2] == 0x6e &&
first4Bytes[3] == 0x66) {
// Old model file format has a signature "binf" (62 69 6e 66)
base_score = reader.asFloat(next4Bytes);
num_feature = reader.readUnsignedInt();
} else if (first4Bytes[0] == 0x00 &&
first4Bytes[1] == 0x05 &&
first4Bytes[2] == 0x5f) {
// Model generated by xgboost4j-spark?
String modelType = null;
if (first4Bytes[3] == 0x63 &&
next4Bytes[0] == 0x6c &&
next4Bytes[1] == 0x73 &&
next4Bytes[2] == 0x5f) {
// classification model ("cls_" marker)
modelType = SparkModelParam.MODEL_TYPE_CLS;
} else if (first4Bytes[3] == 0x72 &&
next4Bytes[0] == 0x65 &&
next4Bytes[1] == 0x67 &&
next4Bytes[2] == 0x5f) {
// regression model ("reg_" marker)
modelType = SparkModelParam.MODEL_TYPE_REG;
}
if (modelType != null) {
// Length of the features-column name: its high byte arrived with the
// signature bytes, the low byte is the next byte in the stream.
int len = (next4Bytes[3] << 8) + (reader.readByteAsInt());
String featuresCol = reader.readUTF(len);
this.sparkModelParam = new SparkModelParam(modelType, featuresCol, reader);
base_score = reader.readFloat();
num_feature = reader.readUnsignedInt();
} else {
// Not a recognized Spark marker after all — reinterpret the 8 bytes
// already read as the plain header (base_score, num_feature).
base_score = reader.asFloat(first4Bytes);
num_feature = reader.asUnsignedInt(next4Bytes);
}
} else {
// Plain header: base_score followed by num_feature.
base_score = reader.asFloat(first4Bytes);
num_feature = reader.asUnsignedInt(next4Bytes);
}
mparam = new ModelParam(base_score, num_feature, reader);
name_obj = reader.readString();
name_gbm = reader.readString();
}
// Chooses the objective function: the custom one from the configuration if
// set, otherwise the one named in the model file.
void initObjFunction(PredictorConfiguration configuration) {
obj = configuration.getObjFunction();
if (obj == null) {
obj = ObjFunction.fromName(name_obj);
}
}
// Creates the gradient booster and wires in the model dimensions.
// NOTE(review): the first assignment below overwrites any custom ObjFunction
// that initObjFunction() took from the configuration, since the constructor
// calls this method afterwards — confirm whether that is intentional.
void initObjGbm() {
obj = ObjFunction.fromName(name_obj);
gbm = GradBooster.Factory.createGradBooster(name_gbm);
gbm.setNumClass(mparam.num_class);
gbm.setNumFeature(mparam.num_feature);
}
/**
 * Generates predictions for given feature vector.
 *
 * @param feat feature vector
 * @return prediction values (transformed by the objective function)
 */
public float[] predict(FVec feat) {
return predict(feat, false);
}
/**
 * Generates predictions for given feature vector.
 *
 * @param feat feature vector
 * @param output_margin whether to only predict margin value instead of transformed prediction
 * @return prediction values
 */
public float[] predict(FVec feat, boolean output_margin) {
return predict(feat, output_margin, 0);
}
/**
 * Generates predictions for given feature vector.
 *
 * @param feat feature vector
 * @param base_margin predict with base margin for each prediction
 * @return prediction values
 */
public float[] predict(FVec feat, float base_margin) {
return predict(feat, base_margin, 0);
}
/**
 * Generates predictions for given feature vector.
 *
 * @param feat feature vector
 * @param base_margin predict with base margin for each prediction
 * @param ntree_limit limit the number of trees used in prediction
 * @return prediction values
 */
public float[] predict(FVec feat, float base_margin, int ntree_limit) {
float[] preds = predictRaw(feat, ntree_limit, base_margin);
preds = obj.predTransform(preds);
return preds;
}
/**
 * Generates predictions for given feature vector.
 *
 * @param feat feature vector
 * @param output_margin whether to only predict margin value instead of transformed prediction
 * @param ntree_limit limit the number of trees used in prediction
 * @return prediction values
 */
public float[] predict(FVec feat, boolean output_margin, int ntree_limit) {
float[] preds = predictRaw(feat, ntree_limit, base_score);
if (! output_margin) {
preds = obj.predTransform(preds);
}
return preds;
}
// Raw (margin-space) predictions; preserves the exact floating-point
// summation order of the XGBoost version the model was built with.
float[] predictRaw(FVec feat, int ntree_limit, float base_score) {
if (isBeforeOrEqual12()) {
float[] preds = gbm.predict(feat, ntree_limit, 0 /* intentionally use 0 and add base score after to have the same floating point order of operation */);
for (int i = 0; i < preds.length; i++) {
preds[i] += base_score;
}
return preds;
} else {
// Since xgboost 1.3 the floating point operations order has changed - add base_score as first and predictions after
return gbm.predict(feat, ntree_limit, base_score);
}
}
/**
 * Generates a prediction for given feature vector.
 * <p>
 * This method only works when the model outputs single value.
 * </p>
 *
 * @param feat feature vector
 * @return prediction value
 */
public float predictSingle(FVec feat) {
return predictSingle(feat, false);
}
/**
 * Generates a prediction for given feature vector.
 * <p>
 * This method only works when the model outputs single value.
 * </p>
 *
 * @param feat feature vector
 * @param output_margin whether to only predict margin value instead of transformed prediction
 * @return prediction value
 */
public float predictSingle(FVec feat, boolean output_margin) {
return predictSingle(feat, output_margin, 0);
}
/**
 * Generates a prediction for given feature vector.
 * <p>
 * This method only works when the model outputs single value.
 * </p>
 *
 * @param feat feature vector
 * @param output_margin whether to only predict margin value instead of transformed prediction
 * @param ntree_limit limit the number of trees used in prediction
 * @return prediction value
 */
public float predictSingle(FVec feat, boolean output_margin, int ntree_limit) {
float pred = predictSingleRaw(feat, ntree_limit);
if (!output_margin) {
pred = obj.predTransform(pred);
}
return pred;
}
// Single-output variant of predictRaw; same version-dependent summation order.
float predictSingleRaw(FVec feat, int ntree_limit) {
if (isBeforeOrEqual12()) {
return gbm.predictSingle(feat, ntree_limit, 0) + base_score;
} else {
return gbm.predictSingle(feat, ntree_limit, base_score);
}
}
/**
 * Predicts leaf index of each tree.
 *
 * @param feat feature vector
 * @return leaf indexes
 */
public int[] predictLeaf(FVec feat) {
return predictLeaf(feat, 0);
}
/**
 * Predicts leaf index of each tree.
 *
 * @param feat feature vector
 * @param ntree_limit limit, 0 for all
 * @return leaf indexes
 */
public int[] predictLeaf(FVec feat, int ntree_limit) {
return gbm.predictLeaf(feat, ntree_limit);
}
/**
 * Predicts path to leaf of each tree.
 *
 * @param feat feature vector
 * @return leaf paths
 */
public String[] predictLeafPath(FVec feat) {
return predictLeafPath(feat, 0);
}
/**
 * Predicts path to leaf of each tree.
 *
 * @param feat feature vector
 * @param ntree_limit limit, 0 for all
 * @return leaf paths
 */
public String[] predictLeafPath(FVec feat, int ntree_limit) {
return gbm.predictLeafPath(feat, ntree_limit);
}
/**
 * @return Spark-specific model metadata, or {@code null} for non-Spark models
 */
public SparkModelParam getSparkModelParam() {
return sparkModelParam;
}
/**
 * Returns number of class.
 *
 * @return number of class
 */
public int getNumClass() {
return mparam.num_class;
}
/**
 * Used e.g. for the change of floating point operation order in between xgboost 1.2 and 1.3
 *
 * @return True if the booster was built with xgboost version <= 1.2.
 */
private boolean isBeforeOrEqual12() {
return mparam.major_version < 1 || (mparam.major_version == 1 && mparam.minor_version <= 2);
}
/**
 * Parameters.
 * <p>
 * Mirrors the fixed-size parameter struct of the XGBoost binary model header;
 * fields are read in declaration order from the stream.
 * </p>
 */
static class ModelParam implements Serializable {
/* \brief global bias */
final float base_score;
/* \brief number of features */
final /* unsigned */ int num_feature;
/* \brief number of class, if it is multi-class classification */
final int num_class;
/*! \brief whether the model itself is saved with pbuffer */
final int saved_with_pbuffer;
/*! \brief Model contain eval metrics */
private final int contain_eval_metrics;
/*! \brief the version of XGBoost. */
private final int major_version;
private final int minor_version;
/*! \brief reserved field */
final int[] reserved;
// base_score and num_feature were already consumed by readParam(); the rest
// of the struct (including 27 reserved ints) is read here.
ModelParam(float base_score, int num_feature, ModelReader reader) throws IOException {
this.base_score = base_score;
this.num_feature = num_feature;
this.num_class = reader.readInt();
this.saved_with_pbuffer = reader.readInt();
this.contain_eval_metrics = reader.readInt();
this.major_version = reader.readUnsignedInt();
this.minor_version = reader.readUnsignedInt();
this.reserved = reader.readIntArray(27);
}
}
/**
 * @return the loaded gradient booster
 */
public GradBooster getBooster(){
return gbm;
}
/**
 * @return the objective function name stored in the model
 */
public String getObjName() {
return name_obj;
}
/**
 * @return the global bias in margin space
 */
public float getBaseScore() {
return base_score;
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.