index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/h2o/h2o-persist-s3/3.46.0.7/water
|
java-sources/ai/h2o/h2o-persist-s3/3.46.0.7/water/persist/RegisterRestApi.java
|
package water.persist;
import water.api.AbstractRegister;
import water.api.RestApiContext;
/**
 * Registers the REST endpoints that manage Amazon S3 credentials for the
 * S3 persistence backend.
 */
public class RegisterRestApi extends AbstractRegister {

  /**
   * Registers POST/DELETE endpoints on {@code /3/PersistS3} backed by
   * {@link PersistS3Handler}.
   *
   * @param context REST API registration context supplied by the framework
   */
  @Override
  public void registerEndPoints(RestApiContext context) {
    context.registerEndpoint("set_s3_credentials", "POST /3/PersistS3", PersistS3Handler.class,
        "setS3Credentials", "Set Amazon S3 credentials (Secret Key ID, Secret Access Key)");
    // FIX: corrected grammar in the help text ("Remove store" -> "Remove stored")
    context.registerEndpoint("remove_s3_credentials", "DELETE /3/PersistS3", PersistS3Handler.class,
        "removeS3Credentials", "Remove stored Amazon S3 credentials");
  }

  /** @return human-readable name of this REST API extension */
  @Override
  public String getName() {
    return "Amazon S3";
  }
}
|
0
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water/network/SecurityUtils.java
|
package water.network;
import water.network.util.ExternalKeytool;
import water.network.util.JavaVersionUtils;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.*;
import java.util.Properties;
/**
 * Helpers for generating self-signed keystores, random passwords and SSL
 * property files used to secure internal H2O communication.
 */
public class SecurityUtils {

    private static final SecureRandom RANDOM = new SecureRandom();

    /** Alphabet used for random password generation (digits + ASCII letters). */
    private static final String AB = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

    /**
     * Keytool entry points to probe, in order. On Java 16+ the sun.security.*
     * classes are strongly encapsulated, so an external keytool process wrapper
     * is tried first.
     */
    private final static String[] keyToolCandidates;

    static {
        final int ver = JavaVersionUtils.getMajorVersion();
        // BUG FIX: the candidate array previously contained a null first entry on
        // Java < 16 and relied on Class.forName(null) throwing an NPE that was
        // silently swallowed by the lookup loop; build the list without nulls.
        if (ver >= 16) {
            keyToolCandidates = new String[] {
                    ExternalKeytool.class.getName(),   // Java 16 and later (don't access sun-private API)
                    "sun.security.tools.KeyTool",      // Java 6
                    "sun.security.tools.keytool.Main", // Java 7 - Java 15
                    "com.ibm.crypto.tools.KeyTool"     // IBM Java
            };
        } else {
            keyToolCandidates = new String[] {
                    "sun.security.tools.KeyTool",      // Java 6
                    "sun.security.tools.keytool.Main", // Java 7 - Java 15
                    "com.ibm.crypto.tools.KeyTool"     // IBM Java
            };
        }
    }

    /** Generates a keystore named {@code h2o-internal.jks} under {@code location}. */
    private static StoreCredentials generateKeystore(String password, String location) throws Exception {
        return generateKeystore(password, "h2o-internal.jks", location);
    }

    /** Generates a keystore named {@code h2o-internal.jks} in the working directory. */
    private static StoreCredentials generateKeystore(String password) throws Exception {
        return generateKeystore(password, "h2o-internal.jks", "");
    }

    /**
     * Generates a JKS keystore with a single self-signed RSA key pair valid for
     * 10 years. The file is registered for deletion on JVM exit.
     *
     * @param password store and key password
     * @param name     keystore file name
     * @param location directory for the keystore; may be null/empty for cwd
     * @throws IllegalStateException when the target file already exists
     */
    private static StoreCredentials generateKeystore(String password, String name, String location) throws Exception {
        String path = null != location && !location.isEmpty() ? location + File.separatorChar + name : name;
        if (new File(path).exists()) {
            throw new IllegalStateException("A file under the location " + path + " already exists. Please delete it first.");
        }
        String[] genKeyArgs = new String[]{
                "-genkeypair",
                "-alias", "h2o-internal",
                "-keyalg", "RSA",
                "-sigalg", "SHA256withRSA",
                "-dname", "CN=Java",
                "-storetype", "JKS",
                "-keypass", password,
                "-keystore", path,
                "-storepass", password,
                "-validity", "3650"
        };
        Class<?> keytool = getKeyToolClass();
        // all supported keytool entry points expose a static main(String[])
        keytool.getMethod("main", String[].class).invoke(null, (Object) genKeyArgs);
        new File(path).deleteOnExit(); // keystores are transient; clean up at shutdown
        return new StoreCredentials(name, location, password);
    }

    /**
     * Locates a usable keytool implementation for this JVM.
     *
     * @throws IllegalStateException when none of the known candidates is present
     */
    static Class<?> getKeyToolClass() {
        for (String keyToolCandidate : keyToolCandidates) {
            try {
                return Class.forName(keyToolCandidate);
            } catch (Exception e) {
                // Candidate not available on this JVM; try the next one.
            }
        }
        // Unsupported JRE/JDK
        throw new IllegalStateException("Your Java version doesn't support generating keystore. " +
                "Please use Oracle/OpenJDK version 8 or later.");
    }

    /**
     * Generates a keystore/truststore pair. The same JKS file is used for both
     * roles (self-signed setup).
     */
    public static SSLCredentials generateSSLPair(String passwd, String name, String location) throws Exception {
        StoreCredentials jks = generateKeystore(passwd, name, location);
        return new SSLCredentials(jks, jks);
    }

    /** Generates an SSL pair with a random 16-char password in a fresh temp directory. */
    public static SSLCredentials generateSSLPair() throws Exception {
        Path temp = Files.createTempDirectory("h2o-internal-jks-" + Long.toString(System.nanoTime()));
        temp.toFile().deleteOnExit();
        StoreCredentials jks = generateKeystore(passwordGenerator(16), temp.toAbsolutePath().toString());
        return new SSLCredentials(jks, jks);
    }

    /** @return a random alphanumeric password of the given length */
    public static String passwordGenerator(int len) {
        StringBuilder sb = new StringBuilder(len);
        for (int i = 0; i < len; i++) {
            sb.append(AB.charAt(RANDOM.nextInt(AB.length())));
        }
        return sb.toString();
    }

    /** Writes the SSL properties for {@code credentials} into a fresh temp file. */
    public static String generateSSLConfig(SSLCredentials credentials) throws IOException {
        File temp = File.createTempFile("h2o-internal-" + System.nanoTime(), "-ssl.properties");
        temp.deleteOnExit();
        return generateSSLConfig(credentials, temp.getAbsolutePath());
    }

    /**
     * Serializes the SSL configuration (protocol, store names and passwords) as
     * a Java properties file.
     *
     * @return path of the written file (same as {@code file})
     */
    static String generateSSLConfig(SSLCredentials credentials, String file) throws IOException {
        Properties sslConfig = new Properties();
        sslConfig.put("h2o_ssl_protocol", defaultTLSVersion());
        sslConfig.put("h2o_ssl_jks_internal", credentials.jks.name);
        sslConfig.put("h2o_ssl_jks_password", credentials.jks.pass);
        sslConfig.put("h2o_ssl_jts", credentials.jts.name);
        sslConfig.put("h2o_ssl_jts_password", credentials.jts.pass);
        // CLEANUP: removed a stray "// ignore" comment left after the
        // try-with-resources; the store is flushed and closed automatically.
        try (FileOutputStream output = new FileOutputStream(file)) {
            sslConfig.store(output, "");
        }
        return file;
    }

    /** @return TLSv1 on Java 6, otherwise TLSv1.2 */
    public static String defaultTLSVersion() {
        return System.getProperty("java.version", "NA").startsWith("1.6") ? "TLSv1" : "TLSv1.2";
    }

    /** Location + password of a single key/trust store file. */
    public static class StoreCredentials {
        public String name; // file name of the store
        public String path; // directory; may be empty for cwd
        public String pass; // store/key password

        StoreCredentials(String name, String path, String pass) {
            this.name = name;
            this.path = path;
            this.pass = pass;
        }

        /** @return full path of the store file */
        public String getLocation() {
            return null != path && !path.isEmpty() ? path + File.separatorChar + name : name;
        }
    }

    /** Keystore + truststore pair (may reference the same underlying file). */
    public static class SSLCredentials {
        public StoreCredentials jks;
        public StoreCredentials jts;

        SSLCredentials(StoreCredentials jks, StoreCredentials jts) {
            this.jks = jks;
            this.jts = jts;
        }
    }
}
|
0
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water/network
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water/network/util/ExternalKeytool.java
|
package water.network.util;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Executors;
public class ExternalKeytool {
public static void main(String[] args) throws Exception {
String javaHome = System.getProperty("java.home");
String keytoolPath = javaHome != null ?
new File(javaHome, new File("bin", "keytool").getPath()).getAbsolutePath() : "keytool";
List<String> command = new ArrayList<>(args.length + 1);
command.add(keytoolPath);
command.addAll(Arrays.asList(args));
ProcessBuilder builder = new ProcessBuilder();
builder.command(command);
Process process = builder.start();
StreamGobbler streamGobbler = new StreamGobbler(process.getInputStream());
Executors.newSingleThreadExecutor().submit(streamGobbler);
int exitCode = process.waitFor();
if (exitCode != 0) {
throw new IllegalStateException("External keytool execution failed (exit code: " + exitCode + ").");
}
}
}
|
0
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water/network
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water/network/util/JavaVersionUtils.java
|
package water.network.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Parses the Java major version out of the {@code java.version} system
 * property, handling both legacy ("1.8.0_292") and modern ("17.0.1") schemes.
 */
public class JavaVersionUtils {

    /** Returned when the version string is absent or unparseable. */
    private static final int UNKNOWN = -1;

    // Compile once; "1.<major>..." (legacy) or "<major>..." (Java 9+).
    // BUG FIX: use [0-9]+ instead of [0-9]* so a malformed string such as "1."
    // can no longer capture an empty group and crash Integer.parseInt with an
    // uncaught NumberFormatException.
    private static final Pattern VERSION_PATTERN = Pattern.compile("1\\.([0-9]+).*|([0-9]+).*");

    /** @return major version of the running JVM, or {@code -1} when unknown */
    public static int getMajorVersion() {
        return parseMajor(System.getProperty("java.version"));
    }

    /**
     * Extracts the major version from a version string.
     *
     * @param version raw version string; may be null
     * @return major version, or {@code -1} when it cannot be determined
     */
    static int parseMajor(String version) {
        if (version != null) {
            final Matcher matcher = VERSION_PATTERN.matcher(version);
            if (matcher.matches()) {
                String major = matcher.group(1) != null ? matcher.group(1) : matcher.group(2);
                try {
                    return Integer.parseInt(major);
                } catch (NumberFormatException nfe) {
                    return UNKNOWN; // absurdly long digit runs overflow int
                }
            }
        }
        return UNKNOWN;
    }
}
|
0
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water/network
|
java-sources/ai/h2o/h2o-security/3.46.0.7/water/network/util/StreamGobbler.java
|
package water.network.util;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
 * Runnable that pumps every line of an input stream to {@code System.out}
 * until end-of-stream (typically the stdout of a child process).
 */
class StreamGobbler implements Runnable {

    /** Source stream whose contents are forwarded to stdout. */
    private final InputStream _is;

    StreamGobbler(InputStream is) {
        _is = is;
    }

    /** Reads and prints lines until the stream is exhausted. */
    @Override
    public void run() {
        InputStreamReader decoded = new InputStreamReader(_is);
        BufferedReader reader = new BufferedReader(decoded);
        reader.lines().forEachOrdered(line -> System.out.println(line));
    }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/TestFrameCatalog.java
|
package water;
import org.junit.Ignore;
import water.fvec.Frame;
import water.fvec.TestFrameBuilder;
import water.fvec.Vec;
import java.util.UUID;
import static water.TestUtil.*;
@Ignore // prepackaged small H2O Frames
public class TestFrameCatalog {

  /** Tiny 3-row frame: two numeric and two categorical columns. */
  public static Frame oneChunkFewRows() {
    TestFrameBuilder builder = new TestFrameBuilder()
        .withVecTypes(Vec.T_NUM, Vec.T_NUM, Vec.T_CAT, Vec.T_CAT)
        .withDataForCol(0, new double[]{1.2, 3.4, 5.6})
        .withDataForCol(1, new double[]{-1, 0, 1})
        .withDataForCol(2, new String[]{"a", "b", "a"})
        .withDataForCol(3, new String[]{"y", "y", "n"});
    return builder.build();
  }

  /** 7-row frame exercising fold, weight, offset and response columns. */
  public static Frame specialColumns() {
    TestFrameBuilder builder = new TestFrameBuilder()
        .withColNames("Fold", "ColA", "Response", "ColB", "Weight", "Offset", "ColC")
        .withVecTypes(Vec.T_NUM, Vec.T_NUM, Vec.T_NUM, Vec.T_STR, Vec.T_NUM, Vec.T_NUM, Vec.T_CAT)
        .withDataForCol(0, ard(0, 1, 0, 1, 0, 1, 0))
        .withDataForCol(1, ard(Double.NaN, 1, 2, 3, 4, 5.6, 7))
        .withDataForCol(2, ard(1, 2, 3, 4, 1, 2, 3))
        .withDataForCol(3, ar("A", "B", "C", "E", "F", "I", "J"))
        .withDataForCol(4, ard(0.25, 0.25, 0.5, 0.5, 0.5, 0.75, 0.75))
        .withDataForCol(5, ard(0.1, 0.1, 0.1, 0.1, 0.2, 0.2, 0.2))
        .withDataForCol(6, ar("A", "B,", "A", "C", "A", "B", "A"));
    return builder.build();
  }

  /**
   * Creates a frame with columns that are typically not used in model building (UUID, Bad)
   * @return a frame instance
   */
  public static Frame unusualTypes() {
    TestFrameBuilder builder = new TestFrameBuilder()
        .withColNames("UUIDCol", "BadCol")
        .withVecTypes(Vec.T_UUID, Vec.T_BAD)
        .withDataForCol(0, ar(UUID.randomUUID().toString(), null))
        .withDataForCol(1, ard(Double.NaN, Double.NaN));
    return builder.build();
  }

  /** Parses the prostate dataset, categorizes key columns and drops the ID. */
  public static Frame prostateCleaned() {
    final String response = "CAPSULE";
    final String testFile = "./smalldata/logreg/prostate.csv";
    Frame frame = parseAndTrackTestFile(testFile)
        .toCategoricalCol("RACE")
        .toCategoricalCol("GLEASON")
        .toCategoricalCol(response);
    frame.remove("ID").remove();
    DKV.put(frame);
    return frame;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/TestUtil.java
|
package water;
import Jama.Matrix;
import hex.CreateFrame;
import hex.Model;
import hex.SplitFrame;
import hex.genmodel.GenModel;
import hex.genmodel.ModelMojoReader;
import hex.genmodel.MojoReaderBackend;
import hex.genmodel.MojoReaderBackendFactory;
import hex.genmodel.easy.RowData;
import org.junit.AfterClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import water.api.StreamingSchema;
import water.fvec.*;
import water.init.NetworkInit;
import water.junit.Priority;
import water.junit.rules.RulesPriorities;
import water.parser.BufferedString;
import water.parser.DefaultParserProviders;
import water.parser.ParseDataset;
import water.parser.ParseSetup;
import water.util.Timer;
import water.util.*;
import water.util.fp.Function;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLConnection;
import java.util.*;
import static org.junit.Assert.*;
import static water.util.ArrayUtils.gaussianVector;
@Ignore("Support for tests, but no actual tests here")
public class TestUtil extends Iced {
{ // we need assertions to be checked at least when tests are running
ClassLoader loader = getClass().getClassLoader();
loader.setDefaultAssertionStatus(true);
}
// True when running under JaCoCo coverage instrumentation (timeouts are extended)
public final static boolean JACOCO_ENABLED = Boolean.parseBoolean(System.getProperty("test.jacocoEnabled", "false"));
// Guards against booting the local H2O node more than once per JVM
private static boolean _stall_called_before = false;
// Test-name prefixes to skip, from -Dignore.tests (null = no filter)
private static String[] ignoreTestsNames;
// Test-name prefixes to exclusively run, from -Ddoonly.tests (null = no filter)
private static String[] doonlyTestsNames;
// K/V store size snapshot taken after clouding; used by leak detection
protected static int _initial_keycnt = 0;
/**
* Minimal cloud size to start test.
*/
public static int MINCLOUDSIZE = Integer.parseInt(System.getProperty("cloudSize", "1"));
/**
* Default time in ms to wait for clouding
*/
protected static int DEFAULT_TIME_FOR_CLOUDING = 60000 /* ms */;
/** Creates a test utility requiring a single-node cloud. */
public TestUtil() {
  this(1);
}

/**
 * Creates a test utility, raising the global minimum cloud size if needed
 * and loading the ignore/do-only test filters from system properties.
 */
public TestUtil(int minCloudSize) {
  MINCLOUDSIZE = Math.max(MINCLOUDSIZE, minCloudSize);
  String ignoreProp = System.getProperty("ignore.tests");
  if (ignoreProp != null) {
    ignoreTestsNames = splitTestNames(ignoreProp);
  }
  String doonlyProp = System.getProperty("doonly.tests");
  if (doonlyProp != null) {
    doonlyTestsNames = splitTestNames(doonlyProp);
  }
}

/** Splits a comma-separated list; a lone empty entry maps to null (no filter). */
private static String[] splitTestNames(String value) {
  String[] parts = value.split(",");
  return (parts.length == 1 && parts[0].equals("")) ? null : parts;
}
// ==== Test Setup & Teardown Utilities ====
// Stall test until we see at least X members of the Cloud
/** Clouding timeout in ms; extended 10x when running under JaCoCo. */
protected static int getDefaultTimeForClouding() {
  if (JACOCO_ENABLED) {
    return DEFAULT_TIME_FOR_CLOUDING * 10;
  }
  return DEFAULT_TIME_FOR_CLOUDING;
}

/** Blocks until the cloud has at least {@code x} members (default timeout). */
public static void stall_till_cloudsize(int x) {
  stall_till_cloudsize(x, getDefaultTimeForClouding());
}
/**
 * Flattens a 2-D array into a 1-D array, concatenating the rows in order.
 *
 * @param arr input matrix; may be empty and may have ragged (unequal) rows
 * @return a new array containing every element of {@code arr}, row by row
 */
public static double[] changeDouble2SingleArray(double[][] arr) {
  // BUG FIX: the original sized the result as arr.length * arr[0].length,
  // which NPE'd on an empty input and mis-sized the result for ragged rows;
  // size by the sum of actual row lengths instead.
  int totalLength = 0;
  for (double[] row : arr) {
    totalLength += row.length;
  }
  double[] result = new double[totalLength];
  int offset = 0;
  for (double[] row : arr) {
    System.arraycopy(row, 0, result, offset, row.length);
    offset += row.length;
  }
  return result;
}
/** Blocks until the cloud has {@code x} members, with an explicit timeout. */
public static void stall_till_cloudsize(int x, int timeout) {
stall_till_cloudsize(new String[]{}, x, timeout);
}
/** Blocks until the cloud has {@code x} members, passing args to H2O.main. */
public static void stall_till_cloudsize(String[] args, int x) {
stall_till_cloudsize(args, x, getDefaultTimeForClouding());
}
/**
 * Boots a local H2O node (at most once per JVM), waits for the cloud to reach
 * the requested size, snapshots the K/V store size for leak detection, and
 * starts serving the REST API.
 *
 * @param args    arguments forwarded to {@code H2O.main} on first boot
 * @param x       required cloud size (raised to MINCLOUDSIZE if smaller)
 * @param timeout max wait for clouding, in ms
 */
public static void stall_till_cloudsize(String[] args, int x, int timeout) {
x = Math.max(MINCLOUDSIZE, x);
if (!_stall_called_before) {
H2O.main(args);
// register web resource roots so REST/UI assets resolve when run from a checkout
H2O.registerResourceRoot(new File(System.getProperty("user.dir") + File.separator + "h2o-web/src/main/resources/www"));
H2O.registerResourceRoot(new File(System.getProperty("user.dir") + File.separator + "h2o-core/src/main/resources/www"));
ExtensionManager.getInstance().registerRestApiExtensions();
_stall_called_before = true;
}
H2O.waitForCloudSize(x, timeout);
_initial_keycnt = H2O.store_size();
// Finalize registration of REST API to enable tests which are touching Schemas.
H2O.startServingRestApi();
}
/**
 * JUnit after-class hook: asserts that the K/V store grew by no un-accounted
 * keys during the test class, prints any leaked keys, then wipes the store.
 */
@AfterClass
public static void checkLeakedKeys() {
int leaked_keys = H2O.store_size() - _initial_keycnt;
int cnt = 0;
if (leaked_keys > 0) {
int print_max = 10;
for (Key k : H2O.localKeySet()) {
Value value = Value.STORE_get(k);
// Ok to leak VectorGroups and the Jobs list
if (value == null || value.isVecGroup() || value.isESPCGroup() || k == Job.LIST ||
// Also leave around all attempted Jobs for the Jobs list
(value.isJob() && value.<Job>get().isStopped())) {
leaked_keys--;
} else {
System.out.println(k + " -> " + (value.type() != TypeMap.PRIM_B ? value.get() : "byte[]"));
if (cnt++ < print_max)
System.err.println("Leaked key: " + k + " = " + TypeMap.className(value.type()));
}
}
if (print_max < leaked_keys) System.err.println("... and " + (leaked_keys - print_max) + " more leaked keys");
}
assertTrue("Keys leaked: " + leaked_keys + ", cnt = " + cnt, leaked_keys <= 0 || cnt == 0);
// Bulk brainless key removal. Completely wipes all Keys without regard.
new DKVCleaner().doAllNodes();
_initial_keycnt = H2O.store_size();
}
/**
 * MRTask that removes, on every node, all K/V entries whose values are
 * instances of any of the given classes (sparing groups, jobs and raw bytes).
 */
private static class KeyCleaner extends MRTask<KeyCleaner> {
private final Class[] objectType;
private KeyCleaner(Class[] objectType) {
this.objectType = objectType;
}
@Override
protected void setupLocal() {
Futures fs = new Futures();
for (Key k : H2O.localKeySet()) {
Value value = Value.STORE_get(k);
if (value == null || value.isVecGroup() || value.isESPCGroup() || k == Job.LIST ||
value.isJob() || value.type() == TypeMap.PRIM_B
) {
// do nothing
} else {
for (Class c : objectType) {
if (c.isInstance(value.get())) {
DKV.remove(k, fs);
break;
}
}
}
}
fs.blockForPending();
}
}
/** Removes all stored values of the given types from every node's K/V store. */
public static void cleanupKeys(Class... objectType) {
new KeyCleaner(objectType).doAllNodes();
}
/** Builds a {@code row} x {@code col} matrix of Gaussian draws from a seeded RNG. */
public static double[][] genRandomMatrix(int row, int col, long seedValue) {
  Random rng = new Random(seedValue);
  double[][] matrix = new double[row][];
  for (int r = 0; r < row; r++) {
    matrix[r] = gaussianVector(col, rng);
  }
  return matrix;
}
/**
 * Builds a symmetric matrix intended to be positive semi-definite: averages a
 * random matrix with its transpose, then overwrites the diagonal with scaled
 * positive values.
 * NOTE(review): the diagonal draws use a hard-coded seed (123) rather than
 * {@code seedValue} — presumably intentional for reproducibility, but verify.
 */
public static double[][] genSymPsdMatrix(int matSize, long seedValue, int multiplier) {
double[][] mat = genRandomMatrix(matSize, matSize, seedValue);
// generate symmetric matrix
Matrix matT = new Matrix(mat);
Matrix symMat = matT.plus(matT.transpose()).times(0.5);
for (int index=0; index<matSize; index++) {
symMat.set(index, index, Math.abs(genRandomMatrix(1,1,123)[0][0])*multiplier);
}
return symMat.getArray();
}
/** Builds a length-{@code length} array of Gaussian draws from a seeded RNG. */
public static double[] genRandomArray(int length, long seedValue) {
  Random rng = new Random(seedValue);
  return gaussianVector(length, rng);
}
/**
 * Asserts element-wise near-equality of two arrays with a relative tolerance
 * scaled by the smaller magnitude; pairs where either value is NaN are skipped.
 */
public static void checkArrays(double[] expected, double[] actual, double threshold) {
for (int i = 0; i < actual.length; i++) {
if (!Double.isNaN(expected[i]) && !Double.isNaN(actual[i])) // only compare when both are not NaN
assertEquals(expected[i], actual[i], threshold * Math.min(Math.abs(expected[i]), Math.abs(actual[i])));
}
}
/** Asserts shape and element-wise near-equality of two 2-D arrays. */
public static void checkDoubleArrays(double[][] expected, double[][] actual, double threshold) {
int len1 = expected.length;
assertEquals(len1, actual.length);
for (int ind = 0; ind < len1; ind++) {
assertEquals(expected[ind].length, actual[ind].length);
checkArrays(expected[ind], actual[ind], threshold);
}
}
/** Asserts shape and element-wise near-equality of two 3-D arrays. */
public static void check3DArrays(double[][][] expected, double[][][] actual, double threshold) {
int len = expected.length;
assertEquals(len, actual.length);
for (int ind=0; ind < len; ind++) {
checkDoubleArrays(expected[ind], actual[ind], threshold);
}
}
/**
 * Asserts that two 2-D int arrays have identical shape and contents.
 */
public static void checkIntArrays(int[][] expected, int[][] actual) {
  int len1 = expected.length;
  assertEquals(len1, actual.length);
  for (int ind = 0; ind < len1; ind++) {
    assertEquals(expected[ind].length, actual[ind].length);
    // BUG FIX: the boolean result of Arrays.equals was previously discarded,
    // so element mismatches were never reported; assert row equality instead.
    assertArrayEquals(expected[ind], actual[ind]);
  }
}
/**
 * @deprecated use {@link #generateEnumOnly(int, int, int, double)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected static Frame generate_enum_only(int numCols, int numRows, int num_factor, double missingfrac) {
return generateEnumOnly(numCols, numRows, num_factor, missingfrac);
}
/**
 * generate random frames containing enum columns only
 *
 * @param numCols number of categorical columns
 * @param numRows number of rows
 * @param num_factor number of distinct factor levels per column
 * @param missingfrac fraction of missing values
 * @return a frame of purely categorical columns, seeded from wall-clock time
 */
protected static Frame generateEnumOnly(int numCols, int numRows, int num_factor, double missingfrac) {
long seed = System.currentTimeMillis();
System.out.println("Createframe parameters: rows: " + numRows + " cols:" + numCols + " seed: " + seed);
return generateEnumOnly(numCols, numRows, num_factor, missingfrac, seed);
}
/**
 * @deprecated use {@link #generateEnumOnly(int, int, int, double, long)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected static Frame generate_enum_only(int numCols, int numRows, int num_factor, double missingfrac, long seed) {
return generateEnumOnly(numCols, numRows, num_factor, missingfrac, seed);
}
/** Generates a purely categorical frame with a caller-supplied seed. */
public static Frame generateEnumOnly(int numCols, int numRows, int num_factor, double missingfrac, long seed) {
CreateFrame cf = new CreateFrame();
cf.rows = numRows;
cf.cols = numCols;
cf.factors = num_factor;
cf.binary_fraction = 0;
cf.integer_fraction = 0;
cf.categorical_fraction = 1; // all columns categorical
cf.has_response = false;
cf.missing_fraction = missingfrac;
cf.seed = seed;
System.out.println("Createframe parameters: rows: " + numRows + " cols:" + numCols + " seed: " + cf.seed);
return cf.execImpl().get();
}
/**
 * @deprecated use {@link #generateRealOnly(int, int, double)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected static Frame generate_real_only(int numCols, int numRows, double missingfrac) {
return generateRealOnly(numCols, numRows, missingfrac);
}
/** Generates a purely real-valued frame, seeded from wall-clock time. */
protected static Frame generateRealOnly(int numCols, int numRows, double missingfrac) {
long seed = System.currentTimeMillis();
System.out.println("Createframe parameters: rows: " + numRows + " cols:" + numCols + " seed: " + seed);
return generateRealOnly(numCols, numRows, missingfrac, seed);
}
/**
 * @deprecated use {@link #generateRealOnly(int, int, double, long)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected static Frame generate_real_only(int numCols, int numRows, double missingfrac, long seed) {
return generateRealOnly(numCols, numRows, missingfrac, seed);
}
/** Generates a real-only frame with default value range 100. */
protected static Frame generateRealOnly(int numCols, int numRows, double missingfrac, long seed) {
return generateRealWithRangeOnly(numCols, numRows, missingfrac, seed, 100);
}
/** Generates a frame of real columns only, with an explicit value range. */
protected static Frame generateRealWithRangeOnly(int numCols, int numRows, double missingfrac, long seed, long range) {
CreateFrame cf = new CreateFrame();
cf.rows = numRows;
cf.cols = numCols;
cf.binary_fraction = 0;
cf.integer_fraction = 0;
cf.categorical_fraction = 0;
cf.time_fraction = 0;
cf.string_fraction = 0;
cf.has_response = false;
cf.missing_fraction = missingfrac;
cf.real_range = range;
cf.seed = seed;
System.out.println("Createframe parameters: rows: " + numRows + " cols:" + numCols + " seed: " + cf.seed +
" range: "+range);
return cf.execImpl().get();
}
/**
 * @deprecated use {@link #generateIntOnly(int, int, int, double)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected static Frame generate_int_only(int numCols, int numRows, int integer_range, double missingfrac) {
return generateIntOnly(numCols, numRows, integer_range, missingfrac);
}
/** Generates a purely integer frame, seeded from wall-clock time. */
protected static Frame generateIntOnly(int numCols, int numRows, int integer_range, double missingfrac) {
long seed = System.currentTimeMillis();
System.out.println("Createframe parameters: rows: " + numRows + " cols:" + numCols + " seed: " + seed);
return generateIntOnly(numCols, numRows, integer_range, missingfrac, seed);
}
/**
 * @deprecated use {@link #generateIntOnly(int, int, int, double, long)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected static Frame generate_int_only(int numCols, int numRows, int integer_range, double missingfrac, long seed) {
return generateIntOnly(numCols, numRows, integer_range, missingfrac, seed);
}
/** Generates a frame of integer columns only, with a caller-supplied seed. */
protected static Frame generateIntOnly(int numCols, int numRows, int integerRange, double missingfrac, long seed) {
CreateFrame cf = new CreateFrame();
cf.rows = numRows;
cf.cols = numCols;
cf.binary_fraction = 0;
cf.integer_fraction = 1; // all columns integer
cf.categorical_fraction = 0;
cf.time_fraction = 0;
cf.string_fraction = 0;
cf.has_response = false;
cf.missing_fraction = missingfrac;
cf.integer_range = integerRange;
cf.seed = seed;
System.out.println("Createframe parameters: rows: " + numRows + " cols:" + numCols + " seed: " + cf.seed);
return cf.execImpl().get();
}
/** Returns {@code [offset, offset+1, ..., offset+numEle-1]}. */
protected static int[] rangeFun(int numEle, int offset) {
  int[] values = new int[numEle];
  for (int i = 0; i < numEle; i++) {
    values[i] = offset + i;
  }
  return values;
}

/** Returns an array of random sort directions, each either -1 or 1. */
protected static int[] sortDir(int numEle, Random rand) {
  int[] directions = new int[numEle];
  for (int i = 0; i < numEle; i++) {
    directions[i] = rand.nextInt(2) == 0 ? -1 : 1;
  }
  return directions;
}
/** MRTask that unconditionally wipes the local K/V store and Vec ESPC cache on every node. */
public static class DKVCleaner extends MRTask<DKVCleaner> {
@Override
public void setupLocal() {
H2O.raw_clear();
water.fvec.Vec.ESPC.clear();
}
}
// current running test - assumes no test parallelism just like the rest of this class
public static Description CURRENT_TEST_DESCRIPTION;
/**
 * Execute this rule before each test to print test name and test class
 */
@Rule
transient public TestRule logRule = new TestRule() {
@Override
public Statement apply(Statement base, Description description) {
Log.info("###########################################################");
Log.info(" * Test class name: " + description.getClassName());
Log.info(" * Test method name: " + description.getMethodName());
Log.info("###########################################################");
CURRENT_TEST_DESCRIPTION = description;
return base;
}
};
/* Ignore tests specified in the ignore.tests system property: applied last, if test is ignored, no other rule with be evaluated */
@Rule
transient public TestRule runRule = new @Priority(RulesPriorities.RUN_TEST) TestRule() {
@Override
public Statement apply(Statement base, Description description) {
String testName = description.getClassName() + "#" + description.getMethodName();
boolean ignored = false;
// ignore-list: any matching prefix disables the test
if (ignoreTestsNames != null && ignoreTestsNames.length > 0) {
for (String tn : ignoreTestsNames) {
if (testName.startsWith(tn)) {
ignored = true;
break;
}
}
}
// do-only list: when present, everything not matching a prefix is skipped
if (doonlyTestsNames != null && doonlyTestsNames.length > 0) {
ignored = true;
for (String tn : doonlyTestsNames) {
if (testName.startsWith(tn)) {
ignored = false;
break;
}
}
}
if (ignored) {
// Ignored tests trump do-only tests
Log.info("#### TEST " + testName + " IGNORED");
// no-op statement: the test body is never evaluated
return new Statement() {
@Override
public void evaluate() throws Throwable {
}
};
} else {
return base;
}
}
};
/** Logs the wall-clock execution time of every test method. */
@Rule
transient public TestRule timerRule = new TestRule() {
@Override
public Statement apply(Statement base, Description description) {
return new TimerStatement(base, description.getClassName() + "#" + description.getMethodName());
}
class TimerStatement extends Statement {
private final Statement _base;
private final String _tname;
public TimerStatement(Statement base, String tname) {
_base = base;
_tname = tname;
}
@Override
public void evaluate() throws Throwable {
Timer t = new Timer();
try {
_base.evaluate();
} finally {
// always log the elapsed time, even on test failure
Log.info("#### TEST " + _tname + " EXECUTION TIME: " + t);
}
}
}
};
// ==== Data Frame Creation Utilities ====
/**
 * Compare 2 frames
 *
 * @param fr1 Frame
 * @param fr2 Frame
 * @param epsilon Relative tolerance for floating point numbers
 */
public static void assertIdenticalUpToRelTolerance(Frame fr1, Frame fr2, double epsilon) {
assertIdenticalUpToRelTolerance(fr1, fr2, epsilon, true, "");
}
/** Variant with a message prefix prepended to any failure message. */
public static void assertIdenticalUpToRelTolerance(Frame fr1, Frame fr2, double epsilon, String messagePrefix) {
assertIdenticalUpToRelTolerance(fr1, fr2, epsilon, true, messagePrefix);
}
/** Variant asserting equality ({@code expected=true}) or inequality ({@code false}). */
public static void assertIdenticalUpToRelTolerance(Frame fr1, Frame fr2, double epsilon, boolean expected) {
assertIdenticalUpToRelTolerance(fr1, fr2, epsilon, expected, "");
}
/**
 * Compares two frames cell-by-cell with a relative tolerance via a distributed
 * comparison task; when {@code expected} is false the frames must differ.
 */
public static void assertIdenticalUpToRelTolerance(Frame fr1, Frame fr2, double epsilon, boolean expected, String messagePrefix) {
if (fr1 == fr2) return;
if (expected) {
assertEquals("Number of columns differ.", fr1.numCols(), fr2.numCols());
assertEquals("Number of rows differ.", fr1.numRows(), fr2.numRows());
} else if (fr1.numCols() != fr2.numCols() || fr1.numRows() != fr2.numRows()) {
return; // different shapes: inequality already established
}
Scope.enter();
if (!fr1.isCompatible(fr2)) fr1.makeCompatible(fr2);
Cmp1 cmp = new Cmp1(epsilon, messagePrefix).doAll(new Frame(fr1).add(fr2));
Scope.exit();
assertTrue(cmp._message, expected == !cmp._unequal);
}
/**
 * Compare 2 frames
 *
 * @param fr1 Frame
 * @param fr2 Frame
 */
public static void assertBitIdentical(Frame fr1, Frame fr2) {
assertIdenticalUpToRelTolerance(fr1, fr2, 0);
}
/** Lists the files under {@code folder}; fails the test on I/O error. */
static File[] contentsOf(String name, File folder) {
try {
return FileUtils.contentsOf(folder, name);
} catch (IOException ioe) {
fail(ioe.getMessage());
return null; // unreachable: fail() throws
}
}
/**
 * @deprecated use {@link #parseTestFile(String)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
public static Frame parse_test_file(String fname) {
return parseTestFile(fname);
}
/**
 * @deprecated use {@link #parseTestFile(String, int[])} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
public static Frame parse_test_file(String fname, int[] skipped_columns) {
return parseTestFile(fname, skipped_columns);
}
/**
 * Find & parse a CSV file. NPE if file not found.
 *
 * @param fname Test filename
 * @return Frame or NPE
 */
public static Frame parseTestFile(String fname) {
return parseTestFile(Key.make(), fname);
}
/** Variant of {@link #parseTestFile(String)} that skips the given column indices. */
public static Frame parseTestFile(String fname, int[] skipped_columns) {
return parseTestFile(Key.make(), fname, skipped_columns);
}
/**
 * Find & parse & track in {@link Scope} a CSV file. NPE if file not found.
 *
 * @param fname Test filename
 * @return Frame or NPE
 */
public static Frame parseAndTrackTestFile(String fname) {
return Scope.track(parseTestFile(Key.make(), fname));
}
/**
 * Make sure the given frame is distributed in a way that MRTask reduce operation is called
 * and spans at least 2 nodes of the cluster (if running on multinode).
 * <p>
 * If a new frame is created - it is automatically tracked in Scope if it is currently active.
 *
 * @param frame input frame
 * @return possibly new Frame rebalanced to a minimum number of chunks
 */
public static Frame ensureDistributed(Frame frame) {
int minChunks = H2O.getCloudSize() * 4; // at least one node will have 4 chunks (MR tree will have at least 2 levels)
return ensureDistributed(frame, minChunks);
}
/**
 * Make sure the given frame is distributed at least to given minimum number of chunks
 * and spans at least 2 nodes of the cluster (if running on multinode).
 * <p>
 * If a new frame is created - it is automatically tracked in Scope if it is currently active.
 *
 * @param frame input frame
 * @param minChunks minimum required number of chunks
 * @return possibly new Frame rebalanced to a minimum number of chunks
 * @throws IllegalStateException when running multinode and all non-empty chunks
 *         of the frame live on a single node
 */
public static Frame ensureDistributed(Frame frame, int minChunks) {
  if (frame.anyVec().nChunks() < minChunks) {
    // rebalance first
    Key<Frame> k = Key.make();
    H2O.submitTask(new RebalanceDataSet(frame, k, minChunks)).join();
    frame = trackIfScopeActive(k.get());
  }
  // check frame spans 2+ nodes
  if (H2O.CLOUD.size() > 1) {
    Vec v = frame.anyVec();
    H2ONode node = null;
    for (int i = 0; i < v.nChunks(); i++) {
      H2ONode cNode = v.chunkKey(i).home_node();
      if (v.chunkLen(i) == 0)
        continue; // empty chunks don't prove distribution
      if (node == null)
        node = cNode;
      else if (cNode != node) // found proof
        return frame;
    }
    // FIX: corrected typo in the exception message ("sigle" -> "single")
    throw new IllegalStateException("Frame is only stored on a single node");
  }
  return frame;
}
/** Tracks {@code frame} in the current Scope when one is active; returns it unchanged. */
static Frame trackIfScopeActive(Frame frame) {
if (Scope.isActive()) {
// this function can only be called in tests - it is thus safe to auto-track the frame if the test created a Scope
Scope.track(frame);
}
return frame;
}
/** Asserts that the given test file exists (resolvable as an NFS-backed Vec). */
public static void assertExists(String fname) {
NFSFileVec v = makeNfsFileVec(fname);
assertNotNull("File '" + fname + "' was not found", v);
v.remove();
}
/**
 * Resolves a test data file into an NFS-backed Vec, downloading it from the
 * public S3 bucket first when it is missing locally (in CI, or when local
 * smalldata is explicitly disabled). Fails the test on I/O error.
 */
public static NFSFileVec makeNfsFileVec(String fname) {
try {
File file = FileUtils.locateFile(fname);
if ((file == null) && (isCI() || runWithoutLocalFiles())) {
long lastModified = downloadTestFileFromS3(fname);
if (lastModified != 0 && isCI()) { // in CI fail if the file is missing for more than 30 days
if (System.currentTimeMillis() - lastModified > 30 * 24 * 60 * 60 * 1000L) {
throw new IllegalStateException(
"File '" + fname + "' is still not locally synchronized (more than 30 days). Talk to #devops-requests");
}
}
}
return NFSFileVec.make(fname);
} catch (IOException ioe) {
Log.err(ioe);
fail(ioe.getMessage());
return null; // unreachable: fail() throws
}
}
/** True when local smalldata may be absent (H2O_JUNIT_ALLOW_NO_SMALLDATA env flag). */
private static boolean runWithoutLocalFiles() {
return Boolean.parseBoolean(System.getenv("H2O_JUNIT_ALLOW_NO_SMALLDATA"));
}
/** Resolves {@code fname} relative to H2O_PROJECT_DIR when set, else the cwd. */
private static File getLocalSmalldataFile(final String fname) {
String projectDir = System.getenv("H2O_PROJECT_DIR");
return projectDir != null ? new File(projectDir, fname) : new File(fname);
}
/**
 * Downloads a test data file from the public H2O S3 bucket into the local
 * smalldata layout when it is not already present. The download goes to a
 * temp file first and is atomically renamed into place.
 *
 * @param fname path of the file relative to the smalldata root
 * @return the S3 object's last-modified timestamp on successful install,
 *         otherwise 0 (already present, or download/rename failed)
 */
protected static long downloadTestFileFromS3(String fname) throws IOException {
  if (fname.startsWith("./"))
    fname = fname.substring(2);
  final File f = getLocalSmalldataFile(fname);
  if (!f.exists()) {
    if (f.getParentFile() != null) {
      boolean dirsCreated = f.getParentFile().mkdirs();
      // BUG FIX: mkdirs() also returns false when the directory already exists;
      // only warn when the directory is genuinely missing afterwards.
      if (!dirsCreated && !f.getParentFile().isDirectory()) {
        Log.warn("Failed to create directory:" + f.getParentFile());
      }
    }
    File tmpFile = File.createTempFile(f.getName(), "tmp", f.getParentFile());
    try {
      final URL source = new URL("https://h2o-public-test-data.s3.amazonaws.com/" + fname);
      final URLConnection connection = source.openConnection();
      connection.setConnectTimeout(1000);
      connection.setReadTimeout(2000);
      final long lastModified = connection.getLastModified();
      try (final InputStream stream = connection.getInputStream()) {
        org.apache.commons.io.FileUtils.copyInputStreamToFile(stream, tmpFile);
      }
      if (tmpFile.renameTo(f)) {
        return lastModified;
      } else {
        Log.warn("Couldn't download " + fname + " from S3.");
      }
    } finally {
      // BUG FIX: don't leave a partially-downloaded temp file behind when the
      // download or the rename fails (after a successful rename it is gone).
      if (tmpFile.exists() && !tmpFile.delete()) {
        Log.warn("Failed to delete temporary file: " + tmpFile);
      }
    }
  }
  return 0;
}
/**
 * @deprecated use {@link #parseTestFile(Key, String, boolean)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(Key outputKey, String fname, boolean guessSetup) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(outputKey, fname, guessSetup);
}
/** Parses a test file into a Frame stored under {@code outputKey}; no columns are skipped. */
protected Frame parseTestFile(Key outputKey, String fname, boolean guessSetup) {
return parseTestFile(outputKey, fname, guessSetup, null);
}
/**
 * @deprecated use {@link #parseTestFile(Key, String, boolean, int[])} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(Key outputKey, String fname, boolean guessSetup, int[] skippedColumns) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(outputKey, fname, guessSetup, skippedColumns);
}
/**
 * Parses a test file under {@code outputKey}, optionally skipping columns.
 *
 * @param outputKey      destination key for the parsed Frame
 * @param fname          test file name (located/downloaded via makeNfsFileVec)
 * @param guessSetup     accepted for API compatibility; setup is always guessed
 * @param skippedColumns column indices to exclude from the parse, or null
 * @return the parsed Frame
 */
protected Frame parseTestFile(Key outputKey, String fname, boolean guessSetup, int[] skippedColumns) {
  NFSFileVec nfs = makeNfsFileVec(fname);
  ParseSetup guessParseSetup = ParseSetup.guessSetup(new Key[]{nfs._key}, false, 1);
  if (skippedColumns != null) {
    guessParseSetup.setSkippedColumns(skippedColumns);
    guessParseSetup.setParseColumnIndices(guessParseSetup.getNumberColumns(), skippedColumns);
  }
  // Bug fix: parse previously received a *fresh* ParseSetup.guessSetup(...) result,
  // silently discarding the skippedColumns configuration applied above.
  return ParseDataset.parse(outputKey, new Key[]{nfs._key}, true, guessParseSetup);
}
/**
 * @deprecated use {@link #parseTestFile(Key, String)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(Key outputKey, String fname) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(outputKey, fname);
}
/** Parses a test file under the given key with no skipped columns. */
public static Frame parseTestFile(Key outputKey, String fname) {
return parseTestFile(outputKey, fname, new int[]{});
}
/**
 * @deprecated use {@link #parseTestFile(Key, String, int[])} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(Key outputKey, String fname, int[] skippedColumns) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(outputKey, fname, skippedColumns);
}
/** Parses a test file under the given key, skipping the listed column indices. */
public static Frame parseTestFile(Key outputKey, String fname, int[] skippedColumns) {
return parseTestFile(outputKey, fname, null, skippedColumns);
}
/**
 * @deprecated use {@link #parseTestFile(String, ParseSetupTransformer)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(String fname, ParseSetupTransformer transformer) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(fname, transformer);
}
/** Parses a test file under a fresh key, letting {@code transformer} tweak the guessed setup. */
public static Frame parseTestFile(String fname, ParseSetupTransformer transformer) {
return parseTestFile(Key.make(), fname, transformer);
}
/**
 * @deprecated use {@link #parseTestFile(String, ParseSetupTransformer, int[])} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(String fname, ParseSetupTransformer transformer, int[] skippedColumns) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(fname, transformer, skippedColumns);
}
/** Parses a test file under a fresh key with a setup transformer and skipped columns. */
public static Frame parseTestFile(String fname, ParseSetupTransformer transformer, int[] skippedColumns) {
return parseTestFile(Key.make(), fname, transformer, skippedColumns);
}
/**
 * @deprecated use {@link #parseTestFile(Key, String, ParseSetupTransformer)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(Key outputKey, String fname, ParseSetupTransformer transformer) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(outputKey, fname, transformer);
}
/** Parses a test file under the given key with a setup transformer; no columns skipped. */
public static Frame parseTestFile(Key outputKey, String fname, ParseSetupTransformer transformer) {
return parseTestFile(outputKey, fname, transformer, null);
}
/**
 * @deprecated use {@link #parseTestFile(Key outputKey, String fname, ParseSetupTransformer transformer, int[] skippedColumns)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(Key outputKey, String fname, ParseSetupTransformer transformer, int[] skippedColumns) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(outputKey, fname, transformer, skippedColumns);
}
/**
 * Finds and parses a test file, optionally skipping columns and letting a
 * transformer adjust the guessed {@link ParseSetup} before parsing.
 */
public static Frame parseTestFile(Key outputKey, String fname, ParseSetupTransformer transformer, int[] skippedColumns) {
NFSFileVec nfs = makeNfsFileVec(fname);
ParseSetup guessedSetup = ParseSetup.guessSetup(new Key[]{nfs._key}, false, ParseSetup.GUESS_HEADER);
if (skippedColumns != null) {
guessedSetup.setSkippedColumns(skippedColumns);
guessedSetup.setParseColumnIndices(guessedSetup.getNumberColumns(), skippedColumns);
}
// The transformer runs last so it can override any of the guessed settings.
if (transformer != null)
guessedSetup = transformer.transformSetup(guessedSetup);
return ParseDataset.parse(outputKey, new Key[]{nfs._key}, true, guessedSetup);
}
/**
 * Like {@link #parseTestFile(Key, String, ParseSetupTransformer, int[])} but with an
 * explicit header-check mode {@code psetup} passed through to ParseSetup.guessSetup.
 */
public static Frame parseTestFile(Key outputKey, String fname, ParseSetupTransformer transformer,
int[] skippedColumns, int psetup) {
NFSFileVec nfs = makeNfsFileVec(fname);
ParseSetup guessedSetup = ParseSetup.guessSetup(new Key[]{nfs._key}, false, psetup);
if (skippedColumns != null) {
guessedSetup.setSkippedColumns(skippedColumns);
guessedSetup.setParseColumnIndices(guessedSetup.getNumberColumns(), skippedColumns);
}
// The transformer runs last so it can override any of the guessed settings.
if (transformer != null)
guessedSetup = transformer.transformSetup(guessedSetup);
return ParseDataset.parse(outputKey, new Key[]{nfs._key}, true, guessedSetup);
}
/**
 * @deprecated use {@link #parseTestFile(String fname, String na_string, int check_header, byte[] column_types)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(String fname, String na_string, int check_header, byte[] column_types) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(fname, na_string, check_header, column_types);
}
/** Parses a test file with an NA marker, header mode and explicit column types. */
public static Frame parseTestFile(String fname, String na_string, int check_header, byte[] column_types) {
return parseTestFile(fname, na_string, check_header, column_types, null, null);
}
/**
 * @deprecated use {@link #parseTestFile(String fname, String na_string, int check_header, byte[] column_types, ParseSetupTransformer transformer)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(String fname, String na_string, int check_header, byte[] column_types, ParseSetupTransformer transformer) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(fname, na_string, check_header, column_types, transformer);
}
/** Parses a test file with NA marker, header mode, column types and a setup transformer. */
public static Frame parseTestFile(String fname, String na_string, int check_header, byte[] column_types, ParseSetupTransformer transformer) {
return parseTestFile(fname, na_string, check_header, column_types, transformer, null);
}
/**
 * @deprecated use {@link #parseTestFile(String fname, String na_string, int check_header, byte[] column_types, ParseSetupTransformer transformer, int[] skippedColumns)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_file(String fname, String na_string, int check_header, byte[] column_types, ParseSetupTransformer transformer, int[] skippedColumns) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFile(fname, na_string, check_header, column_types, transformer, skippedColumns);
}
/**
 * Fully-configurable single-file parse: NA marker, header mode, explicit column
 * types, optional setup transformer and skipped columns.
 */
public static Frame parseTestFile(String fname, String na_string, int check_header, byte[] column_types, ParseSetupTransformer transformer, int[] skippedColumns) {
NFSFileVec nfs = makeNfsFileVec(fname);
Key[] res = {nfs._key};
// create new parseSetup in order to store our na_string
ParseSetup p = ParseSetup.guessSetup(res, new ParseSetup(DefaultParserProviders.GUESS_INFO, (byte) ',', false,
check_header, 0, null, null, null, null, null, null, null, false));
if (skippedColumns != null) {
p.setSkippedColumns(skippedColumns);
p.setParseColumnIndices(p.getNumberColumns(), skippedColumns);
}
// add the na_strings into p.
if (na_string != null) {
int column_number = p.getColumnTypes().length;
// NOTE(review): na_length = na_string.length() - 1 is used as the slot index;
// only that last slot of each per-column array is populated (others stay null).
// Looks intentional but verify against ParseSetup.setNAStrings expectations.
int na_length = na_string.length() - 1;
String[][] na_strings = new String[column_number][na_length + 1];
for (int index = 0; index < column_number; index++) {
na_strings[index][na_length] = na_string;
}
p.setNAStrings(na_strings);
}
if (column_types != null)
p.setColumnTypes(column_types);
// The transformer runs last so it can override any of the settings above.
if (transformer != null)
p = transformer.transformSetup(p);
return ParseDataset.parse(Key.make(), res, true, p);
}
/**
 * Like {@link #parseTestFile(String, String, int, byte[], ParseSetupTransformer, int[])}
 * with an extra {@code force_col_types} flag (only meaningful for parquet parsers).
 */
public static Frame parseTestFile(String fname, String na_string, int check_header, byte[] column_types,
ParseSetupTransformer transformer, int[] skippedColumns, boolean force_col_types) {
NFSFileVec nfs = makeNfsFileVec(fname);
Key[] res = {nfs._key};
// create new parseSetup in order to store our na_string
ParseSetup p = ParseSetup.guessSetup(res, new ParseSetup(DefaultParserProviders.GUESS_INFO, (byte) ',', false,
check_header, 0, null, null, null, null, null, null, null, false));
if (skippedColumns != null) {
p.setSkippedColumns(skippedColumns);
p.setParseColumnIndices(p.getNumberColumns(), skippedColumns);
}
if (force_col_types) // only useful for parquet parsers here
p.setForceColTypes(true);
// add the na_strings into p.
if (na_string != null) {
int column_number = p.getColumnTypes().length;
// NOTE(review): only the last slot (index na_length) of each per-column array is
// populated with the NA marker; confirm this matches ParseSetup.setNAStrings.
int na_length = na_string.length() - 1;
String[][] na_strings = new String[column_number][na_length + 1];
for (int index = 0; index < column_number; index++) {
na_strings[index][na_length] = na_string;
}
p.setNAStrings(na_strings);
}
if (column_types != null)
p.setColumnTypes(column_types);
// The transformer runs last so it can override any of the settings above.
if (transformer != null)
p = transformer.transformSetup(p);
return ParseDataset.parse(Key.make(), res, true, p);
}
/**
 * @deprecated use {@link #parseTestFolder(String)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_folder(String fname) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFolder(fname);
}
/**
 * Find & parse a folder of CSV files. NPE if file not found.
 *
 * @param fname Test filename
 * @return Frame or NPE
 */
protected Frame parseTestFolder(String fname) {
return parseTestFolder(fname, null);
}
/**
 * @deprecated use {@link #parseTestFolder(String, int[])} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_folder(String fname, int[] skippedColumns) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFolder(fname, skippedColumns);
}
/**
 * Find & parse a folder of CSV files. NPE if file not found.
 * Files are sorted by name so the parse order (and thus row order) is deterministic.
 *
 * @param fname Test filename
 * @param skippedColumns column indices to exclude from the parse, or null
 * @return Frame or NPE
 */
protected Frame parseTestFolder(String fname, int[] skippedColumns) {
File folder = FileUtils.locateFile(fname);
File[] files = contentsOf(fname, folder);
Arrays.sort(files);
ArrayList<Key> keys = new ArrayList<>();
// Subdirectories are silently ignored; only regular files are parsed.
for (File f : files)
if (f.isFile())
keys.add(NFSFileVec.make(f)._key);
Key[] res = new Key[keys.size()];
keys.toArray(res);
return ParseDataset.parse(skippedColumns, Key.make(), res);
}
/**
 * @deprecated use {@link #parseTestFolder(String, String, int, byte[], ParseSetupTransformer)} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_folder(String fname, String na_string, int check_header, byte[] column_types,
ParseSetupTransformer transformer) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFolder(fname, na_string, check_header, column_types, transformer);
}
/**
 * Parse a folder with csv files when a single na_string is specified.
 *
 * @param fname name of folder
 * @param na_string string for NA in a column
 * @return the parsed Frame
 */
protected static Frame parseTestFolder(String fname, String na_string, int check_header, byte[] column_types,
ParseSetupTransformer transformer) {
return parseTestFolder(fname, na_string, check_header, column_types, transformer, null);
}
/**
 * @deprecated use {@link #parseTestFolder(String, String, int, byte[], ParseSetupTransformer, int[])} instead
 * <p>
 * Will be removed at version 3.38.0.1
 */
@Deprecated
protected Frame parse_test_folder(String fname, String na_string, int check_header, byte[] column_types,
ParseSetupTransformer transformer, int[] skipped_columns) {
// Thin delegate kept for source compatibility with the legacy snake_case API.
return parseTestFolder(fname, na_string, check_header, column_types, transformer, skipped_columns);
}
/**
 * Parse a folder with csv files when a single na_string is specified.
 * Files are sorted by name for a deterministic parse order; subdirectories are skipped.
 *
 * @param fname name of folder
 * @param na_string string for NA in a column
 * @param check_header header-detection mode forwarded to ParseSetup
 * @param column_types explicit column types, or null to keep the guessed ones
 * @param transformer optional hook that runs last and may override the setup
 * @param skipped_columns column indices to exclude, or null
 * @return the parsed Frame
 */
protected static Frame parseTestFolder(String fname, String na_string, int check_header, byte[] column_types,
ParseSetupTransformer transformer, int[] skipped_columns) {
File folder = FileUtils.locateFile(fname);
File[] files = contentsOf(fname, folder);
Arrays.sort(files);
ArrayList<Key> keys = new ArrayList<>();
for (File f : files)
if (f.isFile())
keys.add(NFSFileVec.make(f)._key);
Key[] res = new Key[keys.size()];
keys.toArray(res); // generated the necessary key here
// create new parseSetup in order to store our na_string
ParseSetup p = ParseSetup.guessSetup(res, new ParseSetup(DefaultParserProviders.GUESS_INFO, (byte) ',', true,
check_header, 0, null, null, null, null, null, null, null, false));
if (skipped_columns != null) {
p.setSkippedColumns(skipped_columns);
p.setParseColumnIndices(p.getNumberColumns(), skipped_columns);
}
// add the na_strings into p.
if (na_string != null) {
int column_number = p.getColumnTypes().length;
// NOTE(review): only the last slot (index na_length) of each per-column array is
// populated with the NA marker; confirm this matches ParseSetup.setNAStrings.
int na_length = na_string.length() - 1;
String[][] na_strings = new String[column_number][na_length + 1];
for (int index = 0; index < column_number; index++) {
na_strings[index][na_length] = na_string;
}
p.setNAStrings(na_strings);
}
if (column_types != null)
p.setColumnTypes(column_types);
if (transformer != null)
p = transformer.transformSetup(p);
return ParseDataset.parse(Key.make(), res, true, p);
}
/** Immutable holder for the train/test/valid frames produced by {@link #split}. */
public static class Frames {
public final Frame train;
public final Frame test;
public final Frame valid; // null when no validation fraction was requested
public Frames(Frame train, Frame test, Frame valid) {
this.train = train;
this.test = test;
this.valid = valid;
}
}
/** Splits a frame 90/10 into train/test with no validation split. */
public static Frames split(Frame f) {
return split(f, 0.9, 0d);
}
/** Splits a frame into train/test using the given test fraction; no validation split. */
public static Frames split(Frame f, double testFraction) {
return split(f, testFraction, 0);
}
/**
 * Splits a frame into train/test(/valid) pieces; the train fraction is whatever
 * remains after the test and valid fractions. All result frames are Scope-tracked.
 */
public static Frames split(Frame f, double testFraction, double validFraction) {
double[] fractions;
double trainFraction = 1d - testFraction - validFraction;
// Only request a third split when a validation fraction was actually asked for.
if (validFraction > 0d) {
fractions = new double[]{trainFraction, testFraction, validFraction};
} else {
fractions = new double[]{trainFraction, testFraction};
}
SplitFrame sf = new SplitFrame(f, fractions, null);
sf.exec().get(); // blocks until the split job finishes
Key<Frame>[] splitKeys = sf._destination_frames;
Frame trainFrame = Scope.track(splitKeys[0].get());
Frame testFrame = Scope.track(splitKeys[1].get());
Frame validFrame = (validFraction > 0d) ? Scope.track(splitKeys[2].get()) : null;
return new Frames(trainFrame, testFrame, validFrame);
}
/**
 * A Numeric Vec from an array of ints
 *
 * @param rows Data
 * @return The Vec
 */
public static Vec vec(int... rows) {
return vec(null, rows);
}
/**
 * A Categorical/Factor Vec from an array of ints - with categorical/domain mapping
 *
 * @param domain Categorical/Factor names, mapped by the data values (null for plain numeric)
 * @param rows Data
 * @return The Vec
 */
public static Vec vec(String[] domain, int... rows) {
Key<Vec> k = Vec.VectorGroup.VG_LEN1.addVec();
Futures fs = new Futures();
// NOTE(review): type is T_NUM even when a domain is supplied; presumably setDomain
// is what makes the Vec categorical downstream - confirm against AppendableVec.
AppendableVec avec = new AppendableVec(k, Vec.T_NUM);
avec.setDomain(domain);
NewChunk chunk = new NewChunk(avec, 0);
for (int r : rows) chunk.addNum(r);
chunk.close(0, fs);
Vec vec = avec.layout_and_close(fs);
fs.blockForPending(); // wait for the chunk writes before handing the Vec out
return vec;
}
/**
 * A numeric Vec from an array of ints
 */
public static Vec ivec(int... rows) {
return vec(null, rows);
}
/**
 * A categorical Vec from an array of strings; the domain is built from the data.
 */
public static Vec cvec(String... rows) {
return cvec(null, rows);
}
/**
 * A categorical Vec from an array of strings, optionally seeded with a preset
 * {@code domain}. Levels not present in the preset domain are appended after it.
 *
 * @param domain preset factor levels (indices 0..domain.length-1), or null
 * @param rows   the string data; each distinct value becomes (or reuses) a level
 * @return a categorical Vec whose domain is the preset levels plus any new ones
 */
public static Vec cvec(String[] domain, String... rows) {
  HashMap<String, Integer> domainMap = new HashMap<>(10);
  ArrayList<String> domainList = new ArrayList<>(10);
  if (domain != null) {
    int j = 0;
    for (String s : domain) {
      domainMap.put(s, j++);
      domainList.add(s);
    }
  }
  int[] irows = new int[rows.length];
  // Bug fix: new-level numbering must continue after the preset domain. The old
  // code restarted j at 0, so a row value missing from `domain` was assigned
  // index 0 and collided with domain[0].
  int j = domainList.size();
  for (int i = 0; i < rows.length; i++) {
    String s = rows[i];
    if (!domainMap.containsKey(s)) {
      domainMap.put(s, j++);
      domainList.add(s);
    }
    irows[i] = domainMap.get(s);
  }
  return vec(domainList.toArray(new String[]{}), irows);
}
/**
 * A numeric Vec from an array of doubles
 */
public static Vec dvec(double... rows) {
Key<Vec> k = Vec.VectorGroup.VG_LEN1.addVec();
Futures fs = new Futures();
AppendableVec avec = new AppendableVec(k, Vec.T_NUM);
NewChunk chunk = new NewChunk(avec, 0);
for (double r : rows)
chunk.addNum(r);
chunk.close(0, fs);
Vec vec = avec.layout_and_close(fs);
fs.blockForPending(); // wait for the chunk writes before handing the Vec out
return vec;
}
/**
 * A time Vec from an array of ints
 */
public static Vec tvec(int... rows) {
Key<Vec> k = Vec.VectorGroup.VG_LEN1.addVec();
Futures fs = new Futures();
AppendableVec avec = new AppendableVec(k, Vec.T_TIME);
NewChunk chunk = new NewChunk(avec, 0);
for (int r : rows)
chunk.addNum(r);
chunk.close(0, fs);
Vec vec = avec.layout_and_close(fs);
fs.blockForPending(); // wait for the chunk writes before handing the Vec out
return vec;
}
/**
 * A string Vec from an array of strings
 */
public static Vec svec(String... rows) {
Key<Vec> k = Vec.VectorGroup.VG_LEN1.addVec();
Futures fs = new Futures();
AppendableVec avec = new AppendableVec(k, Vec.T_STR);
NewChunk chunk = new NewChunk(avec, 0);
for (String r : rows)
chunk.addStr(r);
chunk.close(0, fs);
Vec vec = avec.layout_and_close(fs);
fs.blockForPending(); // wait for the chunk writes before handing the Vec out
return vec;
}
/**
 * A UUID Vec from an array of UUIDs (javadoc previously mis-stated "string Vec").
 */
public static Vec uvec(UUID... rows) {
Key<Vec> k = Vec.VectorGroup.VG_LEN1.addVec();
Futures fs = new Futures();
AppendableVec avec = new AppendableVec(k, Vec.T_UUID);
NewChunk chunk = new NewChunk(avec, 0);
for (UUID r : rows)
chunk.addUUID(r);
chunk.close(0, fs);
Vec vec = avec.layout_and_close(fs);
fs.blockForPending(); // wait for the chunk writes before handing the Vec out
return vec;
}
// Shortcuts for initializing constant arrays: each overload just returns its
// varargs array, letting tests write ar(1, 2, 3) instead of new long[]{1, 2, 3}.
public static String[] ar(String... a) {
return a;
}
public static String[][] ar(String[]... a) {
return a;
}
public static byte[] ar(byte... a) {
return a;
}
public static long[] ar(long... a) {
return a;
}
public static long[][] ar(long[]... a) {
return a;
}
public static int[] ari(int... a) {
return a;
}
public static int[][] ar(int[]... a) {
return a;
}
// Float / double flavors of the varargs-to-array shortcuts above.
public static float[] arf(float... a) {
return a;
}
public static double[] ard(double... a) {
return a;
}
public static double[][] ard(double[]... a) {
return a;
}
/** Wraps each value into its own single-element row, producing an a.length x 1 matrix. */
public static double[][] ear(double... a) {
  final double[][] out = new double[a.length][1];
  int row = 0;
  for (double v : a) {
    out[row++][0] = v;
  }
  return out;
}
/** Varargs-to-array shortcut for reference types. */
@SafeVarargs // the varargs array is returned untouched; nothing unsafe is stored into it
public static <T> T[] aro(T... a) {
  return a;
}
// ==== Comparing Results ====
/** Asserts two frames are equal within an absolute tolerance only. */
public static void assertFrameEquals(Frame expected, Frame actual, double absDelta) {
assertFrameEquals(expected, actual, absDelta, null);
}
/**
 * Asserts two frames are vec-by-vec equal; string vecs compare exactly, numeric
 * vecs within the given absolute and/or relative tolerance.
 */
public static void assertFrameEquals(Frame expected, Frame actual, Double absDelta, Double relativeDelta) {
assertEquals("Frames have different number of vecs. ", expected.vecs().length, actual.vecs().length);
for (int i = 0; i < expected.vecs().length; i++) {
if (expected.vec(i).isString())
assertStringVecEquals(expected.vec(i), actual.vec(i));
else
assertVecEquals(i + "/" + expected._names[i] + " ", expected.vec(i), actual.vec(i), absDelta, relativeDelta);
}
}
/** Asserts two vecs equal within an absolute tolerance; no message prefix. */
public static void assertVecEquals(Vec expecteds, Vec actuals, double delta) {
assertVecEquals("", expecteds, actuals, delta);
}
/** Asserts two vecs equal within absolute and relative tolerances; no message prefix. */
public static void assertVecEquals(Vec expecteds, Vec actuals, double delta, double relativeDelta) {
assertVecEquals("", expecteds, actuals, delta, relativeDelta);
}
/** Asserts two vecs equal within an absolute tolerance, prefixing failure messages. */
public static void assertVecEquals(String messagePrefix, Vec expecteds, Vec actuals, double delta) {
assertVecEquals(messagePrefix, expecteds, actuals, delta, null);
}
/**
 * Row-by-row equality assertion for two numeric vecs; the effective tolerance per
 * row is resolved by {@link #computeAssertionDelta} from the abs/relative deltas.
 */
public static void assertVecEquals(String messagePrefix, Vec expecteds, Vec actuals, Double absDelta, Double relativeDelta) {
assertEquals(expecteds.length(), actuals.length());
for (int i = 0; i < expecteds.length(); i++) {
// Failure message includes chunk ids and in-chunk row offsets to ease debugging.
final String message = messagePrefix + i + ": " + expecteds.at(i) + " != " + actuals.at(i) + ", chunkIds = " + expecteds.elem2ChunkIdx(i) + ", " + actuals.elem2ChunkIdx(i) + ", row in chunks = " + (i - expecteds.chunkForRow(i).start()) + ", " + (i - actuals.chunkForRow(i).start());
double expectedVal = expecteds.at(i);
double actualVal = actuals.at(i);
assertEquals(message, expectedVal, actualVal, computeAssertionDelta(expectedVal, absDelta, relativeDelta));
}
}
/**
 * Resolves the tolerance for a single assertEquals call: absolute-only,
 * relative-only, or the larger of the two when both are given. A relative
 * delta is scaled by |expectedVal|, falling back to the raw relative value
 * when the expectation is zero (no scale available).
 *
 * @throws IllegalArgumentException when neither delta is non-null and non-NaN
 */
private static double computeAssertionDelta(double expectedVal, Double absDelta, Double relDelta) {
  final boolean hasAbs = absDelta != null && !absDelta.isNaN();
  final boolean hasRel = relDelta != null && !relDelta.isNaN();
  if (!hasAbs && !hasRel) {
    throw new IllegalArgumentException("Either absolute or relative delta has to be non-null and non-NaN");
  }
  if (!hasRel) {
    return absDelta;
  }
  final double base = Math.abs(expectedVal);
  final double scaled = (base == 0) ? relDelta : base * relDelta;
  // When both deltas are usable, the looser (bigger) one wins.
  return hasAbs ? Math.max(scaled, absDelta) : scaled;
}
/** Row-by-row equality assertion for two UUID vecs (compares both 64-bit halves). */
public static void assertUUIDVecEquals(Vec expecteds, Vec actuals) {
assertEquals(expecteds.length(), actuals.length());
assertEquals("Vec types match", expecteds.get_type_str(), actuals.get_type_str());
for (int i = 0; i < expecteds.length(); i++) {
UUID expected = new UUID(expecteds.at16l(i), expecteds.at16h(i));
UUID actual = new UUID(actuals.at16l(i), actuals.at16h(i));
// Failure message includes chunk ids and in-chunk row offsets to ease debugging.
final String message = i + ": " + expected + " != " + actual + ", chunkIds = " + expecteds.elem2ChunkIdx(i) + ", " + actuals.elem2ChunkIdx(i) + ", row in chunks = " + (i - expecteds.chunkForRow(i).start()) + ", " + (i - actuals.chunkForRow(i).start());
assertEquals(message, expected, actual);
}
}
/** Null-safe conversion of a BufferedString to a plain String. */
private static String toStr(BufferedString bs) {
  return bs == null ? null : bs.toString();
}
/** Row-by-row equality assertion for two string vecs (null-safe comparison). */
public static void assertStringVecEquals(Vec expecteds, Vec actuals) {
assertEquals(expecteds.length(), actuals.length());
assertEquals("Vec types match", expecteds.get_type_str(), actuals.get_type_str());
for (int i = 0; i < expecteds.length(); i++) {
String expected = toStr(expecteds.atStr(new BufferedString(), i));
String actual = toStr(actuals.atStr(new BufferedString(), i));
// Failure message includes chunk ids and in-chunk row offsets to ease debugging.
final String message = i + ": " + expected + " != " + actual + ", chunkIds = " + expecteds.elem2ChunkIdx(i) + ", " + actuals.elem2ChunkIdx(i) + ", row in chunks = " + (i - expecteds.chunkForRow(i).start()) + ", " + (i - actuals.chunkForRow(i).start());
assertEquals(message, expected, actual);
}
}
/** Returns the factor label for a row, or null when the row is NA. */
private static String getFactorAsString(Vec v, long row) {
return v.isNA(row) ? null : v.factor((long) v.at(row));
}
/** Row-by-row equality assertion for two categorical vecs, compared by factor label. */
public static void assertCatVecEquals(Vec expecteds, Vec actuals) {
assertEquals(expecteds.length(), actuals.length());
assertEquals("Vec types match", expecteds.get_type_str(), actuals.get_type_str());
for (int i = 0; i < expecteds.length(); i++) {
String expected = getFactorAsString(expecteds, i);
String actual = getFactorAsString(actuals, i);
// Failure message includes chunk ids and in-chunk row offsets to ease debugging.
final String message = i + ": " + expected + " != " + actual + ", chunkIds = " + expecteds.elem2ChunkIdx(i) + ", " + actuals.elem2ChunkIdx(i) + ", row in chunks = " + (i - expecteds.chunkForRow(i).start()) + ", " + (i - actuals.chunkForRow(i).start());
assertEquals(message, expected, actual);
}
}
/** Asserts two TwoDimTables are fully equal: metadata, headers, formats and every cell. */
public static void assertTwoDimTableEquals(TwoDimTable expected, TwoDimTable actual) {
assertEquals("tableHeader different", expected.getTableHeader(), actual.getTableHeader());
assertEquals("tableDescriptionDifferent", expected.getTableDescription(), actual.getTableDescription());
assertArrayEquals("rowHeaders different", expected.getRowHeaders(), actual.getRowHeaders());
assertArrayEquals("colHeaders different", expected.getColHeaders(), actual.getColHeaders());
assertArrayEquals("colTypes different", expected.getColTypes(), actual.getColTypes());
assertArrayEquals("colFormats different", expected.getColFormats(), actual.getColFormats());
assertEquals("colHeaderForRowHeaders different", expected.getColHeaderForRowHeaders(), actual.getColHeaderForRowHeaders());
// Cell-by-cell comparison over the expected table's dimensions.
for (int r = 0; r < expected.getRowDim(); r++) {
for (int c = 0; c < expected.getColDim(); c++) {
Object ex = expected.get(r, c);
Object act = actual.get(r, c);
assertEquals("cellValues different at row " + r + ", col " + c, ex, act);
}
}
}
/** Asserts two stddev arrays agree element-wise within {@code threshold}. */
public static void checkStddev(double[] expected, double[] actual, double threshold) {
for (int i = 0; i < actual.length; i++)
assertEquals(expected[i], actual[i], threshold);
}
/** Asserts two 2-D IcedWrapper grids agree cell-wise (double payloads) within {@code threshold}. */
public static void checkIcedArrays(IcedWrapper[][] expected, IcedWrapper[][] actual, double threshold) {
for (int i = 0; i < actual.length; i++)
for (int j = 0; j < actual[0].length; j++)
assertEquals(expected[i][j].d, actual[i][j].d, threshold);
}
/**
 * Compares two eigenvector matrices (features x components) allowing each
 * component column to be sign-flipped, since eigenvectors are only defined up
 * to sign. Returns which columns were treated as flipped.
 */
public static boolean[] checkEigvec(double[][] expected, double[][] actual, double threshold) {
int nfeat = actual.length;
int ncomp = actual[0].length;
boolean[] flipped = new boolean[ncomp];
for (int j = 0; j < ncomp; j++) {
// flipped[j] = Math.abs(expected[0][j] - actual[0][j]) > threshold;
// Flip detection from the first feature row: flipped when the values are
// closer as opposites than as same-sign.
flipped[j] = Math.abs(expected[0][j] - actual[0][j]) > Math.abs(expected[0][j] + actual[0][j]);
for (int i = 0; i < nfeat; i++) {
assertEquals(expected[i][j], flipped[j] ? -actual[i][j] : actual[i][j], threshold);
}
}
return flipped;
}
/**
 * Eigenvector comparison against a TwoDimTable, again allowing per-column sign
 * flips; flip detection here uses the threshold on the first feature row.
 */
public static boolean[] checkEigvec(double[][] expected, TwoDimTable actual, double threshold) {
int nfeat = actual.getRowDim();
int ncomp = actual.getColDim();
boolean[] flipped = new boolean[ncomp];
for (int j = 0; j < ncomp; j++) {
flipped[j] = Math.abs(expected[0][j] - (double) actual.get(0, j)) > threshold;
for (int i = 0; i < nfeat; i++) {
assertEquals(expected[i][j], flipped[j] ? -(double) actual.get(i, j) : (double) actual.get(i, j), threshold);
}
}
return flipped;
}
/**
 * Element-wise comparison of two equally-sized arrays using the relative
 * difference |a-b| / max(|a|, |b|); false as soon as one pair exceeds
 * {@code tol}. Two zeros compare equal (0/0 = NaN, and NaN > tol is false).
 */
public static boolean equalTwoArrays(double[] array1, double[] array2, double tol) {
  assert array1.length == array2.length : "Arrays have different lengths";
  for (int i = 0; i < array1.length; i++) {
    final double scale = Math.max(Math.abs(array1[i]), Math.abs(array2[i]));
    final double relDiff = Math.abs(array1[i] - array2[i]) / scale;
    if (relDiff > tol)
      return false;
  }
  return true;
}
/**
 * MRTask that standardizes selected numeric columns in place:
 * value := (value - mean) * (1 / sigma), using precomputed per-column stats.
 */
public static class StandardizeColumns extends MRTask<StandardizeColumns> {
int[] _columns2Transform; // column indices to standardize
double[] _colMeans;       // per-column means, aligned with _columns2Transform
double[] _oneOStd;        // per-column 1/sigma, aligned with _columns2Transform
public StandardizeColumns(int[] cols, double[] colMeans, double[] oneOSigma,
Frame transF) {
assert cols.length == colMeans.length;
assert colMeans.length == oneOSigma.length;
_columns2Transform = cols;
_colMeans = colMeans;
_oneOStd = oneOSigma;
// NOTE(review): numCols is computed but never used; kept only because transF
// is also used for the numeric-column sanity checks below.
int numCols = transF.numCols();
for (int cindex : cols) { // check to make sure columns are numerical
assert transF.vec(cindex).isNumeric();
}
}
@Override
public void map(Chunk[] chks) {
int chunkLen = chks[0].len();
int colCount = 0; // index into the stats arrays, advanced per transformed column
for (int cindex : _columns2Transform) {
for (int rindex = 0; rindex < chunkLen; rindex++) {
double temp = (chks[cindex].atd(rindex) - _colMeans[colCount]) * _oneOStd[colCount];
chks[cindex].set(rindex, temp); // in-place mutation of the chunk
}
colCount += 1;
}
}
}
/**
 * Compares two coefficient maps: same size and, for every key of {@code coeff1},
 * a value in {@code coeff2} within {@code tol}.
 *
 * Fixes: typo in the assert message ("differenbt"), and a key present in coeff1
 * but absent from coeff2 now returns false instead of throwing an NPE from
 * auto-unboxing a null value.
 */
public static boolean equalTwoHashMaps(HashMap<String, Double> coeff1, HashMap<String, Double> coeff2, double tol) {
  assert coeff1.size() == coeff2.size() : "HashMap sizes are different";
  for (String key : coeff1.keySet()) {
    Double other = coeff2.get(key);
    if (other == null || Math.abs(coeff1.get(key) - other) > tol)
      return false;
  }
  return true;
}
/**
 * Compares two TwoDimTables: column headers/types must match exactly, and cell
 * values must match (within {@code tol} for "double" columns, exactly otherwise).
 */
public static boolean equalTwoDimTables(TwoDimTable tab1, TwoDimTable tab2, double tol) {
boolean same = true;
//compare colHeaders
// NOTE(review): the initial `same = true` above is immediately overwritten here.
same = Arrays.equals(tab1.getColHeaders(), tab2.getColHeaders()) &&
Arrays.equals(tab1.getColTypes(), tab2.getColTypes());
String[] colTypes = tab2.getColTypes();
IcedWrapper[][] cellValues1 = tab1.getCellValues();
IcedWrapper[][] cellValues2 = tab2.getCellValues();
same = same && cellValues1.length == cellValues2.length;
if (!same)
return false;
// compare cell values
for (int cindex = 0; cindex < cellValues1.length; cindex++) {
same = same && cellValues1[cindex].length == cellValues2[cindex].length;
if (!same)
return false;
for (int index = 0; index < cellValues1[cindex].length; index++) {
// Doubles compare via parse + tolerance; everything else via string equality.
if (colTypes[index].equals("double")) {
same = same && Math.abs(Double.parseDouble(cellValues1[cindex][index].toString()) - Double.parseDouble(cellValues2[cindex][index].toString())) < tol;
} else {
same = same && cellValues1[cindex][index].toString().equals(cellValues2[cindex][index].toString());
}
}
}
return same;
}
/**
 * Eigenvector comparison between two TwoDimTables with per-column sign-flip
 * tolerance. Flip detection compares signs of the first non-zero pair in each
 * column rather than relying on magnitudes.
 */
public static boolean[] checkEigvec(TwoDimTable expected, TwoDimTable actual, double threshold) {
int nfeat = actual.getRowDim();
int ncomp = actual.getColDim();
boolean[] flipped = new boolean[ncomp];
// better way to get sign
for (int j = 0; j < ncomp; j++) {
for (int i = 0; i < nfeat; i++) {
if (Math.abs((Double) expected.get(i, j)) > 0.0 && Math.abs((Double) actual.get(i, j)) > 0.0) { // only non zeros
flipped[j] = !(Math.signum((Double) expected.get(i, j)) == Math.signum((Double) actual.get(i, j)));
break;
}
}
}
// Second pass: assert element-wise equality with the detected sign applied.
for (int j = 0; j < ncomp; j++) {
for (int i = 0; i < nfeat; i++) {
assertEquals((double) expected.get(i, j), flipped[j] ? -(double) actual.get(i, j) : (double) actual.get(i, j), threshold);
}
}
return flipped;
}
/**
 * Compares two projection frames column-by-column with the given per-column sign
 * flips; rows where either side is NA are skipped.
 */
public static boolean[] checkProjection(Frame expected, Frame actual, double threshold, boolean[] flipped) {
assertEquals("Number of columns", expected.numCols(), actual.numCols());
assertEquals("Number of columns in flipped", expected.numCols(), flipped.length);
int nfeat = (int) expected.numRows();
int ncomp = expected.numCols();
for (int j = 0; j < ncomp; j++) {
Vec.Reader vexp = expected.vec(j).new Reader();
Vec.Reader vact = actual.vec(j).new Reader();
assertEquals(vexp.length(), vact.length());
for (int i = 0; i < nfeat; i++) {
if (vexp.isNA(i) || vact.isNA(i)) {
continue;
}
// only perform comparison when data is not NAN
// NOTE(review): at8 reads values as longs yet a double threshold is supplied;
// looks like this assumes integer-valued projections - confirm with callers.
assertEquals(vexp.at8(i), flipped[j] ? -vact.at8(i) : vact.at8(i), threshold);
}
}
return flipped;
}
// Run tests from cmd-line since testng doesn't seem to be able to it.
/**
 * Boots an H2O node, then reflectively invokes the static main() of every class
 * named on the command line, logging failures without aborting the run; finally
 * shuts the cluster down (only when at least one class was run).
 */
public static void main(String[] args) {
H2O.main(new String[0]);
for (String arg : args) {
try {
System.out.println("=== Starting " + arg);
Class<?> clz = Class.forName(arg);
Method main = clz.getDeclaredMethod("main");
main.invoke(null);
} catch (InvocationTargetException ite) {
// Unwrap the test's own exception; the short sleep lets async log output flush.
Throwable e = ite.getCause();
e.printStackTrace();
try {
Thread.sleep(100);
} catch (Exception ignore) {
}
} catch (Exception e) {
e.printStackTrace();
try {
Thread.sleep(100);
} catch (Exception ignore) {
}
} finally {
System.out.println("=== Stopping " + arg);
}
}
try {
Thread.sleep(100);
} catch (Exception ignore) {
}
if (args.length != 0)
UDPRebooted.T.shutdown.send(H2O.SELF);
}
/**
 * MRTask that compares two frames laid side-by-side in one chunk array: the
 * first half of the chunks is frame A, the second half frame B. Sets _unequal
 * (and a diagnostic _message) on the first mismatching cell. Numeric cells
 * compare with a relative epsilon; C16 (UUID), CStr and C8 chunks compare exactly.
 */
protected static class Cmp1 extends MRTask<Cmp1> {
final double _epsilon;
final String _messagePrefix;
public Cmp1(double epsilon) {
_epsilon = epsilon;
_messagePrefix = "";
}
public Cmp1(double epsilon, String msg) {
_epsilon = epsilon;
_messagePrefix = msg + " ";
}
public boolean _unequal; // set when any cell differs
public String _message;  // describes the first difference found
@Override
public void map(Chunk chks[]) {
// Columns 0..n/2-1 belong to the first frame, n/2..n-1 to the second.
for (int cols = 0; cols < chks.length >> 1; cols++) {
Chunk c0 = chks[cols];
Chunk c1 = chks[cols + (chks.length >> 1)];
for (int rows = 0; rows < chks[0]._len; rows++) {
String msgBase = _messagePrefix + "At [" + rows + ", " + cols + "]: ";
if (c0.isNA(rows) != c1.isNA(rows)) {
_unequal = true;
_message = msgBase + "c0.isNA " + c0.isNA(rows) + " != c1.isNA " + c1.isNA(rows);
return;
} else if (!(c0.isNA(rows) && c1.isNA(rows))) {
// Pick the comparison by concrete chunk type; fall back to epsilon doubles.
if (c0 instanceof C16Chunk && c1 instanceof C16Chunk) {
long lo0 = c0.at16l(rows), lo1 = c1.at16l(rows);
long hi0 = c0.at16h(rows), hi1 = c1.at16h(rows);
if (lo0 != lo1 || hi0 != hi1) {
_unequal = true;
_message = msgBase + " lo0 " + lo0 + " != lo1 " + lo1 + " || hi0 " + hi0 + " != hi1 " + hi1;
return;
}
} else if (c0 instanceof CStrChunk && c1 instanceof CStrChunk) {
BufferedString s0 = new BufferedString(), s1 = new BufferedString();
c0.atStr(s0, rows);
c1.atStr(s1, rows);
if (s0.compareTo(s1) != 0) {
_unequal = true;
_message = msgBase + " s0 " + s0 + " != s1 " + s1;
return;
}
} else if ((c0 instanceof C8Chunk) && (c1 instanceof C8Chunk)) {
long d0 = c0.at8(rows), d1 = c1.at8(rows);
if (d0 != d1) {
_unequal = true;
_message = msgBase + " d0 " + d0 + " != d1 " + d1;
return;
}
} else {
double d0 = c0.atd(rows), d1 = c1.atd(rows);
// Relative comparison scale: |d0|+|d1|, or 1.0 when either value is zero.
double cmpValue = ((d0 == 0.0) || (d1 == 0.0)) ? 1.0 : Math.abs(d0) + Math.abs(d1);
if (!(Math.abs(d0 - d1) <= cmpValue * _epsilon)) {
_unequal = true;
_message = msgBase + " d0 " + d0 + " != d1 " + d1;
return;
}
}
}
}
}
}
@Override
public void reduce(Cmp1 cmp) {
// Keep the first-recorded difference; otherwise adopt the other task's result.
if (_unequal) return;
if (cmp._unequal) {
_unequal = true;
_message = cmp._message;
}
}
}
public static void assertFrameAssertion(FrameAssertion frameAssertion) {
int[] dim = frameAssertion.dim;
Frame frame = null;
try {
frame = frameAssertion.prepare();
assertEquals("Frame has to have expected number of columns", dim[0], frame.numCols());
assertEquals("Frame has to have expected number of rows", dim[1], frame.numRows());
frameAssertion.check(frame);
} finally {
frameAssertion.done(frame);
if (frame != null)
frame.delete();
}
}
public static abstract class FrameAssertion {
protected final String file;
private final int[] dim; // columns X rows
public FrameAssertion(String file, int[] dim) {
this.file = file;
this.dim = dim;
}
public Frame prepare() {
return parseTestFile(file);
}
public void done(Frame frame) {
}
public void check(Frame frame) {
}
public final int nrows() {
return dim[1];
}
public final int ncols() {
return dim[0];
}
}
public static abstract class GenFrameAssertion extends FrameAssertion {
public GenFrameAssertion(String file, int[] dim) {
this(file, dim, null);
}
public GenFrameAssertion(String file, int[] dim, ParseSetupTransformer psTransformer) {
super(file, dim);
this.psTransformer = psTransformer;
}
protected File generatedFile;
protected ParseSetupTransformer psTransformer;
protected abstract File prepareFile() throws IOException;
@Override
public Frame prepare() {
try {
File f = generatedFile = prepareFile();
System.out.println("File generated into: " + f.getCanonicalPath());
if (f.isDirectory()) {
return parseTestFolder(f.getCanonicalPath(), null, ParseSetup.HAS_HEADER, null, psTransformer);
} else {
return parseTestFile(f.getCanonicalPath(), psTransformer);
}
} catch (IOException e) {
throw new RuntimeException("Cannot prepare test frame from file: " + file, e);
}
}
@Override
public void done(Frame frame) {
if (generatedFile != null) {
generatedFile.deleteOnExit();
org.apache.commons.io.FileUtils.deleteQuietly(generatedFile);
}
}
}
  /** Convenience accessors for frequently used test datasets. */
  public static class Datasets {
    /** @return the iris dataset (with header row), parsed and published under key "iris.hex" */
    public static Frame iris() {
      return parseTestFile(Key.make("iris.hex"), "smalldata/iris/iris_wheader.csv");
    }
  }
  /**
   * Tests can hook into the parse process using this interface and modify some of the guessed parameters.
   * This simplifies the test workflow as usually most of the guessed parameters are correct and the test really only
   * needs to modify/add a few parameters.
   */
  public interface ParseSetupTransformer {
    /**
     * @param guessedSetup parse setup as guessed by the parser
     * @return the (possibly modified) setup to actually use for parsing
     */
    ParseSetup transformSetup(ParseSetup guessedSetup);
  }
/**
* @param frame
* @param columnName column's name to be factorized
* @return Frame with factorized column
*/
public static Frame asFactor(Frame frame, String columnName) {
Vec vec = frame.vec(columnName);
frame.replace(frame.find(columnName), vec.toCategoricalVec());
vec.remove();
DKV.put(frame);
return frame;
}
  /** Prints the whole frame to stdout as a table, without rollup statistics. */
  public static void printOutFrameAsTable(Frame fr) {
    printOutFrameAsTable(fr, false, fr.numRows());
  }
public static void printOutFrameAsTable(Frame fr, boolean rollups, long limit) {
assert limit <= Integer.MAX_VALUE;
TwoDimTable twoDimTable = fr.toTwoDimTable(0, (int) limit, rollups);
System.out.println(twoDimTable.toString(2, true));
}
public void printOutColumnsMetadata(Frame fr) {
for (String header : fr.toTwoDimTable().getColHeaders()) {
String type = fr.vec(header).get_type_str();
int cardinality = fr.vec(header).cardinality();
System.out.println(header + " - " + type + String.format("; Cardinality = %d", cardinality));
}
}
protected static RowData toRowData(Frame fr, String[] columns, long row) {
RowData rd = new RowData();
for (String col : columns) {
Vec v = fr.vec(col);
if (!v.isNumeric() && !v.isCategorical()) {
throw new UnsupportedOperationException("Unsupported column type for column '" + col + "': " + v.get_type_str());
}
if (!v.isNA(row)) {
Object val;
if (v.isCategorical()) {
val = v.domain()[(int) v.at8(row)];
} else {
val = v.at(row);
}
rd.put(col, val);
}
}
return rd;
}
protected static double[] toNumericRow(Frame fr, long row) {
double[] result = new double[fr.numCols()];
for (int i = 0; i < result.length; i++) {
result[i] = fr.vec(i).at(row);
}
return result;
}
  /**
   * Compares two frames. Two frames are equal if and only if they contain the same number of columns, rows,
   * and values at each cell (coordinate) are the same. Column names are ignored, as well as chunks sizes and all other
   * aspects besides those explicitly mentioned.
   *
   * Equivalent to {@code compareFrames(f1, f2, delta, 0.0)} (no relative tolerance).
   *
   * @param f1 Frame to be compared, not null
   * @param f2 Frame to be compared, not null
   * @param delta absolute tolerance
   * @return True if frames are the same up to tolerance - number of columns, rows & values at each cell.
   * @throws AssertionError If any inequalities are found
   * @throws IllegalArgumentException If input frames don't have the same column and row count
   */
  public static boolean compareFrames(final Frame f1, final Frame f2, double delta) {
    return compareFrames(f1, f2, delta, 0.0);
  }
/**
* Compares two frames. Two frames are equal if and only if they contain the same number of columns, rows,
* and values at each cell (coordinate) are the same. Column names are ignored, as well as chunks sizes and all other
* aspects besides those explicitly mentioned.
*
* @param f1 Frame to be compared, not null
* @param f2 Frame to be compared, not null
* @param delta absolute tolerance
* @param relativeDelta relative tolerance
* @return True if frames are the same up to tolerance - number of columns, rows & values at each cell.
* @throws AssertionError If any inequalities are found
* @throws IllegalArgumentException If input frames don't have the same column and row count
*/
public static boolean compareFrames(final Frame f1, final Frame f2, double delta, double relativeDelta) {
Objects.requireNonNull(f1);
Objects.requireNonNull(f2);
if (f1.numCols() != f2.numCols())
throw new IllegalArgumentException(String.format("Number of columns is not the same: {%o, %o}",
f1.numCols(), f2.numCols()));
if (f1.numRows() != f2.numRows())
throw new IllegalArgumentException(String.format("Number of rows is not the same: {%o, %o}",
f1.numRows(), f2.numRows()));
for (int vecNum = 0; vecNum < f1.numCols(); vecNum++) {
final Vec f1Vec = f1.vec(vecNum);
final Vec f2Vec = f2.vec(vecNum);
assertVecEquals(f1Vec, f2Vec, delta, relativeDelta);
}
return true;
}
public static final String[] ignoredColumns(final Frame frame, final String... usedColumns) {
Set<String> ignored = new HashSet(Arrays.asList(frame.names()));
ignored.removeAll(Arrays.asList(usedColumns));
return ignored.toArray(new String[ignored.size()]);
}
  /**
   * Compares two frames for exact equality (zero tolerance).
   * Delegates to {@link #compareFrames(Frame, Frame, double)} with {@code delta = 0}.
   */
  public static boolean compareFrames(final Frame f1, final Frame f2) throws IllegalStateException {
    return compareFrames(f1, f2, 0);
  }
  /**
   * Sets a locale cluster-wide. Consider returning it back to the default value.
   *
   * Note: this changes the JVM-wide default {@link Locale} on every node of the cloud,
   * which affects all locale-sensitive formatting in those JVMs.
   *
   * @param locale Locale to set to the whole cluster
   */
  public static void setLocale(final Locale locale) {
    new ChangeLocaleTsk(locale)
            .doAllNodes();
  }
  /** MRTask that changes the default JVM {@link Locale} on every node it is run on. */
  private static class ChangeLocaleTsk extends MRTask<ChangeLocaleTsk> {
    private final Locale _locale; // locale to install as the JVM default
    public ChangeLocaleTsk(Locale locale) {
      this._locale = locale;
    }
    @Override
    protected void setupLocal() {
      // runs once per node; changes the process-wide default locale
      Locale.setDefault(_locale);
    }
  }
  /**
   * Converts a H2OFrame to a csv file for debugging purposes.
   *
   * @param fileNameWithPath: String containing filename with path that will contain the H2O Frame
   * @param h2oframe: H2O Frame to be saved as CSV file.
   * @param header: boolean to decide if column names should be saved. Set to false if don't care.
   * @param hex_string: boolean to decide if the double values are written in hex. Set to false if don't care.
   * @throws IOException when the target file cannot be written
   */
  public static void writeFrameToCSV(String fileNameWithPath, Frame h2oframe, boolean header, boolean hex_string)
          throws IOException {
    Frame.CSVStreamParams params = new Frame.CSVStreamParams()
        .setHeaders(header)
        .setHexString(hex_string);
    File targetFile = new File(fileNameWithPath);
    byte[] buffer = new byte[1 << 20]; // 1 MiB copy buffer
    int bytesRead;
    try (InputStream frameToStream = h2oframe.toCSV(params);
         OutputStream outStream = new FileOutputStream(targetFile)) {
      // Note the "> 0" loop condition: the frame's CSV stream signals EOF by returning 0
      // (not the usual -1), so generic copy utilities (e.g. Files.copy) must NOT be used here.
      while ((bytesRead = frameToStream.read(buffer)) > 0) { // for our toCSV stream, return 0 as EOF, not -1
        outStream.write(buffer, 0, bytesRead);
      }
    }
  }
/**
* @param len Length of the resulting vector
* @param randomSeed Seed for the random generator (for reproducibility)
* @return An instance of {@link Vec} with binary weights (either 0.0D or 1.0D, nothing in between).
*/
public static Vec createRandomBinaryWeightsVec(final long len, final long randomSeed) {
final Vec weightsVec = Vec.makeZero(len, Vec.T_NUM);
final Random random = RandomUtils.getRNG(randomSeed);
for (int i = 0; i < weightsVec.length(); i++) {
weightsVec.set(i, random.nextBoolean() ? 1.0D : 0D);
}
return weightsVec;
}
/**
* @param len Length of the resulting vector
* @param randomSeed Seed for the random generator (for reproducibility)
* @return An instance of {@link Vec} with random double values
*/
public static Vec createRandomDoubleVec(final long len, final long randomSeed) {
final Vec vec = Vec.makeZero(len, Vec.T_NUM);
final Random random = RandomUtils.getRNG(randomSeed);
for (int i = 0; i < vec.length(); i++) {
vec.set(i, random.nextDouble());
}
return vec;
}
/**
* @param len Length of the resulting vector
* @param randomSeed Seed for the random generator (for reproducibility)
* @return An instance of {@link Vec} with random categorical values
*/
public static Vec createRandomCategoricalVec(final long len, final long randomSeed) {
String[] domain = new String[100];
for (int i = 0; i < domain.length; i++) domain[i] = "CAT_" + i;
final Vec vec = Scope.track(Vec.makeZero(len, Vec.T_NUM)).makeZero(domain);
final Random random = RandomUtils.getRNG(randomSeed);
for (int i = 0; i < vec.length(); i++) {
vec.set(i, random.nextInt(domain.length));
}
return vec;
}
@SuppressWarnings("rawtypes")
public static GenModel toMojo(Model model, String testName, boolean readModelMetaData) {
final String filename = testName + ".zip";
StreamingSchema ss = new StreamingSchema(model.getMojo(), filename);
try (FileOutputStream os = new FileOutputStream(ss.getFilename())) {
ss.getStreamWriter().writeTo(os);
} catch (IOException e) {
throw new IllegalStateException("MOJO writing failed", e);
}
try {
MojoReaderBackend cr = MojoReaderBackendFactory.createReaderBackend(filename);
return ModelMojoReader.readFrom(cr, readModelMetaData);
} catch (IOException e) {
throw new IllegalStateException("MOJO loading failed", e);
} finally {
boolean deleted = new File(filename).delete();
if (!deleted) Log.warn("Failed to delete the file");
}
}
public static boolean isCI() {
return System.getProperty("user.name").equals("jenkins");
}
  /**
   * Asserts that the given object is stored in DKV under the given key and that the stored
   * copy has the same checksum as the local object (content comparison, not reference equality).
   */
  public static <T extends Keyed<T>> void assertInDKV(Key<T> key, T object) {
    assertEquals(key, object._key);
    T dkvObject = DKV.getGet(key);
    assertNotNull(dkvObject);
    // compare by checksum; semantics of the boolean flag come from Keyed.checksum(boolean)
    assertEquals(object.checksum(true), dkvObject.checksum(true));
  }
public static Vec transformVec(Vec vec, Function<Double, Double> transform) {
new MRTask() {
@Override
public void map(Chunk c) {
for (int i = 0; i < c._len; i++) {
if (c.isNA(i))
continue;
c.set(i, transform.apply(c.atd(i)));
}
}
}.doAll(vec);
return vec;
}
  /**
   * Debugging-only function that lets the developer open Flow (or R/Py) during execution of a junit test
   * and inspect the model.
   *
   * Blocks the calling test until H2O shutdown is requested. Throws on CI so that a
   * forgotten call cannot hang the build.
   */
  @SuppressWarnings("unused")
  @Deprecated // just to make it noticeable in IDE
  public static void browser() {
    if (isCI()) {
      throw new IllegalStateException("Never leave browser() calls in committed source code - only for debugging");
    }
    // walk up the directory tree until we find the repository root (identified by h2o-core)
    File root = new File(".");
    while (!new File(root, "h2o-core").isDirectory()) {
      root = new File(root, "..");
    }
    H2O.registerResourceRoot(new File(root, "h2o-web/src/main/resources/www"));
    H2O.registerResourceRoot(new File(root, "h2o-core/src/main/resources/www"));
    String message = "Open H2O Flow in your web browser: ";
    System.err.println(message + H2O.getURL(NetworkInit.h2oHttpView.getScheme()));
    // block until the developer shuts H2O down from the browser/REPL
    // NOTE(review): once the thread is interrupted, sleep() will keep throwing immediately and
    // the loop becomes a busy spin — acceptable for a debug-only helper, but worth knowing
    while (!H2O.getShutdownRequested()) {
      try {
        Thread.sleep(60 * 1000);
        System.err.println("Still waiting for H2O to shutdown");
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve interrupt status
      }
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/TestUtilTest.java
|
package water;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import water.fvec.Frame;
import water.fvec.TestFrameBuilder;
import water.fvec.Vec;
import java.io.File;
import java.io.IOException;
import static org.junit.Assert.assertTrue;
/**
 * We need to make sure that our tools for testing are reliable as well
 */
public class TestUtilTest extends TestUtil {
  @Rule
  public transient TemporaryFolder tmp = new TemporaryFolder(); // per-test scratch directory, auto-deleted by JUnit
  @BeforeClass public static void setup() {
    stall_till_cloudsize(1); // wait until the H2O cloud is formed before running any test
  }
  /** Verifies that TestUtil.asFactor() turns a String column into a categorical one. */
  @Test
  public void asFactor() {
    Scope.enter();
    try {
      Frame fr = new TestFrameBuilder()
              .withName("testFrame")
              .withColNames("ColA")
              .withVecTypes(Vec.T_STR)
              .withDataForCol(0, ar("yes", "no"))
              .build();
      Scope.track(fr);
      assertTrue(fr.vec(0).isString());
      Frame res = asFactor(fr, "ColA");
      assertTrue(res.vec(0).isCategorical());
      Scope.track(res);
    } finally {
      Scope.exit(); // removes all tracked keys even when an assertion fails
    }
  }
  /** Round-trip: export a frame to CSV and parse it back; the result must equal the original. */
  @Test
  public void writeFrameToCSV() throws IOException {
    // NOTE(review): restricted to a single-node cloud — presumably because the CSV is written
    // to a node-local path; confirm before relaxing
    Assume.assumeTrue(H2O.getCloudSize() == 1);
    try {
      Scope.enter();
      Frame f = TestFrameCatalog.oneChunkFewRows();
      f.remove("col_3");
      File exportedFile = new File(tmp.getRoot(), "export.csv");
      writeFrameToCSV(exportedFile.getAbsolutePath(), f, true, false);
      Frame imported = Scope.track(parseTestFile(exportedFile.getAbsolutePath()));
      assertFrameEquals(f, imported, 0);
    } finally {
      Scope.exit();
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/fvec/FVecFactory.java
|
package water.fvec;
import water.*;
import water.util.StringUtils;
/** Test helpers that build ByteVec-backed frames directly from in-memory strings. */
public class FVecFactory {
  /**
   * Creates a ByteVec from the given strings (one chunk per string), wraps it in a
   * single-column Frame and publishes the frame into DKV under the given key.
   *
   * @param k key under which the resulting frame is stored
   * @param data chunk contents; each string becomes one chunk
   * @return the key {@code k} of the published frame
   */
  public static Key makeByteVec(Key k, String... data) {
    byte [][] chunks = new byte[data.length][];
    long [] espc = new long[data.length+1];
    for(int i = 0; i < chunks.length; ++i){
      chunks[i] = StringUtils.bytesOf(data[i]);
      espc[i+1] = espc[i] + data[i].length(); // cumulative lengths define the chunk boundaries
    }
    Futures fs = new Futures();
    Key key = Vec.newKey();
    ByteVec bv = new ByteVec(key,Vec.ESPC.rowLayout(key,espc));
    for(int i = 0; i < chunks.length; ++i){
      Key chunkKey = bv.chunkKey(i);
      DKV.put(chunkKey, new Value(chunkKey,chunks[i].length,chunks[i], TypeMap.C1NCHUNK,Value.ICE),fs);
    }
    DKV.put(bv._key,bv,fs);
    Frame fr = new Frame(k,new String[]{"makeByteVec"},new Vec[]{bv});
    DKV.put(k, fr, fs);
    fs.blockForPending(); // ensure all DKV writes completed before handing out the key
    return k;
  }
  /**
   * Creates a bare ByteVec (no enclosing Frame) from the given strings, one chunk per string.
   *
   * @param data chunk contents
   * @return the key of the ByteVec itself
   */
  public static Key makeByteVec(String... data) {
    Futures fs = new Futures();
    long[] espc = new long[data.length+1];
    for( int i = 0; i < data.length; ++i ) espc[i+1] = espc[i]+data[i].length();
    Key k = Vec.newKey();
    ByteVec bv = new ByteVec(k,Vec.ESPC.rowLayout(k,espc));
    DKV.put(k,bv,fs);
    for( int i = 0; i < data.length; ++i ) {
      Key ck = bv.chunkKey(i);
      DKV.put(ck, new Value(ck,new C1NChunk(StringUtils.bytesOf(data[i]))),fs);
    }
    fs.blockForPending();
    return k;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/fvec/FrameTestUtil.java
|
package water.fvec;
import hex.CreateFrame;
import org.junit.Assert;
import org.junit.Ignore;
import water.DKV;
import water.Key;
import water.MRTask;
import water.parser.BufferedString;
import water.util.Log;
import java.util.ArrayList;
import java.util.Arrays;
/**
 * Methods to access frame internals.
 */
@Ignore("Support for tests, but no actual tests here")
public class FrameTestUtil {

  /**
   * Builds a single String-column frame with an explicit chunk layout and data.
   *
   * @param fname frame key name
   * @param chunkLayout number of rows in each chunk
   * @param data per-chunk row values (null entries become NAs)
   * @return the finalized frame, re-read from DKV
   */
  public static Frame createFrame(String fname, long[] chunkLayout, String[][] data) {
    Frame f = new Frame(Key.<Frame>make(fname));
    f.preparePartialFrame(new String[]{"C0"});
    f.update();
    // Create chunks
    byte[] types = new byte[] {Vec.T_STR};
    for (int i = 0; i < chunkLayout.length; i++) {
      createNC(fname, data[i], i, (int) chunkLayout[i], types);
    }
    // Reload frame from DKV
    f = DKV.get(fname).get();
    // Finalize frame
    f.finalizePartialFrame(chunkLayout, new String[][] { null }, types);
    return f;
  }

  /** Creates and closes one NewChunk filled with the given String data. */
  public static NewChunk createNC(String fname, String[] data, int cidx, int len, byte[] types) {
    NewChunk[] nchunks = Frame.createNewChunks(fname, types, cidx);
    for (int i = 0; i < len; i++) {
      // the original ternary (data[i] != null ? data[i] : null) was a no-op — nulls pass through either way
      nchunks[0].addStr(data[i]);
    }
    Frame.closeNewChunks(nchunks);
    return nchunks[0];
  }

  /**
   * Builds a single numeric-column frame; chunk {@code i} holds the values
   * {@code i*1000 .. i*1000 + chunkLayout[i] - 1}.
   */
  public static Frame createFrame(String fname, long[] chunkLayout) {
    // Create a frame
    Frame f = new Frame(Key.<Frame>make(fname));
    f.preparePartialFrame(new String[]{"C0"});
    f.update();
    byte[] types = new byte[] {Vec.T_NUM};
    // Create chunks
    for (int i = 0; i < chunkLayout.length; i++) {
      createNC(fname, i, (int) chunkLayout[i], types);
    }
    // Reload frame from DKV
    f = DKV.get(fname).get();
    // Finalize frame
    f.finalizePartialFrame(chunkLayout, new String[][] { null }, types);
    return f;
  }

  /** Creates and closes one numeric NewChunk with values cidx*1000, cidx*1000 + 1, ... */
  public static NewChunk createNC(String fname, int cidx, int len, byte[] types) {
    NewChunk[] nchunks = Frame.createNewChunks(fname, types, cidx);
    int startVal = cidx * 1000; // renamed from "starVal" (typo)
    for (int i = 0; i < len; i++) {
      nchunks[0].addNum(startVal + i);
    }
    Frame.closeNewChunks(nchunks);
    return nchunks[0];
  }

  /** Asserts the first column of the frame equals the expected values (null == NA). */
  public static void assertValues(Frame f, String[] expValues) {
    assertValues(f.vec(0), expValues);
  }

  /** Asserts the Vec's content equals the expected values (null == NA). */
  public static void assertValues(Vec v, String[] expValues) {
    Assert.assertEquals("Number of rows", expValues.length, v.length());
    BufferedString tmpStr = new BufferedString();
    for (int i = 0; i < v.length(); i++) {
      if (v.isNA(i)) Assert.assertNull("NAs should match", expValues[i]); // assertNull instead of assertEquals(null, ...)
      else Assert.assertEquals("Values should match", expValues[i], v.atStr(tmpStr, i).toString());
    }
  }

  /** Collects the Vec's String values into an array; NAs become nulls. */
  public static String[] collectS(Vec v) {
    String[] res = new String[(int) v.length()];
    BufferedString tmpStr = new BufferedString();
    for (int i = 0; i < v.length(); i++)
      res[i] = v.isNA(i) ? null : v.atStr(tmpStr, i).toString();
    return res;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/fvec/TestFrameBuilder.java
|
package water.fvec;
import org.junit.Ignore;
import water.DKV;
import water.Key;
import water.Scope;
import water.rapids.Env;
import water.rapids.Session;
import water.util.ArrayUtils;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
/**
* Class used for creating simple test frames using builder pattern
* <p>
* Example usage:
* <pre>{@code
* final Frame builder = new TestFrameBuilder()
* .withName("testFrame")
* .withColNames("ColA", "ColB", "ColC")
* .withVecTypes(Vec.T_NUM, Vec.T_STR, Vec.T_CAT)
* .withDataForCol(0, ard(Double.NaN, 1, 2, 3, 4, 5.6, 7))
* .withDataForCol(1, ar("A", "B", "C", "E", "F", "I", "J"))
* .withDataForCol(2, ar("A", "B,", "A", "C", "A", "B", "A"))
* .withChunkLayout(2, 2, 2, 1)
* .build();
* }
* </pre>
 * Data for categorical columns are set in the same way as for string columns and levels are created automatically.<br/>
* All methods in this builder are optional:
* <ul>
 * <li> Frame name is created if not provided.</li>
* <li> Column names are created automatically if not provided.</li>
 * <li> Vector types are initialized to all T_NUMs when not provided. For example, an empty frame (
 * no data, no columns) can be created as {@code Frame fr = new TestFrameBuilder().build()}.</li>
* <li> Column data are initialized to empty array when not provided. The following example creates frames with 2 columns,
* but no data. {@code Frame fr = new TestFrameBuilder().withVecTypes(Vec.T_NUM).build()}.</li>
* <li> Only one chunk is created when chunk layout is not provided.</li>
* </ul>
*
* The frame created will be automatically tracked in the currently active {@link Scope}.
*/
@Ignore
public class TestFrameBuilder {

  private static final long NOT_SET = -1;
  // raw per-column data; which map holds a column's data depends on its Vec type
  private Map<Integer, String[]> stringData = new HashMap<>();
  private Map<Integer, double[]> numericData = new HashMap<>();
  private Map<Integer, String[]> givenDomains = new HashMap<>();
  private String frameName;
  private byte[] vecTypes;
  private String[] colNames;
  private long[] chunkLayout;
  private int numCols;
  private Key<Frame> key;
  private long numRows = NOT_SET;
  private String[][] domains = null; // per-column categorical domains; stays null when there is no T_CAT column
  private Map<Integer, Integer[]> categoriesPerCol = new HashMap<>();

  /**
   * Sets the name for the frame. Default name is created if this method is not called.
   */
  public TestFrameBuilder withName(String frameName) {
    throwIf(frameName.startsWith("$"), "Frame name " + frameName + " may only be used with a Session object.");
    this.frameName = frameName;
    return this;
  }

  /** Sets the frame name, expanding it in the context of the given Rapids session. */
  public TestFrameBuilder withName(String frameName, Session session) {
    return withName(new Env(session).expand(frameName));
  }

  /**
   * Sets the names for the columns. Default names are created if this method is not called.
   */
  public TestFrameBuilder withColNames(String... colNames) {
    this.colNames = colNames;
    return this;
  }

  /**
   * Sets the vector types. Vector types are initialized to empty array if this method is not called.
   */
  public TestFrameBuilder withVecTypes(byte... vecTypes) {
    this.vecTypes = vecTypes;
    return this;
  }

  /**
   * Sets the vectors types to a single, uniform value for each vector
   *
   * @param nvecs Number of vectors in the frame
   * @param vecType Uniform type of the vectors
   */
  public TestFrameBuilder withUniformVecTypes(final int nvecs, final byte vecType) {
    byte[] vecTypes = new byte[nvecs];
    for (int i = 0; i < nvecs; i++) {
      vecTypes[i] = vecType;
    }
    this.vecTypes = vecTypes;
    return this;
  }

  /**
   * Generate a sequence of integer data
   *
   * @param column for which to set data
   * @param from minimal value to generate (included)
   * @param to maximum value to generate (excluded)
   */
  public TestFrameBuilder withSequenceIntDataForCol(int column, int from, int to) {
    assert to > from;
    int size = to - from;
    double[] arr = new double[size];
    for (int i = from; i < to; i++) {
      // index relative to the start of the array; the previous arr[i] = i indexed out of
      // bounds whenever from != 0
      arr[i - from] = i;
    }
    numericData.put(column, arr);
    return this;
  }

  /**
   * Generate random integer data for a particular column
   *
   * @param column for which to set data
   * @param size size of randomly generated column
   * @param min minimal value to generate
   * @param max maximum value to generate
   */
  public TestFrameBuilder withRandomIntDataForCol(int column, int size, int min, int max, long seed) {
    assert max > min;
    assert seed + size * size <= Long.MAX_VALUE;
    double[] arr = new double[size];
    for (int i = 0; i < size; i++) {
      // fresh deterministic RNG per element keeps results independent of iteration order
      arr[i] = min + new Random(seed + i * size).nextInt(max - min);
    }
    numericData.put(column, arr);
    return this;
  }

  /**
   * Generate random double data for a particular column
   *
   * @param column for which to set data
   * @param size size of randomly generated column
   * @param min minimal value to generate
   * @param max maximum value to generate
   */
  public TestFrameBuilder withRandomDoubleDataForCol(int column, int size, int min, int max, long seed) {
    assert max >= min;
    double[] arr = new double[size];
    for (int i = 0; i < size; i++) {
      arr[i] = min + (max - min) * new Random(seed + i * size).nextDouble();
    }
    numericData.put(column, arr);
    return this;
  }

  /**
   * Generate random binary ("true"/"false" string) data for a particular column
   *
   * @param column for which to set data
   * @param size size of randomly generated column
   * @param seed seed for reproducibility
   */
  public TestFrameBuilder withRandomBinaryDataForCol(int column, int size, long seed) {
    String[] arr = new String[size];
    Random generator = new Random();
    long multiplierFromRandomClass = 0x5DEECE66DL;
    assert seed + size * multiplierFromRandomClass < Long.MAX_VALUE;
    for (int i = 0; i < size; i++) {
      generator.setSeed(seed + i * multiplierFromRandomClass);
      arr[i] = Boolean.toString(generator.nextBoolean());
    }
    stringData.put(column, arr);
    return this;
  }

  /**
   * Sets data for a particular column
   *
   * @param column for which to set data
   * @param data array of string data
   */
  public TestFrameBuilder withDataForCol(int column, String[] data) {
    stringData.put(column, data);
    return this;
  }

  /**
   * Sets data for a particular column
   *
   * @param column for which to set data
   * @param data array of double data
   */
  public TestFrameBuilder withDataForCol(int column, double[] data) {
    numericData.put(column, data);
    return this;
  }

  /**
   * Sets data for a particular column
   *
   * @param column for which to set data
   * @param data array of long data
   */
  public TestFrameBuilder withDataForCol(int column, long[] data) {
    if (data == null) {
      numericData.put(column, null);
    } else {
      double[] doubles = new double[data.length];
      for (int i = 0; i < data.length; i++) {
        doubles[i] = data[i];
      }
      numericData.put(column, doubles);
    }
    return this;
  }

  /**
   * Sets data for a particular column
   *
   * @param column for which to set data
   * @param data array of int data
   */
  public TestFrameBuilder withDataForCol(int column, int[] data) {
    double[] doubles = ArrayUtils.toDouble(data);
    return withDataForCol(column, doubles);
  }

  /** Sets an explicit categorical domain for a column (order of the array defines the levels). */
  public TestFrameBuilder withDomain(int column, String[] domain) {
    givenDomains.put(column, domain);
    return this;
  }

  /** Sets the number of rows per chunk; by default the frame is built as a single chunk. */
  public TestFrameBuilder withChunkLayout(long... chunkLayout) {
    this.chunkLayout = chunkLayout;
    return this;
  }

  /**
   * Validates the configuration and builds the frame, publishing it into DKV
   * and tracking it in the active {@link Scope}.
   */
  public Frame build() {
    prepareAndCheck();
    // Create a frame
    Frame f = new Frame(key);
    f.preparePartialFrame(colNames);
    f.update();
    // Create chunks
    int cidx = 0;
    long start = 0;
    for (long chnkSize : chunkLayout) {
      createChunks(start, chnkSize, cidx);
      cidx++;
      start = start + chnkSize;
    }
    // Reload frame from DKV
    f = DKV.get(key).get();
    // Finalize frame
    f.finalizePartialFrame(chunkLayout, domains, vecTypes);
    Scope.track(f);
    return f;
  }

  //--------------------------------------------------------------------------------------------------------------------
  // Private
  //--------------------------------------------------------------------------------------------------------------------

  private void prepareAndCheck() {
    // this check has to be run as the first one
    checkVecTypes();
    checkNames();
    // check that we have data for all columns and all columns have the same number of elements
    checkColumnData();
    checkFrameName();
    checkChunkLayout();
    prepareCategoricals();
  }

  // Utility method to get unique values from categorical domain
  private String[] getUniqueValues(Map<String, Integer> mapping) {
    String[] values = new String[mapping.size()];
    for (String key : mapping.keySet())
      values[mapping.get(key)] = key;
    return values;
  }

  // Utility method to convert domain into categories
  private Integer[] applyDomainMapping(Map<String, Integer> mapping, String[] original) {
    Integer[] categoricals = new Integer[original.length];
    for (int i = 0; i < original.length; i++) {
      categoricals[i] = original[i] == null ? null : mapping.get(original[i]);
    }
    return categoricals;
  }

  private Map<String, Integer> getMapping(String[] array) {
    return getMapping(array, false);
  }

  // Utility method to get mapping from domain member to its level
  private Map<String, Integer> getMapping(String[] array, boolean useOrderInArray) {
    Map<String, Integer> mapping = new TreeMap<>();
    int level = 0;
    for (String item : array) {
      if ((item != null) && (! mapping.containsKey(item))) {
        mapping.put(item, useOrderInArray ? level++ : 0);
      }
    }
    if (!useOrderInArray) { // use lexicographic order instead (default behaviour of H2O parser)
      // TreeMap iterates in key order, so re-numbering here assigns lexicographic levels
      for (Map.Entry<String, Integer> entry : mapping.entrySet()) {
        entry.setValue(level++);
      }
    }
    return mapping;
  }

  private void prepareCategoricals() {
    // domains is not null if there is any T_CAT
    for (int colIdx = 0; colIdx < vecTypes.length; colIdx++) {
      if (givenDomains.containsKey(colIdx)) { // domain set explicitly
        String[] doms = givenDomains.get(colIdx);
        domains[colIdx] = doms;
        Map<String, Integer> mapping = getMapping(doms, true);
        Integer[] categories = applyDomainMapping(mapping, stringData.get(colIdx));
        categoriesPerCol.put(colIdx, categories);
      } else if (vecTypes[colIdx] == Vec.T_CAT) { // default domain extraction (use lexicographical order)
        Map<String, Integer> mapping = getMapping(stringData.get(colIdx));
        Integer[] categories = applyDomainMapping(mapping, stringData.get(colIdx));
        domains[colIdx] = getUniqueValues(mapping);
        categoriesPerCol.put(colIdx, categories);
      } else {
        if (domains != null) {
          domains[colIdx] = null;
        }
      }
    }
  }

  // Fills and closes the chunks [start, start+length) for every column at chunk index cidx
  private void createChunks(long start, long length, int cidx) {
    NewChunk[] nchunks = Frame.createNewChunks(key.toString(), vecTypes, cidx);
    for (int i = (int) start; i < start + length; i++) {
      for (int colIdx = 0; colIdx < vecTypes.length; colIdx++) {
        switch (vecTypes[colIdx]) {
          case Vec.T_NUM:
            nchunks[colIdx].addNum(numericData.get(colIdx)[i]);
            break;
          case Vec.T_STR:
            nchunks[colIdx].addStr(stringData.get(colIdx)[i]);
            break;
          case Vec.T_TIME:
            nchunks[colIdx].addNum(numericData.get(colIdx)[i]);
            break;
          case Vec.T_CAT:
            Integer cat = categoriesPerCol.get(colIdx)[i];
            if (cat != null)
              nchunks[colIdx].addCategorical(cat);
            else
              nchunks[colIdx].addNA(); // value was null or outside the explicit domain
            break;
          case Vec.T_UUID:
            final String strValue = stringData.get(colIdx)[i];
            if (strValue == null)
              nchunks[colIdx].addNA();
            else {
              UUID uuidValue = UUID.fromString(strValue);
              nchunks[colIdx].addUUID(uuidValue);
            }
            break;
          case Vec.T_BAD:
            nchunks[colIdx].addNum(numericData.get(colIdx)[i]);
            break;
          default:
            throw new UnsupportedOperationException("Unsupported Vector type for the builder");
        }
      }
    }
    Frame.closeNewChunks(nchunks);
  }

  // this check has to be called as the first one
  private void checkVecTypes() {
    if (vecTypes == null) {
      // default: one numeric vector per column name (or none when no names were given)
      if (colNames == null) {
        vecTypes = new byte[0];
      } else {
        vecTypes = new byte[colNames.length];
        for (int i = 0; i < colNames.length; i++)
          vecTypes[i] = Vec.T_NUM;
      }
    }
    numCols = vecTypes.length;
    for (int i = 0; i < vecTypes.length; i++) {
      switch (vecTypes[i]) {
        case Vec.T_TIME:
        case Vec.T_NUM:
        case Vec.T_BAD:
          if (numericData.get(i) == null) {
            numericData.put(i, new double[0]); // init with no data as default
          }
          break;
        case Vec.T_CAT:
          // initiate domains if there is any categorical column and fall-through
          domains = new String[vecTypes.length][];
        case Vec.T_UUID:
        case Vec.T_STR:
          if (stringData.get(i) == null) {
            stringData.put(i, new String[0]); // init with no data as default
          }
          break;
      }
    }
  }

  private void checkNames() {
    if (colNames == null) {
      colNames = new String[vecTypes.length];
      for (int i = 0; i < vecTypes.length; i++) {
        colNames[i] = "col_" + i;
      }
    } else {
      throwIf(colNames.length != vecTypes.length, "Number of vector types and number of column names differ.");
    }
  }

  private void checkFrameName() {
    if (frameName == null) {
      key = Key.make();
    } else {
      key = Key.make(frameName);
    }
  }

  private void checkChunkLayout() {
    // this expects that method checkColumnData has been executed
    if (chunkLayout != null) {
      // sum all numbers in the chunk layout, it should match the number of rows in the frame
      long sum = 0; // long accumulator: chunk sizes are longs, an int sum could overflow
      for (long numPerChunk : chunkLayout) {
        sum += numPerChunk;
      }
      throwIf(sum > numRows, "Total chunk capacity is higher then available number of elements. " +
          "Check withChunkLayout() and make sure that sum of the arguments is equal to number of the rows in frame.");
      throwIf(sum < numRows, "Not enough chunk capacity to store " + numRows + " rows. " +
          "Check withChunkLayout() and make sure that sum of the arguments is equal to number of the rows in frame.");
    } else {
      // create chunk layout - by default 1 chunk
      chunkLayout = new long[]{numRows};
    }
  }

  private void checkColumnData() {
    for (int colIdx = 0; colIdx < numCols; colIdx++) {
      switch (vecTypes[colIdx]) {
        case Vec.T_TIME: // fall-through to T_NUM
        case Vec.T_NUM:
          if (numRows == NOT_SET) {
            numRows = numericData.get(colIdx).length;
          } else {
            throwIf(numRows != numericData.get(colIdx).length, "Columns have different number of elements");
          }
          break;
        case Vec.T_CAT: // fall-through to T_STR
        case Vec.T_STR:
        case Vec.T_UUID:
          if (numRows == NOT_SET) {
            numRows = stringData.get(colIdx).length;
          } else {
            throwIf(numRows != stringData.get(colIdx).length, "Columns have different number of elements");
          }
          break;
        case Vec.T_BAD:
          final double[] data = numericData.get(colIdx);
          numRows = data.length;
          for (double datum : data) {
            throwIf(!Double.isNaN(datum), "All elements in a bad column must be NAs.");
          }
          break;
        default:
          throw new UnsupportedOperationException("Unsupported Vector type for the builder");
      }
    }
  }

  private void throwIf(boolean condition, String msg) {
    if (condition) {
      throw new IllegalArgumentException(msg);
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/fvec/TestFrameBuilderTest.java
|
package water.fvec;
import org.junit.BeforeClass;
import org.junit.Test;
import water.Scope;
import water.TestUtil;
import water.parser.BufferedString;
import water.util.ArrayUtils;
import water.util.FrameUtils;
import water.util.VecUtils;
import java.util.UUID;
import static org.junit.Assert.*;
/**
 * Tests for {@code TestFrameBuilder}.
 *
 * Note: JUnit's {@code assertEquals}/{@code assertArrayEquals} take the EXPECTED value as the
 * first argument; the assertions below follow that convention so failure messages read correctly
 * (the original code had expected/actual swapped throughout).
 */
public class TestFrameBuilderTest extends TestUtil {

  @BeforeClass
  public static void setup() { stall_till_cloudsize(1); }

  // Tolerance for floating-point comparisons.
  private static final double DELTA = 0.00001;

  @Test
  public void testEmpty(){
    Frame fr = new TestFrameBuilder().build();

    assertEquals(0, fr.vecs().length);
    assertEquals(0, fr.numRows());
    assertEquals(0, fr.numCols());
    assertNull(fr.anyVec()); // because we don't have any vectors
    fr.remove();
  }

  @Test
  public void testName(){
    Frame fr = new TestFrameBuilder()
            .withName("FrameName")
            .build();

    assertEquals("FrameName", fr._key.toString());
    assertEquals(0, fr.vecs().length);
    assertEquals(0, fr.numRows());
    assertEquals(0, fr.numCols());
    assertNull(fr.anyVec()); // because we don't have any vectors
    fr.remove();
  }

  @Test
  public void testVecTypes(){
    Frame fr = new TestFrameBuilder()
            .withVecTypes(Vec.T_CAT, Vec.T_NUM, Vec.T_TIME, Vec.T_STR)
            .build();

    assertArrayEquals(ar("col_0", "col_1", "col_2", "col_3"), fr.names());
    assertEquals(4, fr.vecs().length);
    assertEquals(0, fr.numRows());
    assertEquals(4, fr.numCols());
    assertEquals(Vec.T_CAT, fr.vec(0).get_type());
    assertEquals(Vec.T_NUM, fr.vec(1).get_type());
    assertEquals(Vec.T_TIME, fr.vec(2).get_type());
    assertEquals(Vec.T_STR, fr.vec(3).get_type());
    fr.remove();
  }

  /**
   * This test throws exception because size of specified vectors and size of specified names differ
   */
  @Test(expected = IllegalArgumentException.class)
  public void testWrongVecNameSize(){
    Frame fr = new TestFrameBuilder()
            .withVecTypes(Vec.T_CAT, Vec.T_NUM, Vec.T_TIME, Vec.T_STR)
            .withColNames("A", "B")
            .build();
    fr.remove();
  }

  @Test
  public void testColNames(){
    Frame fr = new TestFrameBuilder()
            .withVecTypes(Vec.T_CAT, Vec.T_NUM, Vec.T_TIME, Vec.T_STR)
            .withColNames("A", "B", "C", "D")
            .build();

    assertEquals(4, fr.vecs().length);
    assertEquals(0, fr.numRows());
    assertEquals(4, fr.numCols());
    assertArrayEquals(ar("A", "B", "C", "D"), fr.names());
    fr.remove();
  }

  @Test
  public void testDefaultChunks(){
    Frame fr = new TestFrameBuilder()
            .withVecTypes(Vec.T_CAT, Vec.T_NUM, Vec.T_TIME, Vec.T_STR)
            .withColNames("A", "B", "C", "D")
            .build();

    assertArrayEquals(ar(0, 0), fr.anyVec().espc()); // no data
    assertEquals(1, fr.anyVec().nChunks()); // 1 empty chunk
    fr.remove();
  }

  /**
   * This test throws exception because it expects more data ( via chunk layout) than is actually available
   */
  @Test(expected = IllegalArgumentException.class)
  public void testSetChunksToMany(){
    Frame fr = new TestFrameBuilder()
            .withVecTypes(Vec.T_CAT, Vec.T_NUM, Vec.T_TIME, Vec.T_STR)
            .withColNames("A", "B", "C", "D")
            .withChunkLayout(2, 2, 2, 1) // we are requesting 7 rows to be able to create 4 chunks, but we have 0 rows
            .build();
    fr.remove();
  }

  /**
   * This test throws exception because it gets more data than the chunks can contain (Total size of chunks is less
   * than the size of provided data) and it would result with frame with missing data in last rows
   */
  @Test(expected = IllegalArgumentException.class)
  public void testSetChunksFewerThanProvidedData(){
    Frame fr = new TestFrameBuilder()
            .withVecTypes(Vec.T_CAT, Vec.T_NUM)
            .withColNames("A", "B")
            .withDataForCol(0, ar("A", "B", "B", null, "F", "I"))
            .withDataForCol(1, ard(Double.NaN, 1, 2, 3, 4, 5.6))
            .withChunkLayout(1, 1, 2, 1) // we are requesting chunk capacity for 5 rows but provide 6 rows
            .build();
    fr.remove();
  }

  @Test
  public void testSetChunks(){
    final Frame fr = new TestFrameBuilder()
            .withName("frameName")
            .withColNames("ColA", "ColB")
            .withVecTypes(Vec.T_NUM, Vec.T_STR)
            .withDataForCol(0, ard(Double.NaN, 1, 2, 3, 4, 5.6, 7))
            .withDataForCol(1, ar("A", "B", "C", null, "F", "I", "J"))
            .withChunkLayout(2, 2, 2, 1)
            .build();

    assertEquals(4, fr.anyVec().nChunks());
    assertArrayEquals(new long[]{0, 2, 4, 6, 7}, fr.anyVec().espc());
    // check data in the frame
    assertEquals(Double.NaN, fr.vec(0).at(0), DELTA);
    assertEquals(5.6, fr.vec(0).at(5), DELTA);
    assertEquals(7, fr.vec(0).at(6), DELTA);

    BufferedString strBuf = new BufferedString();
    assertEquals("A", fr.vec(1).atStr(strBuf,0).toString());
    assertNull(fr.vec(1).atStr(strBuf,3));
    assertEquals("J", fr.vec(1).atStr(strBuf,6).toString());

    fr.remove();
  }

  /**
   * This test throws exception because the data has different length
   */
  @Test(expected = IllegalArgumentException.class)
  public void testDataDifferentSize(){
    final Frame fr = new TestFrameBuilder()
            .withVecTypes(Vec.T_NUM, Vec.T_STR)
            .withDataForCol(0, ard(Double.NaN, 1)) // 2 elements
            .withDataForCol(1, ar("A", "B", "C")) // 3 elements
            .build();
    fr.remove();
  }

  @Test
  public void withRandomIntDataForColTest(){
    long seed = 44L;
    int size = 1000;
    int min = 1;
    int max = 5;

    Frame fr = new TestFrameBuilder()
            .withName("testFrame")
            .withColNames("ColA")
            .withVecTypes(Vec.T_NUM)
            .withRandomIntDataForCol(0, size, min, max, seed)
            .build();

    printOutFrameAsTable(fr, false, size);
    Vec generatedVec = fr.vec(0);
    for(int i = 0; i < size; i++) {
      assertTrue(generatedVec.at(i) <= max && generatedVec.at(i) >= min);
    }

    fr.delete();
  }

  @Test
  public void withRandomDoubleDataForColTest(){
    long seed = 44L;
    int size = 1000;
    int min = 1;
    int max = 5;

    Frame fr = new TestFrameBuilder()
            .withName("testFrame")
            .withColNames("ColA")
            .withVecTypes(Vec.T_NUM)
            .withRandomDoubleDataForCol(0, size, min, max, seed)
            .build();

    printOutFrameAsTable(fr, false, size);
    Vec generatedVec = fr.vec(0);
    for(int i = 0; i < size; i++) {
      assertTrue(generatedVec.at(i) <= max && generatedVec.at(i) >= min);
    }

    fr.delete();
  }

  @Test
  public void numRowsIsWorkingForRandomlyGeneratedColumnsTest(){
    long seed = 44L;
    Frame fr = new TestFrameBuilder()
            .withName("testFrame")
            .withColNames("ColA")
            .withVecTypes(Vec.T_NUM)
            .withRandomDoubleDataForCol(0, 1000, 1, 5, seed)
            .build();

    long numberOfRowsGenerated = fr.numRows();
    assertEquals(1000, numberOfRowsGenerated);

    fr.delete();
  }

  @Test
  public void withRandomBinaryDataForColTest(){
    long seed = 44L;
    Frame fr = new TestFrameBuilder()
            .withName("testFrame")
            .withColNames("ColA")
            .withVecTypes(Vec.T_CAT)
            .withRandomBinaryDataForCol(0, 1000, seed)
            .build();

    assertEquals(2, fr.vec("ColA").cardinality());

    fr.delete();
  }

  @Test
  public void testBadVec(){
    try {
      Scope.enter();
      Frame frame = new TestFrameBuilder()
              .withVecTypes(Vec.T_BAD)
              .withDataForCol(0, ard(Float.NaN, Float.NaN, Float.NaN)) // All NaN column
              .withName("fr")
              .build();

      assertNotNull(frame);
      assertEquals(1, frame.numCols());

      final Vec badVec = frame.vec(0);
      assertEquals(Vec.T_BAD, badVec._type);
      assertEquals(3, badVec.length());
      assertTrue(badVec.isBad());

      for (int i = 0; i < badVec.length(); i++) {
        assertEquals(Float.NaN, badVec.at(i), 0D);
      }
    } finally {
      Scope.exit();
    }
  }

  @Test
  public void testSequenceIntDataForCol() {
    Scope.enter();
    try {
      Frame f = new TestFrameBuilder()
              .withVecTypes(Vec.T_NUM)
              .withSequenceIntDataForCol(0, 0, 10)
              .build();
      assertEquals("Unexpected number of rows", 10, f.numRows());
      assertArrayEquals("It is not a valid sequence column",
              new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
              FrameUtils.asInts(f.vec(0)));
    } finally {
      Scope.exit();
    }
  }

  @Test(expected = AssertionError.class)
  public void testSequenceIntDataForColBadInput() {
    Scope.enter();
    try {
      new TestFrameBuilder()
              .withVecTypes(Vec.T_NUM)
              .withSequenceIntDataForCol(0, 0, 0)
              .build();
    } finally {
      Scope.exit();
    }
  }

  @Test
  public void testUUID(){
    Scope.enter();
    try {
      UUID expectedUUID = UUID.randomUUID();
      Frame f = new TestFrameBuilder()
              .withVecTypes(Vec.T_UUID)
              .withDataForCol(0, ar(expectedUUID.toString(), null))
              .build();
      assertEquals(2, f.numRows());
      assertArrayEquals(new byte[]{Vec.T_UUID}, f.types());
      assertEquals(expectedUUID.getLeastSignificantBits(), f.vec(0).at16l(0));
      assertEquals(expectedUUID.getMostSignificantBits(), f.vec(0).at16h(0));
      assertTrue(f.vec(0).isNA(1));
    } finally {
      Scope.exit();
    }
  }

}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/fvec/VecHelper.java
|
package water.fvec;
import org.junit.Ignore;
import water.Value;
@Ignore // no actual test here: helps some tests to access package-private API of water.fvec.Vec
public class VecHelper {

  /**
   * Exposes the package-private {@code Vec.chunkIdx(int)} to tests outside this package.
   *
   * @param v    the vector to read from
   * @param cidx chunk index within the vector
   * @return the {@link Value} holding the requested chunk
   */
  public static Value vecChunkIdx(Vec v, int cidx) {
    return v.chunkIdx(cidx);
  }

}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/H2OTestRunner.java
|
package water.junit;
import org.junit.Ignore;
import org.junit.internal.JUnitSystem;
import org.junit.internal.RealSystem;
import org.junit.internal.TextListener;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import java.util.ArrayList;
import java.util.List;
/**
* Replacement of JUnitCore runner which handles
* generation into XML file.
*/
@Ignore("Support for tests, but no actual tests here")
public class H2OTestRunner {

  /**
   * Runs the test classes named in {@code args} with JUnitCore, reporting both to the console
   * and to XML files (via {@link XMLTestReporter}).
   *
   * @param args fully qualified names of test classes to run
   * @return the combined JUnit result; classes that could not be loaded are appended as failures
   */
  public Result run(String[] args) throws Exception {

    // List all classes - adapted from JUnitCore code
    List<Class<?>> classes = new ArrayList<>();
    List<Failure> missingClasses = new ArrayList<>();
    for (String arg : args) {
      try {
        classes.add(Class.forName(arg));
      } catch (ClassNotFoundException e) {
        // Record the missing class as a synthetic failure instead of aborting the whole run.
        Description description = Description.createSuiteDescription(arg);
        missingClasses.add(new Failure(description, e));
      }
    }
    // Create standard JUnitCore
    JUnitCore jcore = new JUnitCore();
    // Create default "system"
    JUnitSystem jsystem = new RealSystem();
    // Setup default listener
    jcore.addListener(new TextListener(jsystem));
    // Add XML generator listener
    jcore.addListener(new XMLTestReporter());
    Result result = jcore.run(classes.toArray(new Class[0]));
    for (Failure each : missingClasses) {
      System.err.println("FAIL Missing class in H2OTestRunner: " + each);
      result.getFailures().add(each);
    }
    return result;
  }

  /** Entry point: runs the given test classes and exits with 0 on success, 1 on any failure. */
  public static void main(String[] args) throws Exception {
    Result result = new H2OTestRunner().run(args);
    System.exit(result.wasSuccessful() ? 0 : 1);
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/Priority.java
|
package water.junit;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Assigns an evaluation priority to a JUnit rule class.
 * See {@code water.junit.rules.RulesPriorities} for the well-known values and ordering semantics.
 */
@Retention(RUNTIME)
@Target({FIELD, METHOD, TYPE, TYPE_USE})
public @interface Priority {
  /** The priority value; rules with higher values are applied last and evaluated outermost. */
  int value();
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/XMLTestReporter.java
|
package water.junit;
import org.apache.commons.lang.StringEscapeUtils;
import org.junit.Ignore;
import org.junit.runner.Description;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Text;
import water.H2ORuntime;
import water.TestUtil;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.InetAddress;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
/**
 * Simple XML reporter.
 *
 * By default outputting XML reports into build/test-results folder.
 * Can be changed via '-Djunit.reports.dir' JVM property.
 *
 * Adapted from https://www.redhat.com/archives/pki-devel/2011-December/msg00001.html
 *
 */
@Ignore("Support for tests, but no actual tests here")
public class XMLTestReporter extends RunListener {

  DateFormat dateFormat;

  DocumentBuilderFactory docBuilderFactory;
  DocumentBuilder docBuilder;
  Document document;

  TransformerFactory transFactory;
  Transformer trans;

  // Target directory for the generated TEST-*.xml files.
  String reportsDir;

  Element testSuiteElement;
  long testSuiteStartTime;

  Element testCaseElement;
  long testCaseStartTime;

  String currentTestSuiteName;

  // Per-suite counters, reset in startTestSuite().
  long testCount;
  long successCount;
  long failureCount;

  // Original std streams and their in-memory capture, swapped per suite.
  PrintStream stdOut;
  PrintStream stdErr;

  ByteArrayOutputStream out;
  ByteArrayOutputStream err;

  public XMLTestReporter() throws Exception {
    dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
    dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));

    docBuilderFactory = DocumentBuilderFactory.newInstance();
    docBuilder = docBuilderFactory.newDocumentBuilder();
    document = docBuilder.newDocument();
    // Placeholder so finishTestCase() never sees a null element even when a test
    // fails before testStarted() fires.
    testCaseElement = document.createElement("testcase");

    transFactory = TransformerFactory.newInstance();
    trans = transFactory.newTransformer();
    trans.setOutputProperty(OutputKeys.INDENT, "yes");

    reportsDir = System.getProperty("junit.reports.dir", "build/test-results");
    File rdir = new File(reportsDir);
    if (rdir.isFile()) throw new IllegalArgumentException(reportsDir + " is not a directory!");
    if (!rdir.exists()) rdir.mkdirs();
  }

  @Override
  public void testRunFinished(Result result) throws Exception {
    if (currentTestSuiteName != null) {
      finishTestSuite(); // finish last suite
    }
  }

  @Override
  public void testStarted(Description description) throws Exception {
    final String testSuiteName = makeTestSuiteName(description);

    // Each distinct test class (suite) gets its own XML document; close the previous
    // suite when the class name changes.
    if (currentTestSuiteName == null) {
      startTestSuite(testSuiteName); // start first suite

    } else if (!currentTestSuiteName.equals(testSuiteName)) {
      finishTestSuite(); // finish old suite
      startTestSuite(testSuiteName); // start new suite
    }

    currentTestSuiteName = testSuiteName;

    startTestCase(description);
  }

  // Prefixes the class name with "multi."/"single." depending on the cloud size.
  private String makeTestSuiteName(Description description) {
    String clusterType = TestUtil.MINCLOUDSIZE > 1 ? "multi" : "single";
    return clusterType + "." + description.getClassName();
  }

  @Override
  public void testFinished(Description description) {
    finishTestCase();
    recordTestCaseSuccess();
  }

  @Override
  public void testFailure(Failure failure) {
    finishTestCase();
    recordTestCaseFailure(failure);
  }

  private void startTestSuite(String testSuiteName) throws Exception {
    testSuiteStartTime = System.currentTimeMillis();

    document = docBuilder.newDocument();

    // test suite
    testSuiteElement = document.createElement("testsuite");
    document.appendChild(testSuiteElement);

    testSuiteElement.setAttribute("name", StringEscapeUtils.escapeXml(testSuiteName));
    testSuiteElement.setAttribute("timestamp",
        StringEscapeUtils.escapeXml(dateFormat.format(new Date(testSuiteStartTime))));
    testSuiteElement.setAttribute("hostname",
        StringEscapeUtils.escapeXml(InetAddress.getLocalHost().getHostName()));
    testSuiteElement.setAttribute("ncpu",
        StringEscapeUtils.escapeXml(Integer.toString(Runtime.getRuntime().availableProcessors())));
    testSuiteElement.setAttribute("activecpu",
        StringEscapeUtils.escapeXml(Integer.toString(H2ORuntime.availableProcessors())));

    // system properties
    Element propertiesElement = document.createElement("properties");
    testSuiteElement.appendChild(propertiesElement);

    for (String name : System.getProperties().stringPropertyNames()) {
      Element propertyElement = document.createElement("property");
      propertyElement.setAttribute("name", StringEscapeUtils.escapeXml(name));
      propertyElement.setAttribute("value", StringEscapeUtils.escapeXml(System.getProperty(name)));
      propertiesElement.appendChild(propertyElement);
    }

    // reset counters
    testCount = 0;
    successCount = 0;
    failureCount = 0;

    // redirect outputs; TeeOutputStream still forwards to the console
    stdOut = System.out;
    out = new ByteArrayOutputStream();
    System.setOut(new PrintStream(new TeeOutputStream(out, stdOut), true));

    stdErr = System.err;
    err = new ByteArrayOutputStream();
    System.setErr(new PrintStream(new TeeOutputStream(err, stdErr), true));
  }

  private void finishTestSuite() throws Exception {
    double time = (System.currentTimeMillis() - testSuiteStartTime) / 1000.0;
    testSuiteElement.setAttribute("time", "" + time);

    // save counters
    long errorCount = testCount - successCount - failureCount;
    testSuiteElement.setAttribute("tests", "" + testCount);
    testSuiteElement.setAttribute("failures", "" + failureCount);
    testSuiteElement.setAttribute("errors", "" + errorCount);

    // restore outputs
    System.setOut(stdOut);
    System.setErr(stdErr);

    Element systemOutElement = document.createElement("system-out");
    testSuiteElement.appendChild(systemOutElement);
    systemOutElement.appendChild(
        document.createCDATASection(out.toString())
    );

    Element systemErrElement = document.createElement("system-err");
    testSuiteElement.appendChild(systemErrElement);
    systemErrElement.appendChild(
        document.createCDATASection(err.toString())
    );

    // write to file; try-with-resources guarantees the writer is closed even
    // when the transform fails (the original code leaked the FileWriter on error)
    try (FileWriter fw = new FileWriter(
        reportsDir + File.separator + "TEST-" + currentTestSuiteName + ".xml"
    )) {
      StreamResult sr = new StreamResult(fw);
      DOMSource source = new DOMSource(document);
      trans.transform(source, sr);
    }
  }

  private void startTestCase(Description description) {
    testCaseStartTime = System.currentTimeMillis();

    testCaseElement = document.createElement("testcase");
    testSuiteElement.appendChild(testCaseElement);

    testCaseElement.setAttribute("classname", description.getClassName());
    testCaseElement.setAttribute("name", description.getMethodName());

    testCount++;
  }

  private void finishTestCase() {
    double time = (System.currentTimeMillis() - testCaseStartTime) / 1000.0;
    testCaseElement.setAttribute("time", "" + time);
  }

  private void recordTestCaseSuccess() {
    successCount++;
  }

  // Appends a <failure> element with the exception type, message and location.
  private void recordTestCaseFailure(Failure failure) {
    Element failureElement = document.createElement("failure");
    testCaseElement.appendChild(failureElement);

    Description description = failure.getDescription();
    String exceptionName = failure.getException().getClass().getName();

    failureElement.setAttribute("message", failure.getMessage());
    failureElement.setAttribute("type", exceptionName);

    Text messageElement = document.createTextNode(
        exceptionName + ": " + failure.getMessage() + "\n\tat " +
            description.getClassName() + "." + description.getMethodName() + "()"
    );
    failureElement.appendChild(messageElement);

    failureCount++;
  }
}
/** Output stream which duplicate given input to given target streams.
*
* WARNING: do not close underlying streams! */
class TeeOutputStream extends OutputStream {
final private OutputStream[] targets;
public TeeOutputStream(OutputStream... targets) {
this.targets = targets;
}
@Override
public void write(int b) throws IOException {
for (OutputStream target : targets)
target.write(b);
}
@Override
public void flush() throws IOException {
for (OutputStream target : targets)
target.flush();
}
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/CheckLeakedKeysRule.java
|
package water.junit.rules;
import org.junit.rules.ExternalResource;
import org.junit.runner.Description;
import water.Key;
import water.Value;
import water.fvec.Frame;
import water.fvec.Vec;
import water.junit.Priority;
import water.junit.rules.tasks.CheckKeysTask;
import water.junit.rules.tasks.CollectBeforeTestKeysTask;
import water.runner.H2ORunner;
import water.util.Log;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
/**
 * JUnit rule that fails a test when it leaves keys behind in the DKV.
 * Before the test it snapshots the keys present on every node
 * ({@link CollectBeforeTestKeysTask}); after the test it diffs against the
 * snapshot ({@link CheckKeysTask}) and throws when non-ignorable keys remain.
 */
@Priority(RulesPriorities.CHECK_LEAKED_KEYS)
public class CheckLeakedKeysRule extends ExternalResource {

  @Override
  protected void before() throws Throwable {
    // Record the keys existing on every node before the test runs.
    new CollectBeforeTestKeysTask().doAllNodes();
  }

  @Override
  protected void after() {
    checkLeakedKeys(H2ORunner.currentTest.get());
  }

  // Fails the test (IllegalStateException) when any keys created during the test survive it.
  private void checkLeakedKeys(final Description description) {
    final CheckKeysTask checkKeysTask = new CheckKeysTask().doAllNodes();
    if (checkKeysTask.leakedKeys.length == 0) {
      return;
    }
    printLeakedKeys(checkKeysTask.leakedKeys, checkKeysTask.leakInfos);
    throw new IllegalStateException(String.format("Test method '%s.%s' leaked %d keys.", description.getTestClass().getName(), description.getMethodName(), checkKeysTask.leakedKeys.length));
  }

  // Logs a readable breakdown of the leak: frames first (with their vectors, chunks and
  // rollup stats), then whatever keys could not be attributed to a frame.
  private void printLeakedKeys(final Key[] leakedKeys, final CheckKeysTask.LeakInfo[] leakInfos) {
    final Set<Key> leakedKeysSet = new HashSet<>(leakedKeys.length);
    leakedKeysSet.addAll(Arrays.asList(leakedKeys));
    for (Key key : leakedKeys) {
      final Value keyValue = Value.STORE_get(key);
      if (keyValue != null && keyValue.isFrame()) {
        Frame frame = (Frame) key.get();
        Log.err(String.format("Leaked frame with key '%s' and columns '%s'. This frame contains the following vectors:",
                frame._key.toString(), Arrays.toString(frame.names())));

        for (Key vecKey : frame.keys()) {
          // Only report vectors that actually leaked themselves.
          if (!leakedKeysSet.contains(vecKey)) continue;
          Log.err(String.format("   Vector '%s'. This vector contains the following chunks:", vecKey.toString()));

          final Vec vec = (Vec) vecKey.get();
          for (int i = 0; i < vec.nChunks(); i++) {
            final Key chunkKey = vec.chunkKey(i);
            if (!leakedKeysSet.contains(chunkKey)) continue;
            Log.err(String.format("       Chunk id %d, key '%s'", i, chunkKey));
            // Remove reported keys so they do not show up again in the uncategorized list below.
            leakedKeysSet.remove(chunkKey);
          }

          if (leakedKeysSet.contains(vec.rollupStatsKey())) {
            Log.err(String.format("       Rollup stats '%s'", vec.rollupStatsKey().toString()));
            leakedKeysSet.remove(vec.rollupStatsKey());
          }
          leakedKeysSet.remove(vecKey);
        }
        leakedKeysSet.remove(key);
      }
    }

    if (!leakedKeysSet.isEmpty()) {
      Log.err(String.format("%nThere are %d uncategorized leaked keys detected:", leakedKeysSet.size()));
    }

    for (Key key : leakedKeysSet) {
      Log.err(String.format("Key '%s' of type %s.", key.toString(), key.valueClass()));
    }

    for (CheckKeysTask.LeakInfo leakInfo : leakInfos) {
      Log.err(String.format("Leak info for key '%s': %s", leakedKeys[leakInfo._keyIdx], leakInfo));
    }
  }

}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/DKVIsolation.java
|
package water.junit.rules;
import org.junit.Ignore;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import water.DKVManager;
import water.Key;
import water.junit.Priority;
import water.junit.rules.tasks.ClearBeforeTestKeysTask;
/**
 * Rule that resets the DKV before each test so every test starts from a clean
 * key-value store, optionally keeping a fixed set of keys alive.
 */
@Ignore @Priority(RulesPriorities.DKV_ISOLATION)
public class DKVIsolation implements TestRule {

  final Key[] retainedKeys;

  public DKVIsolation(Key... retainedKeys) {
    this.retainedKeys = retainedKeys;
  }

  @Override
  public Statement apply(Statement base, Description description) {
    // Wrap the test statement: reset the store first, then run the test itself.
    return new Statement() {
      @Override
      public void evaluate() throws Throwable {
        resetKeys();
        base.evaluate();
      }
    };
  }

  protected void resetKeys() {
    // Drop the pre-test key snapshot on all nodes (keeping only retained keys, if any)...
    new ClearBeforeTestKeysTask(retainedKeys).doAllNodes();
    // ...then reset the DKV itself accordingly.
    if (retainedKeys.length == 0) {
      DKVManager.clear();
    } else {
      DKVManager.retain(retainedKeys);
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/RulesPriorities.java
|
package water.junit.rules;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
/**
 * For rules, the higher the priority, the earlier the rule is evaluated compared with other rules,
 * meaning its {@link #apply(Statement, Description)} will be called after others,
 * but if it wraps the given statement, then its {@link Statement#evaluate()} logic will be the outermost one.
 **/
public final class RulesPriorities {
  // Constants holder; never instantiated.
  private RulesPriorities() {}

  /**
   * All rules with lower priority will be skipped if the test itself is skipped.
   */
  public static final int RUN_TEST = 1_000;
  /**
   * The highest possible priority for a non-ignored test, and therefore the first applied,
   * ensuring that all other rules are applied in a clean DKV state.
   */
  public static final int DKV_ISOLATION = RUN_TEST - 1;
  /**
   * The highest possible priority that can safely be skipped if the test is ignored.
   */
  public static final int CHECK_LEAKED_KEYS = RUN_TEST - 2;
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/ScopeTracker.java
|
package water.junit.rules;
import org.junit.Ignore;
import org.junit.rules.ExternalResource;
import water.Keyed;
import water.Scope;
import water.fvec.Frame;
import java.io.Serializable;
@Ignore // just a simple class that avoid the classic try-Scope.enter-finally-Scope.exit pattern
public class ScopeTracker extends ExternalResource implements Serializable {

  @Override
  protected void before() {
    // Start from a clean slate, then open the scope the test will track keys in.
    Scope.reset();
    Scope.enter();
  }

  @Override
  protected void after() {
    Scope.exit();
    // Any nested Scope.enter() inside the test must have been matched by an exit.
    assert Scope.nLevel() == 0: "at least one nested Scope was not exited properly: "+Scope.nLevel();
  }

  /** Tracks the given frame in the current scope so it is cleaned up when the scope exits. */
  public final Frame track(Frame frame) { // no varargs (no use in tests) as the Java compiler is misleading: when calling `track(fr)` it prefers the signature with generic to the signature with Frame varargs.
    return Scope.track(frame);
  }

  /** Tracks any {@link Keyed} object in the current scope. */
  @SuppressWarnings("unchecked")
  public final <T extends Keyed<T>> T track(T keyed) {
    return Scope.track_generic(keyed);
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/tasks/CheckKeysTask.java
|
package water.junit.rules.tasks;
import org.junit.Ignore;
import water.*;
import water.fvec.Vec;
import water.util.ArrayUtils;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
/**
 * MRTask that, on every node, diffs the current local key set against the pre-test
 * snapshot recorded by {@link CollectBeforeTestKeysTask} and collects the keys that
 * leaked (excluding ignorable leaks such as vec groups or stopped jobs).
 */
@Ignore
public class CheckKeysTask extends MRTask<CheckKeysTask> {
  // Keys that appeared during the test and are considered real leaks.
  public Key[] leakedKeys;
  // Optional extra diagnostics for some leaked keys (see makeLeakInfo).
  public LeakInfo[] leakInfos;

  /**
   * Determines if a key leak is ignorable
   *
   * @param key A leaked key
   * @param value An instance of {@link Value} associated with the key
   * @return True if the leak is considered to be ignorable, otherwise false
   */
  protected static boolean isIgnorableKeyLeak(final Key key, final Value value) {
    return value == null || value.isVecGroup() || value.isESPCGroup() || key.equals(Job.LIST) ||
            (value.isJob() && value.<Job>get().isStopped());
  }

  // Merge per-node results into flat arrays.
  @Override
  public void reduce(CheckKeysTask mrt) {
    leakedKeys = ArrayUtils.append(leakedKeys, mrt.leakedKeys);
    leakInfos = ArrayUtils.append(leakInfos, mrt.leakInfos);
  }

  @Override
  protected void setupLocal() {
    // Diff this node's current keys against the pre-test snapshot.
    final Set<Key> initKeys = LocalTestRuntime.beforeTestKeys;
    final Set<Key> keysAfterTest = H2O.localKeySet();

    Set<Key> leaks = new HashSet<>(keysAfterTest);
    leaks.removeAll(initKeys);

    final int numLeakedKeys = leaks.size();
    leakedKeys = numLeakedKeys > 0 ? new Key[numLeakedKeys] : new Key[]{};
    leakInfos = new LeakInfo[]{};
    if (numLeakedKeys > 0) {
      int leakedKeysPointer = 0;

      for (Key key : leaks) {
        final Value keyValue = Value.STORE_get(key);
        if (!isIgnorableKeyLeak(key, keyValue)) {
          leakedKeys[leakedKeysPointer] = key;
          LeakInfo leakInfo = makeLeakInfo(leakedKeysPointer, keyValue);
          if (leakInfo != null) {
            leakInfos = ArrayUtils.append(leakInfos, leakInfo);
          }
          leakedKeysPointer++;
        }
      }
      // Shrink the array when some of the diffed keys turned out to be ignorable.
      if (leakedKeysPointer < numLeakedKeys) leakedKeys = Arrays.copyOfRange(leakedKeys, 0, leakedKeysPointer);
    }
  }

  // Builds extra diagnostics for specific leak kinds; currently only rollup-stats leaks.
  private LeakInfo makeLeakInfo(int keyIdx, Value value) {
    if (value == null)
      return null;
    String vClass = value.className();
    switch (vClass) {
      case "water.fvec.RollupStats":
        @SuppressWarnings("unchecked")
        Key<Vec> vecKey = Vec.getVecKey(leakedKeys[keyIdx]);
        return new LeakInfo(keyIdx, vecKey, String.valueOf(value.get()));
      default:
        return null;
    }
  }

  /** Diagnostic record for a single leaked key: owning node, owning vector and textual info. */
  public static class LeakInfo extends Iced<LeakInfo> {
    public final int _keyIdx;      // index into CheckKeysTask.leakedKeys
    public final Key<Vec> _vecKey; // owning vector key, when known
    public final int _nodeId;      // node on which the leak was observed
    public final String _info;

    private LeakInfo(int keyIdx, Key<Vec> vecKey, String info) {
      _keyIdx = keyIdx;
      _vecKey = vecKey;
      _nodeId = H2O.SELF.index();
      _info = info;
    }

    @Override
    public String toString() {
      return "nodeId=" + _nodeId + ", vecKey=" + String.valueOf(_vecKey) + ", _info='" + _info;
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/tasks/CleanAllKeysTask.java
|
package water.junit.rules.tasks;
import org.junit.Ignore;
import water.H2O;
import water.MRTask;
/** MRTask that wipes all node-local test state: the pre-test key snapshot,
 *  the local K/V store, and the vec ESPC cache. */
@Ignore
public class CleanAllKeysTask extends MRTask<CleanAllKeysTask> {
  @Override
  protected void setupLocal() {
    LocalTestRuntime.beforeTestKeys.clear();
    H2O.raw_clear();
    water.fvec.Vec.ESPC.clear();
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/tasks/ClearBeforeTestKeysTask.java
|
package water.junit.rules.tasks;
import org.junit.Ignore;
import water.Key;
import water.MRTask;
import java.util.Arrays;
/** MRTask that clears the node-local pre-test key snapshot,
 *  optionally keeping only the given retained keys. */
@Ignore
public class ClearBeforeTestKeysTask extends MRTask<ClearBeforeTestKeysTask> {
  private final Key[] _retainedKeys;

  public ClearBeforeTestKeysTask(Key... retainedKeys) {
    _retainedKeys = retainedKeys;
  }

  @Override
  protected void setupLocal() {
    // When keys are retained, keep only those in the snapshot; otherwise drop it entirely.
    if (_retainedKeys.length > 0)
      LocalTestRuntime.beforeTestKeys.retainAll(Arrays.asList(_retainedKeys));
    else
      LocalTestRuntime.beforeTestKeys.clear();
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/tasks/CollectBeforeTestKeysTask.java
|
package water.junit.rules.tasks;
import org.junit.Ignore;
import water.H2O;
import water.MRTask;
/** MRTask that records, on every node, the keys present before a test starts
 *  (used later by CheckKeysTask to detect leaks). */
@Ignore
public class CollectBeforeTestKeysTask extends MRTask<CollectBeforeTestKeysTask> {
  @Override
  protected void setupLocal() {
    LocalTestRuntime.beforeTestKeys.addAll(H2O.localKeySet());
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/junit/rules/tasks/LocalTestRuntime.java
|
package water.junit.rules.tasks;
import org.junit.Ignore;
import water.Key;
import java.util.HashSet;
import java.util.Set;
@Ignore
class LocalTestRuntime {
  // Keys present on this node before the current test started; used afterwards to detect leaks.
  static Set<Key> beforeTestKeys = new HashSet<>();
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/runner/CloudSize.java
|
package water.runner;
import org.junit.Ignore;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Minimal required cloud size for a JUnit test to run on
 */
@Ignore
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface CloudSize {
  /** The minimal number of nodes; must be greater than zero. */
  int value();
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/runner/H2ORunner.java
|
package water.runner;
import org.junit.AfterClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.internal.AssumptionViolatedException;
import org.junit.internal.runners.model.EachTestNotifier;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.junit.runners.model.TestClass;
import water.TestUtil;
import water.junit.Priority;
import water.junit.rules.CheckLeakedKeysRule;
import java.util.*;
@Ignore
public class H2ORunner extends BlockJUnit4ClassRunner {
public static final ThreadLocal<Description> currentTest = new ThreadLocal<>();
  /**
   * Creates a BlockJUnit4ClassRunner to run {@code klass}
   *
   * @param klass the test class to run
   * @throws InitializationError if the test class is malformed.
   */
  public H2ORunner(Class<?> klass) throws InitializationError {
    super(klass);
  }
  /** Wraps the statement so that {@code @AfterClass} methods are run through {@code H2ORunnerAfterClass}. */
  @Override
  protected Statement withAfterClasses(Statement statement) {
    final List<FrameworkMethod> afters = getTestClass()
            .getAnnotatedMethods(AfterClass.class);
    return new H2ORunnerAfterClass(statement, afters, null);
  }
  /** Runs a single test method, reporting ignored tests and delegating real runs to {@link #leaf}. */
  @Override
  protected void runChild(FrameworkMethod method, RunNotifier notifier) {
    final Description description = describeChild(method);
    if (isIgnored(method)) {
      notifier.fireTestIgnored(description);
    } else {
      leaf(methodBlock(method), description, notifier);
    }
  }
@Override
protected List<TestRule> getTestRules(Object target) {
List<TestRule> rules = new ArrayList<>();
rules.add(new CheckLeakedKeysRule());
rules.addAll(super.getTestRules(target));
if (!(target instanceof TestUtil)) {
// add rules defined in TestUtil
rules.addAll(new TestClass(DefaultRulesBlueprint.class)
.getAnnotatedFieldValues(DefaultRulesBlueprint.INSTANCE, Rule.class, TestRule.class));
}
rules.sort(new Comparator<TestRule>() {
/**
* sort rules from lower (or no priority) to higher priority rules
* so that the latter ones can be "applied" last and therefore "evaluated" first (=outermost rules)
* **/
@Override
public int compare(TestRule lhs, TestRule rhs) {
int lp = 0, rp = 0;
if (lhs.getClass().isAnnotationPresent(Priority.class)) lp = lhs.getClass().getAnnotation(Priority.class).value();
if (rhs.getClass().isAnnotationPresent(Priority.class)) rp = rhs.getClass().getAnnotation(Priority.class).value();
return lp - rp;
}
});
return rules;
}
public static class DefaultRulesBlueprint extends TestUtil {
private static final DefaultRulesBlueprint INSTANCE = new DefaultRulesBlueprint();
}
/**
* Runs a {@link Statement} that represents a leaf (aka atomic) test.
*/
private void leaf(Statement statement, Description description,
RunNotifier notifier) {
TestUtil.stall_till_cloudsize(fetchCloudSize());
final EachTestNotifier eachNotifier = new EachTestNotifier(notifier, description);
eachNotifier.fireTestStarted();
try {
currentTest.set(description);
statement.evaluate();
} catch (AssumptionViolatedException e) {
eachNotifier.addFailedAssumption(e);
} catch (Throwable e) {
eachNotifier.addFailure(e);
} finally {
currentTest.remove();
eachNotifier.fireTestFinished();
}
}
private int fetchCloudSize() {
final CloudSize annotation = getTestClass().getAnnotation(CloudSize.class);
if (annotation == null) {
return 1;
}
final int cloudSize = annotation.value();
if (cloudSize < 1)
throw new IllegalStateException("@CloudSize annotation must specify sizes greater than zero. Given value: " + cloudSize);
return cloudSize;
}
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/runner/H2ORunnerAfterClass.java
|
package water.runner;
import org.junit.Ignore;
import org.junit.internal.runners.statements.RunAfters;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.MultipleFailureException;
import org.junit.runners.model.Statement;
import water.junit.rules.tasks.CleanAllKeysTask;
import java.util.ArrayList;
import java.util.List;
/**
 * JUnit {@code RunAfters} variant that, after the test class finishes, first wipes all
 * keys left in the distributed store on every node and only then invokes the
 * {@code @AfterClass} methods, collecting every failure along the way.
 */
@Ignore
public class H2ORunnerAfterClass extends RunAfters {

  private final Statement next;
  private final Object target;
  private final List<FrameworkMethod> afters;

  /**
   * @param next   statement wrapping the execution of the whole test class
   * @param afters {@code @AfterClass} methods to invoke once tests finished
   * @param target receiver for the after-methods ({@code null} for static methods)
   */
  public H2ORunnerAfterClass(Statement next, List<FrameworkMethod> afters, Object target) {
    super(next, afters, target);
    this.next = next;
    this.afters = afters;
    this.target = target;
  }

  @Override
  public void evaluate() throws Throwable {
    final List<Throwable> failures = new ArrayList<>();
    try {
      next.evaluate();
    } catch (Throwable t) {
      failures.add(t);
    } finally {
      // Clean all keys shared for the whole test class, created during @BeforeClass,
      // but not cleaned in @AfterClass.
      try {
        new CleanAllKeysTask().doAllNodes();
      } catch (Throwable t) {
        failures.add(t);
      }
      for (final FrameworkMethod after : afters) {
        try {
          after.invokeExplosively(target);
        } catch (Throwable t) {
          failures.add(t);
        }
      }
    }
    MultipleFailureException.assertEmpty(failures);
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/WebsocketClient.java
|
package water.test;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.log4j.Logger;
import org.glassfish.tyrus.client.ClientManager;
import water.H2O;
import javax.websocket.*;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import java.util.Optional;
import static org.junit.Assert.fail;
/**
 * Websocket test client for H2O's Steam endpoint ({@code /3/Steam.websocket}).
 * <p>
 * Sends JSON-encoded messages and buffers at most one received message at a time;
 * tests consume it via {@link #waitToReceiveMessage(String, int, boolean)}. If a second
 * message arrives before the first was consumed, it is stashed in {@code overflowMessage}
 * and reported as a test failure on the next wait.
 */
public class WebsocketClient extends Endpoint {
  private static final Logger LOG = Logger.getLogger(WebsocketClient.class);

  private final Session sess;
  private final Gson gson = new Gson();
  // last parsed message, null once consumed; guarded by synchronized(this)
  private Map<String, String> receivedMessage;
  // raw message that arrived while receivedMessage was still unconsumed
  private String overflowMessage;

  public WebsocketClient() throws URISyntaxException, IOException, DeploymentException {
    String destUri = "ws://" + H2O.getIpPortString() + "/3/Steam.websocket";
    ClientManager client = ClientManager.createClient();
    ClientEndpointConfig cec = ClientEndpointConfig.Builder.create().build();
    sess = client.connectToServer(this, cec, new URI(destUri));
  }

  /** Closes the websocket session if it is still open. */
  public void close() throws IOException {
    if (sess.isOpen()) {
      sess.close(new CloseReason(CloseReason.CloseCodes.NORMAL_CLOSURE, "Test Done"));
    }
  }

  @OnOpen
  public void onOpen(Session session, EndpointConfig config) {
    session.addMessageHandler(String.class, message -> {
      LOG.info("Received message from H2O: " + message);
      synchronized (this) {
        if (receivedMessage != null) {
          LOG.info("Received message not stored as last message was not picked up yet.");
          overflowMessage = message;
        } else {
          receivedMessage = gson.fromJson(message, new TypeToken<Map<String, String>>() {}.getType());
        }
        this.notifyAll();
      }
    });
  }

  /** Serializes {@code msg} to JSON and sends it over the websocket. */
  public void sendMessage(Object msg) throws IOException {
    final String msgStr = gson.toJson(msg);
    LOG.info("Sending message to H2O: " + msgStr);
    sess.getBasicRemote().sendText(msgStr);
  }

  public Map<String, String> waitToReceiveMessage(String message) {
    return waitToReceiveMessage(message, 100_000);
  }

  public Map<String, String> waitToReceiveMessage(String message, int timeoutMillis) {
    return waitToReceiveMessage(message, timeoutMillis, true).get();
  }

  /**
   * Waits up to {@code timeoutMillis} for a message and consumes it.
   *
   * @param message    description of the expected message (used only in the failure text)
   * @param failOnNone when true, a timeout fails the test instead of returning empty
   * @return the consumed message, or empty when none arrived and {@code failOnNone} is false
   */
  public synchronized Optional<Map<String, String>> waitToReceiveMessage(String message, int timeoutMillis, boolean failOnNone) {
    if (overflowMessage != null) {
      fail("Message received but not handled: " + overflowMessage);
    }
    // Wait in a loop with a deadline. A single unconditional wait() would (a) block the
    // full timeout even when a message was already buffered before this call and
    // (b) return prematurely on a spurious wake-up (see Object.wait contract).
    final long deadline = System.currentTimeMillis() + timeoutMillis;
    long remaining = timeoutMillis;
    while (receivedMessage == null && remaining > 0) {
      try {
        this.wait(remaining);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        break; // give up waiting but still report whatever we have
      }
      remaining = deadline - System.currentTimeMillis();
    }
    if (receivedMessage != null) {
      Map<String, String> res = receivedMessage;
      receivedMessage = null;
      return Optional.of(res);
    } else if (failOnNone) {
      fail("Expected " + message + ", but no message received from H2O.");
    }
    return Optional.empty();
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/dummy/DummyAction.java
|
package water.test.dummy;
import water.Iced;
/**
 * Pluggable side effect executed by the dummy model machinery during training.
 * <p>
 * Subclasses implement {@link #run(DummyModelParameters)} to perform the action and
 * return a message (stored in the model output); {@link #cleanUp()} is invoked when
 * the model is removed and may undo the action's side effects.
 */
public abstract class DummyAction<T> extends Iced<DummyAction<T>> {

  protected abstract String run(DummyModelParameters parms);

  protected void cleanUp() {
    // default: nothing to clean up
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/dummy/DummyExtension.java
|
package water.test.dummy;
import water.AbstractH2OExtension;
/**
 * H2O extension whose only job is to register the test-only {@link DummyModelBuilder}
 * algorithm when the node starts up.
 */
public class DummyExtension extends AbstractH2OExtension {

  @Override
  public String getExtensionName() {
    return "dummy";
  }

  @Override
  public void init() {
    // as a side effect DummyModelBuilder will be registered in a static field ModelBuilder.ALGOBASES
    new DummyModelBuilder(new DummyModelParameters(), true);
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/dummy/DummyModel.java
|
package water.test.dummy;
import hex.Model;
import hex.ModelMetrics;
import hex.ModelMetricsBinomial;
import water.Futures;
import water.Key;
/**
 * Trivial model used by tests: scoring is an identity pass-through and metrics
 * are produced with the standard binomial metric builder.
 */
public class DummyModel extends Model<DummyModel, DummyModelParameters, DummyModelOutput> {
  public DummyModel(Key<DummyModel> selfKey, DummyModelParameters parms, DummyModelOutput output) {
    super(selfKey, parms, output);
  }
  @Override
  public ModelMetrics.MetricBuilder makeMetricBuilder(String[] domain) {
    return new ModelMetricsBinomial.MetricBuilderBinomial(domain);
  }
  /** Identity scoring: returns {@code preds} unchanged. */
  @Override
  protected double[] score0(double[] data, double[] preds) { return preds; }
  @Override
  protected Futures remove_impl(Futures fs, boolean cascade) {
    super.remove_impl(fs, cascade);
    // let the configured training-time action undo whatever it installed
    if (_parms._action != null) {
      _parms._action.cleanUp();
    }
    return fs;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/dummy/DummyModelBuilder.java
|
package water.test.dummy;
import hex.Model;
import hex.ModelBuilder;
import hex.ModelCategory;
import jsr166y.CountedCompleter;
import water.Job;
/**
 * Test-only model builder: optionally simulates job cancellation, runs a configured
 * {@link DummyAction}, and (unless disabled via {@code _makeModel}) produces a trivial
 * {@link DummyModel} carrying the action's message.
 */
public class DummyModelBuilder
        extends ModelBuilder<DummyModel, DummyModelParameters, DummyModelOutput> {
  public DummyModelBuilder(DummyModelParameters parms) {
    super(parms);
    init(false);
  }
  /** Registration constructor; with startup_once=true the algo is registered globally. */
  public DummyModelBuilder(DummyModelParameters parms, boolean startup_once ) { super(parms,startup_once); }
  @Override
  protected Driver trainModelImpl() {
    return new Driver() {
      @Override
      public void computeImpl() {
        // simulate a user-cancelled job if requested by the test
        if (_parms._cancel_job)
          throw new Job.JobCancelledException();
        String msg = null;
        // run the configured side-effect action; its message ends up in the model output
        if (_parms._action != null)
          msg = _parms._action.run(_parms);
        if (! _parms._makeModel)
          return;
        init(true);
        Model model = null;
        try {
          // lock the model in DKV while publishing it, unlock in finally even on failure
          model = new DummyModel(dest(), _parms, new DummyModelOutput(DummyModelBuilder.this, train(), msg));
          model.delete_and_lock(_job);
          model.update(_job);
        } finally {
          if (model != null)
            model.unlock(_job);
        }
      }
      @Override
      public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller) {
        // optional hook for tests that verify failure handling
        if (_parms._on_exception_action != null) {
          _parms._on_exception_action.run(_parms);
        }
        return super.onExceptionalCompletion(ex, caller);
      }
    };
  }
  @Override
  public ModelCategory[] can_build() {
    return new ModelCategory[0];
  }
  @Override
  public boolean isSupervised() {
    return true;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/dummy/DummyModelOutput.java
|
package water.test.dummy;
import hex.Model;
import hex.ModelBuilder;
import hex.ModelCategory;
import water.fvec.Frame;
/**
 * Output of the dummy model: a fixed Binomial/supervised shell carrying the message
 * produced by the training-time {@link DummyAction} (may be null).
 */
public class DummyModelOutput extends Model.Output {
  /** Message returned by the training-time action; null when no action ran. */
  public final String _msg;
  public DummyModelOutput(ModelBuilder b, Frame train, String msg) {
    super(b, train);
    _msg = msg;
  }
  @Override
  public ModelCategory getModelCategory() {
    return ModelCategory.Binomial;
  }
  @Override
  public boolean isSupervised() {
    return true;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/dummy/DummyModelParameters.java
|
package water.test.dummy;
import hex.Model;
import water.Key;
/**
 * Parameters of the test-only "dummymodelbuilder" algorithm.
 * The fields steer what {@link DummyModelBuilder} does while "training".
 */
public class DummyModelParameters extends Model.Parameters {
  // action executed during training; its message ends up in the model output
  public DummyAction _action;
  // when true, training creates and publishes a DummyModel
  public boolean _makeModel;
  // when true, training immediately throws Job.JobCancelledException
  public boolean _cancel_job;
  // free-form parameters used by tests that exercise parameter handling
  public String _column_param;
  public String[] _column_list_param;
  public String _dummy_string_param;
  public String[] _dummy_string_array_param;
  // action invoked when training completes exceptionally
  public DummyAction _on_exception_action;
  public DummyModelParameters() {}
  /** Convenience: install a {@link MessageInstallAction} writing {@code msg} under key {@code trgt}. */
  public DummyModelParameters(String msg, Key trgt) {
    _action = new MessageInstallAction(trgt, msg);
  }
  @Override public String fullName() { return algoName(); }
  @Override public String algoName() { return "dummymodelbuilder"; }
  @Override public String javaName() { return DummyModelBuilder.class.getName(); }
  @Override public long progressUnits() { return 1; }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/dummy/MessageInstallAction.java
|
package water.test.dummy;
import water.DKV;
import water.Key;
import water.parser.BufferedString;
/**
 * Test action that publishes a marker string ("Computed " + msg) into DKV under the
 * given key while the dummy model trains, and removes it again on model cleanup.
 */
public class MessageInstallAction extends DummyAction<MessageInstallAction> {

  private final Key _trgt;
  private final String _msg;

  public MessageInstallAction(Key trgt, String msg) {
    _trgt = trgt;
    _msg = msg;
  }

  @Override
  protected String run(DummyModelParameters parms) {
    final BufferedString payload = new BufferedString("Computed " + _msg);
    DKV.put(_trgt, payload);
    return _msg;
  }

  @Override
  protected void cleanUp() {
    DKV.remove(_trgt);
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/util/ConfusionMatrixUtils.java
|
package water.test.util;
import hex.ConfusionMatrix;
import org.junit.Assert;
import water.MRTask;
import water.Scope;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.ArrayUtils;
/** Test helpers for building and asserting confusion matrices from categorical vecs. */
public class ConfusionMatrixUtils {
  /**
   * Build the CM data from the actuals and predictions, using the default
   * threshold. Print to Log.info if the number of classes is below the
   * print_threshold. Actuals might have extra levels not trained on (hence
   * never predicted). Actuals with NAs are not scored, and their predictions
   * ignored.
   */
  public static ConfusionMatrix buildCM(Vec actuals, Vec predictions) {
    if (!actuals.isCategorical()) throw new IllegalArgumentException("actuals must be categorical.");
    if (!predictions.isCategorical()) throw new IllegalArgumentException("predictions must be categorical.");
    Scope.enter();
    try {
      // remap prediction levels onto the actuals' domain so both columns share one domain
      Vec adapted = predictions.adaptTo(actuals.domain());
      int len = actuals.domain().length;
      Frame fr = new Frame(actuals);
      fr.add("C2", adapted);
      CMBuilder cm = new CMBuilder(len).doAll(fr);
      return new ConfusionMatrix(cm._arr, actuals.domain());
    } finally {
      Scope.exit();
    }
  }
  /** Asserts domain equality and cell-by-cell equality (tolerance 1e-10) of a confusion matrix. */
  public static void assertCMEqual(String[] expectedDomain, double[][] expectedCM, ConfusionMatrix actualCM) {
    Assert.assertArrayEquals("Expected domain differs", expectedDomain, actualCM._domain);
    double[][] acm = actualCM._cm;
    Assert.assertEquals("CM dimension differs", expectedCM.length, acm.length);
    for (int i=0; i < acm.length; i++) Assert.assertArrayEquals("CM row " +i+" differs!", expectedCM[i], acm[i],1e-10);
  }
  /** MRTask that tallies an actuals-by-predicted count matrix over (actuals, predicted) chunk pairs. */
  private static class CMBuilder extends MRTask<CMBuilder> {
    final int _len;
    double _arr[/*actuals*/][/*predicted*/];
    CMBuilder(int len) {
      _len = len;
    }
    @Override
    public void map(Chunk ca, Chunk cp) {
      // After adapting frames, the Actuals have all the levels in the
      // prediction results, plus any extras the model was never trained on.
      // i.e., Actual levels are at least as big as the predicted levels.
      // NOTE(review): _arr is (re)allocated on every map() call — this assumes each
      // task clone maps exactly one chunk pair; confirm against MRTask semantics.
      _arr = new double[_len][_len];
      for (int i = 0; i < ca._len; i++)
        if (!ca.isNA(i))
          _arr[(int) ca.at8(i)][(int) cp.at8(i)]++;
    }
    @Override
    public void reduce(CMBuilder cm) {
      ArrayUtils.add(_arr, cm._arr);
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/util/GridTestUtils.java
|
package water.test.util;
import org.junit.Assert;
import org.junit.Ignore;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import hex.Model;
/**
 * Helper functions for grid testing: collect hyper-parameter values actually used by
 * built models (via reflection) and compare them against expectations.
 */
@Ignore("Support for tests, but no actual tests here")
public class GridTestUtils {

  /** Creates a map from each hyper-parameter name to an (initially empty) set of observed values. */
  public static Map<String, Set<Object>> initMap(String[] paramNames) {
    Map<String, Set<Object>> modelParams = new HashMap<>();
    for (String name : paramNames) {
      modelParams.put(name, new HashSet<>());
    }
    return modelParams;
  }

  /**
   * Reads the given public parameter fields from {@code modelParams} via reflection and
   * records each observed value into {@code params}.
   *
   * @return {@code params} for chaining
   * @throws IllegalArgumentException if a field is missing or inaccessible
   */
  public static <P extends Model.Parameters> Map<String, Set<Object>> extractParams(Map<String, Set<Object>> params,
                                                                                    P modelParams,
                                                                                    String[] paramNames) {
    try {
      for (String paramName : paramNames) {
        Field f = modelParams.getClass().getField(paramName);
        params.get(paramName).add(f.get(modelParams));
      }
      return params;
    } catch (NoSuchFieldException | IllegalAccessException e) {
      // multi-catch: both reflection failures mean the caller passed a bad field name
      throw new IllegalArgumentException(e);
    }
  }

  /**
   * Asserts that the hyper-parameter names and the sets of used values match the
   * expectation. Values are sorted before comparison; names are compared in
   * map-iteration order (both maps are expected to be built with the same keys).
   */
  public static void assertParamsEqual(String message, Map<String, Object[]> expected, Map<String, Set<Object>> actual) {
    String[] expectedNames = expected.keySet().toArray(new String[0]);
    String[] actualNames = actual.keySet().toArray(new String[0]);
    Assert.assertArrayEquals(message + ": names of used hyper parameters have to match",
        expectedNames,
        actualNames);
    for (String name : expectedNames) {
      Object[] expectedValues = expected.get(name);
      Arrays.sort(expectedValues);
      Object[] actualValues = actual.get(name).toArray(new Object[0]);
      Arrays.sort(actualValues);
      Assert.assertArrayEquals(message + ": used hyper values have to match",
          expectedValues,
          actualValues);
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/test/util/NaiveTreeSHAP.java
|
package water.test.util;
import ai.h2o.algos.tree.INode;
import ai.h2o.algos.tree.INodeStat;
import org.junit.Ignore;
import java.util.*;
/**
 * Reference (exponential-time) TreeSHAP implementation used to validate the fast
 * algorithm: contributions are computed straight from the Shapley-value definition
 * by enumerating every subset of the features the tree splits on.
 *
 * @param <R> row type used to navigate the tree
 * @param <N> tree node type
 * @param <S> node-statistic type (supplies node weights)
 */
@Ignore
public class NaiveTreeSHAP<R, N extends INode<R>, S extends INodeStat> {
  private final int rootNodeId;
  private final N[] nodes;  // all tree nodes, indexed by node id
  private final S[] stats;  // per-node statistics, parallel to nodes
  public NaiveTreeSHAP(N[] nodes, S[] stats, int rootNodeId) {
    this.rootNodeId = rootNodeId;
    this.nodes = nodes;
    this.stats = stats;
  }
  /**
   * Accumulates naive SHAP contributions for {@code row} into {@code contribsNaive}
   * (the last element receives the tree-bias term) and returns the expected value of
   * the tree with all used features active. Exponential in the number of used features.
   */
  public double calculateContributions(R row, double[] contribsNaive) {
    final Set<Integer> usedFeatures = usedFeatures();
    final int M = usedFeatures.size();
    // last element is the bias term
    contribsNaive[contribsNaive.length - 1] += treeMeanValue() /* tree bias */;
    // pre-calculate expValue for each subset
    Map<Set<Integer>, Double> expVals = new HashMap<>();
    for (Set<Integer> subset : allSubsets(usedFeatures)) {
      expVals.put(subset, expValue(row, subset));
    }
    // calculate contributions using pre-calculated expValues
    for (Integer feature : usedFeatures) {
      for (Set<Integer> subset : expVals.keySet()) {
        if (subset.contains(feature)) {
          Set<Integer> noFeature = new HashSet<>(subset);
          noFeature.remove(feature);
          // Shapley weight |S|!*(M-|S|-1)!/M!  — NOTE(review): fact() uses int
          // arithmetic and overflows for M > 12; assumes tests use few features — confirm
          double mult = fact(noFeature.size()) * (long) fact(M - subset.size()) / (double) fact(M);
          double contrib = mult * (expVals.get(subset) - expVals.get(noFeature));
          contribsNaive[feature] += contrib;
        }
      }
    }
    // expValue of a tree with all features marked as used should sum-up to the total prediction
    return expValue(row, usedFeatures);
  }
  /** Expected tree value for {@code v} with only features in {@code s} fixed. */
  private double expValue(R v, Set<Integer> s) {
    return expValue(rootNodeId, v, s, 1.0);
  }
  /** v! computed iteratively in int arithmetic (overflows for v > 12). */
  private static int fact(int v) {
    int f = 1;
    for (int i = 1; i <= v; i++) {
      f *= i;
    }
    return f;
  }
  /** Enumerates all 2^|s| subsets of {@code s} via a bitmask loop. */
  private static List<Set<Integer>> allSubsets(Set<Integer> s) {
    List<Set<Integer>> result = new LinkedList<>();
    Integer[] ary = s.toArray(new Integer[0]);
    // Run a loop from 0 to 2^n
    for (int i = 0; i < (1<<ary.length); i++) {
      Set<Integer> subset = new HashSet<>(s.size());
      int m = 1;
      for (Integer item : ary) {
        if ((i & m) > 0) {
          subset.add(item);
        }
        m = m << 1;
      }
      result.add(subset);
    }
    return result;
  }
  /**
   * Collects the split indices appearing in the tree.
   * NOTE(review): iterates over all nodes including leaves — assumes a leaf's
   * getSplitIndex() returns a value that is harmless to include; confirm.
   */
  private Set<Integer> usedFeatures() {
    Set<Integer> features = new HashSet<>();
    for(N n : nodes) {
      features.add(n.getSplitIndex());
    }
    return features;
  }
  /**
   * Expected value of the subtree at {@code node} for row {@code v}: features in
   * {@code s} follow the row's split decisions; excluded features are marginalized
   * by descending both children weighted by their node-weight proportions.
   */
  private double expValue(int node, R v, Set<Integer> s, double w) {
    final INode<R> n = nodes[node];
    if (n.isLeaf()) {
      return w * n.getLeafValue();
    } else {
      if (s.contains(n.getSplitIndex())) {
        return expValue(n.next(v), v, s, w);
      } else {
        double wP = stats[node].getWeight();
        double wL = stats[n.getLeftChildIndex()].getWeight();
        double wR = stats[n.getRightChildIndex()].getWeight();
        return expValue(n.getLeftChildIndex(), v, s, w * wL / wP) +
                expValue(n.getRightChildIndex(), v, s, w * wR / wP);
      }
    }
  }
  /** Weight-averaged mean of all leaf values (the tree-bias term). */
  private double treeMeanValue() {
    return nodeMeanValue(rootNodeId);
  }
  private double nodeMeanValue(int node) {
    final INode n = nodes[node];
    if (n.isLeaf()) {
      return n.getLeafValue();
    } else {
      return (stats[n.getLeftChildIndex()].getWeight() * nodeMeanValue(n.getLeftChildIndex()) +
              stats[n.getRightChildIndex()].getWeight() * nodeMeanValue(n.getRightChildIndex())) / stats[node].getWeight();
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/CBlock.java
|
package water.udf;
import org.junit.Ignore;
import water.fvec.Chunk;
/**
 * Read-only window over an array of chunks ({@code len} columns starting at {@code off}),
 * exposing row-wise access for UDF tests via a single reusable {@link CRow} cursor.
 */
@Ignore("Support for tests, but no actual tests here")
public class CBlock {

  private final Chunk[] chunks;
  private final int off;
  private final int len;
  private final CRow cursor = new CRow();

  protected CBlock(Chunk[] c) {
    this(c, 0, c.length);
  }

  protected CBlock(Chunk[] c, int off, int len) {
    assert c != null : "Chunk array cannot be null!";
    this.chunks = c;
    this.off = off;
    this.len = len;
  }

  /** Number of columns in this window. */
  public int columns() {
    return len;
  }

  /** Number of rows, taken from the first chunk. */
  public int rows() {
    return chunks[0]._len;
  }

  private Chunk column(int col) {
    return chunks[off + col];
  }

  /** Positions the shared cursor at row {@code idx} and returns it (not thread-safe). */
  public CRow row(int idx) {
    return cursor.setRow(idx);
  }

  /** Cursor over a single row of the block. */
  public class CRow {
    private int row;

    public double readDouble(int col) {
      return column(col).atd(row);
    }

    public long readLong(int col) {
      return column(col).at8(row);
    }

    public double[] readDoubles() {
      final double[] values = new double[len()];
      for (int col = 0; col < len; col++) {
        values[col] = readDouble(col);
      }
      return values;
    }

    private CRow setRow(int row) {
      this.row = row;
      return this;
    }

    public int len() {
      return len;
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/CFunc1.java
|
package water.udf;
import org.junit.Ignore;
/** Unary user-defined function: one row of input columns in, one numeric value out. */
@Ignore("Support for tests, but no actual tests here")
public interface CFunc1 extends CFunc {
  /** Computes a single numeric value from one row. */
  double apply(CBlock.CRow row);
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/CFunc1Task.java
|
package water.udf;
import org.junit.Ignore;
import water.fvec.Chunk;
import water.fvec.NewChunk;
/**
 * MRTask that applies a unary user-defined function ({@link CFunc1}) to a window of
 * {@code len} columns (starting at {@code ofs}) of every chunk, writing the result
 * into a new numeric column.
 */
@Ignore("Support for tests, but no actual tests here")
public class CFunc1Task extends CFuncTask<CFunc1, CFunc1Task> {

  private final int ofs;
  private final int len;

  public CFunc1Task(CFuncRef cFuncRef, int ofs, int len) {
    super(cFuncRef);
    this.ofs = ofs;
    this.len = len;
  }

  /** Convenience: window starts at column 0. */
  public CFunc1Task(CFuncRef cFuncRef, int len) {
    this(cFuncRef, 0, len);
  }

  @Override
  public void map(Chunk c[], NewChunk nc) {
    final CBlock block = new CBlock(c, ofs, len);
    final int rows = block.rows();
    for (int r = 0; r < rows; r++) {
      nc.addNum(func.apply(block.row(r)));
    }
  }

  @Override
  protected Class<CFunc1> getFuncType() {
    return CFunc1.class;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/CFunc2.java
|
package water.udf;
import org.junit.Ignore;
/** Binary user-defined function: a pair of rows (one per input block) in, one numeric value out. */
@Ignore("Support for tests, but no actual tests here")
public interface CFunc2 extends CFunc {
  /** Computes a single numeric value from a pair of rows. */
  double apply(CBlock.CRow row1, CBlock.CRow row2);
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/CFunc2Task.java
|
package water.udf;
import org.junit.Ignore;
import water.fvec.Chunk;
import water.fvec.NewChunk;
/**
 * MRTask that applies a binary user-defined function ({@link CFunc2}) to two column
 * windows of each chunk and writes the result into a new numeric column.
 */
@Ignore("Support for tests, but no actual tests here")
public class CFunc2Task extends CFuncTask<CFunc2, CFunc2Task> {

  private final int ofs1;
  private final int len1;
  private final int ofs2;
  private final int len2;

  public CFunc2Task(CFuncRef cFuncRef, int ofs1, int len1, int ofs2, int len2) {
    super(cFuncRef);
    this.ofs1 = ofs1;
    this.len1 = len1;
    this.ofs2 = ofs2;
    this.len2 = len2;
  }

  /** Convenience: first window starts at column 0, second starts right after it. */
  public CFunc2Task(CFuncRef cFuncRef, int len1, int len2) {
    this(cFuncRef, 0, len1, len1, len2);
  }

  @Override
  public void map(Chunk c[], NewChunk nc) {
    final CBlock left = new CBlock(c, ofs1, len1);
    final CBlock right = new CBlock(c, ofs2, len2);
    final int rows = left.rows();
    for (int r = 0; r < rows; r++) {
      nc.addNum(func.apply(left.row(r), right.row(r)));
    }
  }

  @Override
  protected Class<CFunc2> getFuncType() {
    return CFunc2.class;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/CustomMetricUtils.java
|
package water.udf;
import hex.*;
import org.junit.Assert;
import water.DKV;
import water.Key;
import water.TestUtil;
import water.fvec.Frame;
import water.udf.metric.MEACustomMetric;
import water.util.FrameUtils;
import java.io.IOException;
import java.util.Arrays;
import static water.udf.JFuncUtils.loadTestFunc;
/**
 * Support for custom-metric tests: defines a "null" model whose scoring always returns
 * zeros, so a custom metric applied to it has a predictable value.
 */
public class CustomMetricUtils {
  /**
   * Trains the null model on iris with a custom metric function and asserts the
   * resulting metric equals the mean of the response column (predictions are all 0).
   */
  static void testNullModelRegression(final CFuncRef func) throws Exception {
    final Frame f = TestUtil.Datasets.iris();
    Frame pred = null; Model model = null;
    try {
      // double-brace initialization: anonymous subclass used purely to set fields inline
      NullModelParameters params = new NullModelParameters() {{
        _train = f._key;
        _response_column = "sepal_len";
        _custom_metric_func = func.toRef();
      }};
      model = new NullModelBuilder(params).trainModel().get();
      pred = model.score(f, null, null, true, func);
      Assert.assertEquals("Null model generates only a single model metrics",
          1, model._output.getModelMetrics().length);
      ModelMetrics mm = model._output.getModelMetrics()[0].get();
      Assert.assertEquals("Custom model metrics should compute mean of response column",
          f.vec("sepal_len").mean(), mm._custom_metric.value, 1e-8);
    } finally {
      // always release the frames/model and the uploaded function definition
      FrameUtils.delete(f, pred, model);
      DKV.remove(func.getKey());
    }
  }
  /** Minimal model output shell for the null model. */
  static class NullModelOutput extends Model.Output {
    public NullModelOutput(ModelBuilder b) {
      super(b);
    }
  }
  /** Parameters of the null model; only identity/bookkeeping overrides. */
  static class NullModelParameters extends Model.Parameters {
    @Override public String fullName() { return "nullModel"; }
    @Override public String algoName() { return "nullModel"; }
    @Override public String javaName() { return NullModelBuilder.class.getName(); }
    @Override public long progressUnits() { return 1; }
  }
  /** Model whose score0 fills predictions with zeros regardless of input. */
  static class NullModel extends Model<NullModel, NullModelParameters, NullModelOutput> {
    public NullModel(Key<NullModel> selfKey, NullModelParameters parms, NullModelOutput output) {
      super(selfKey, parms, output);
    }
    @Override
    public ModelMetrics.MetricBuilder makeMetricBuilder(String[] domain) {
      // pick the standard metric builder matching the output's model category
      switch(_output.getModelCategory()) {
        case Binomial: return new ModelMetricsBinomial.MetricBuilderBinomial(domain);
        case Multinomial: return new ModelMetricsMultinomial.MetricBuilderMultinomial(_output.nclasses(), domain, _parms._auc_type);
        case Regression: return new ModelMetricsRegression.MetricBuilderRegression();
        default: return null;
      }
    }
    @Override
    protected double[] score0(double[] data, double[] preds) {
      Arrays.fill(preds, 0);
      return preds;
    }
  }
  /** Builder that "trains" the null model by simply publishing it to DKV. */
  static class NullModelBuilder extends ModelBuilder<NullModel, NullModelParameters, NullModelOutput> {
    public NullModelBuilder(NullModelParameters parms) {
      super(parms);
      init(false);
    }
    @Override
    public void init(boolean expensive) {
      super.init(expensive);
    }
    @Override
    protected Driver trainModelImpl() {
      return new Driver() {
        @Override
        public void computeImpl() {
          init(true);
          NullModel model = new NullModel(dest(), _parms, new NullModelOutput(NullModelBuilder.this));
          try {
            model.delete_and_lock(_job);
          } finally {
            model.unlock(_job);
          }
        }
      };
    }
    @Override
    public ModelCategory[] can_build() {
      return new ModelCategory[]{
          ModelCategory.Regression,
          ModelCategory.Binomial,
          ModelCategory.Multinomial,
      };
    }
    @Override
    public boolean isSupervised() {
      return true;
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/JFuncUtils.java
|
package water.udf;
import org.apache.commons.io.IOUtils;
import org.junit.Ignore;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import water.DKV;
import water.Key;
import water.Value;
import water.util.ArrayUtils;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Helpers for packaging test UDFs into in-memory jar files and publishing them in the
 * K/V store so that distributed custom-function loading can be exercised in tests.
 */
@Ignore("Support for tests, but no actual tests here")
public class JFuncUtils {
  /**
   * Load given jar file into K/V store under given name as untyped Value.
   * @param keyName name of key
   * @param testJarPath path to jar file on the context classpath
   * @return KV-store key referencing loaded jar-file
   * @throws IOException if the resource is missing or cannot be read
   */
  public static Key loadTestJar(String keyName, String testJarPath) throws IOException {
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    // try-with-resources closes the stream even when reading fails; a missing resource
    // previously caused an NPE here — now reported as a descriptive IOException
    try (InputStream is = cl.getResourceAsStream(testJarPath)) {
      if (is == null) {
        throw new IOException("Resource not found on classpath: " + testJarPath);
      }
      byte[] ba = IOUtils.toByteArray(is);
      Key key = Key.make(keyName);
      DKV.put(key, new Value(key, ba));
      return key;
    }
  }
  /**
   * Load test function given as Class reference.
   * The method get the class via resources, and store it in K/V under given key name.
   *
   * @param keyName name of key to store the class in K/V
   * @param klazz class to save into K/V
   * @return test function definition
   * @throws IOException if the class bytes cannot be located or packaged
   */
  public static CFuncRef loadTestFunc(String keyName, Class klazz) throws IOException {
    String klazzName = klazz.getName().replaceAll("\\.", "/") + ".class";
    return loadTestFunc("java", keyName, new String[] {klazzName}, klazz.getName());
  }
  /**
   * Packages the given classpath resources into an in-memory jar, stores the jar bytes
   * in K/V under {@code keyName} and returns a reference to the entry function.
   */
  public static CFuncRef loadTestFunc(String lang, String keyName, String[] resourcePaths, String entryFuncName) throws IOException {
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    // Output jar in-memory jar file
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    // Save all resources from classpath to jar-file
    try (JarOutputStream jos = new JarOutputStream(bos)) {
      for (String resourcePath : resourcePaths) {
        byte[] ba;
        try (InputStream is = cl.getResourceAsStream(resourcePath)) {
          if (is == null) {
            throw new IOException("Resource not found on classpath: " + resourcePath);
          }
          ba = IOUtils.toByteArray(is);
        }
        jos.putNextEntry(new ZipEntry(resourcePath));
        jos.write(ba);
      }
    }
    Key key = Key.make(keyName);
    DKV.put(key, new Value(key, bos.toByteArray()));
    return new CFuncRef(lang, keyName, entryFuncName);
  }
  /**
   * Wraps raw function bytes into an in-memory jar (as entry {@code pathInJar}),
   * stores the jar in K/V and returns a reference to the function.
   */
  public static CFuncRef loadRawTestFunc(String lang, String keyName, String funcName, byte[] rawDef, String pathInJar)
      throws IOException {
    // Output jar in-memory jar file
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (JarOutputStream jos = new JarOutputStream(bos)) {
      jos.putNextEntry(new ZipEntry(pathInJar));
      jos.write(rawDef);
    }
    Key key = Key.make(keyName);
    DKV.put(key, new Value(key, bos.toByteArray()));
    return new CFuncRef(lang, keyName, funcName);
  }
  public static ClassLoader getSkippingClassloader(ClassLoader parent,
                                                   final String[] skipClassNames) {
    return getSkippingClassloader(parent, skipClassNames, new String[] {});
  }
  /**
   * Returns a classloader that pretends the listed classes/resources do not exist.
   * NOTE: returning null from loadClass deviates from the ClassLoader contract
   * (it should throw ClassNotFoundException); kept intentionally for these tests.
   */
  public static ClassLoader getSkippingClassloader(ClassLoader parent,
                                                   final String[] skipClassNames,
                                                   final String[] skipResources) {
    return new ClassLoader(parent) {
      @Override
      protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        // For test classes avoid loading from parent
        return ArrayUtils.contains(skipClassNames, name) ? null : super.loadClass(name, resolve);
      }
      @Override
      public URL getResource(String name) {
        return ArrayUtils.contains(skipResources, name) ? null : super.getResource(name);
      }
    };
  }
  /** Builds a Mockito-mocked CRow of the given length whose every column reads {@code value}. */
  static CBlock.CRow mockedRow(int len, double value) {
    CBlock.CRow row = mock(CBlock.CRow.class);
    when(row.len()).thenReturn(len);
    when(row.readDouble(anyInt())).thenReturn(value);
    when(row.readDoubles()).thenReturn(ArrayUtils.constAry(len, value));
    return row;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/UdfUtils.java
|
package water.udf;
import water.Scope;
import water.fvec.Vec;
import java.lang.reflect.Method;
/** Misc helpers for UDF tests. */
public class UdfUtils {
  /**
   * Registers the Vec behind the given holder for cleanup via {@link Scope}, provided
   * the holder exposes a no-arg {@code vec()} method; the holder itself is returned
   * unchanged. Reflection is used so the holder's own code stays untouched.
   */
  public static <T> T willDrop(T vh) {
    try {
      final Method vecAccessor = vh.getClass().getMethod("vec");
      Scope.track((Vec) vecAccessor.invoke(vh));
    } catch (Exception ignored) {
      // holders without an accessible vec() method are simply passed through
    }
    return vh;
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/metric/BernoulliCustomDistribution.java
|
package water.udf.metric;
import org.junit.Ignore;
import water.udf.CDistributionFunc;
/**
 * Custom Bernoulli (logit-link) distribution implementation used to test the
 * custom-distribution plug-in mechanism. exp/log are clamped to avoid infinities.
 */
@Ignore("Support for tests, but no actual tests here")
public class BernoulliCustomDistribution implements CDistributionFunc {
  public double MIN_LOG = -19;
  public double MAX = 1e19;

  /** exp(x), clamped from above at {@link #MAX}. */
  public double exp(double x) {
    return Math.min(MAX, Math.exp(x));
  }

  /** log of max(0, x), clamped from below at {@link #MIN_LOG}. */
  public double log(double x) {
    final double nonNeg = Math.max(0, x);
    if (nonNeg == 0) {
      return MIN_LOG;
    }
    return Math.max(MIN_LOG, Math.log(nonNeg));
  }

  @Override
  public String link() {
    return "logit";
  }

  @Override
  public double[] init(double w, double o, double y) {
    return new double[]{w * (y - o), w};
  }

  @Override
  public double gradient(double y, double f) {
    final double p = 1 / (1 + exp(-f));
    return y - p;
  }

  @Override
  public double gradient(double y, double f, int l) {
    return gradient(y, f);
  }

  @Override
  public double[] gamma(double w, double y, double z, double f) {
    final double p = y - z;
    return new double[]{w * z, w * p * (1 - p)};
  }
}
|
0
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf
|
java-sources/ai/h2o/h2o-test-support/3.46.0.7/water/udf/metric/MEACustomMetric.java
|
package water.udf.metric;
import org.junit.Ignore;
import hex.Model;
import water.udf.CMetricFunc;
import water.util.ArrayUtils;
@Ignore("Support for tests, but no actual tests here")
public class MEACustomMetric implements CMetricFunc {
  /** Per-row contribution: {absolute error of the first prediction, count of 1}. */
  @Override
  public double[] map(double[] preds, float[] yact, double weight, double offset, Model m) {
    final double absError = Math.abs(preds[0] - yact[0]);
    return new double[]{absError, 1};
  }

  /** Accumulates r into l element-wise; l is reused as the combined result. */
  @Override
  public double[] reduce(double[] l, double[] r) {
    ArrayUtils.add(l, r);
    return l;
  }

  /** Final metric: total absolute error divided by the row count (mean absolute error). */
  @Override
  public double metric(double[] r) {
    final double totalAbsError = r[0];
    final double rowCount = r[1];
    return totalAbsError / rowCount;
  }
}
|
0
|
java-sources/ai/h2o/h2o-tree-api/0.3.20/ai/h2o/algos
|
java-sources/ai/h2o/h2o-tree-api/0.3.20/ai/h2o/algos/tree/INode.java
|
package ai.h2o.algos.tree;
/**
 * A single node of a tree model, generic over the input representation
 * {@code T} used when navigating from a node to one of its children.
 */
public interface INode<T> {
  /** @return true if this node is a terminal (leaf) node */
  boolean isLeaf();
  /** @return value stored in this node; meaningful only when {@link #isLeaf()} is true */
  float getLeafValue();
  /** @return index of the feature this node splits on; meaningful only for internal nodes */
  int getSplitIndex();
  /** @return index of the child node to visit next for the given input value */
  int next(T value);
  /** @return index of this node's left child; meaningful only for internal nodes */
  int getLeftChildIndex();
  /** @return index of this node's right child; meaningful only for internal nodes */
  int getRightChildIndex();
}
|
0
|
java-sources/ai/h2o/h2o-tree-api/0.3.20/ai/h2o/algos
|
java-sources/ai/h2o/h2o-tree-api/0.3.20/ai/h2o/algos/tree/INodeStat.java
|
package ai.h2o.algos.tree;
/** Statistics attached to a tree node. */
public interface INodeStat {
  /** @return weight of this node — presumably the (weighted) observation count that reached it; confirm with implementations */
  float getWeight();
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/config/ConnectionConfiguration.java
|
package water.webserver.config;
import water.webserver.iface.H2OHttpConfig;
public class ConnectionConfiguration {
private final boolean _secure;
private final String _scheme;
public ConnectionConfiguration(boolean isSecured) {
_secure = isSecured;
_scheme = isSecured ? "https" : "http";
}
public boolean isSecure() {
return _secure;
}
public int getRequestHeaderSize() {
return getSysPropInt( "requestHeaderSize", 32 * 1024);
}
public int getRequestBufferSize() {
return getSysPropInt( "requestBufferSize", 32 * 1024);
}
public int getResponseHeaderSize() {
return getSysPropInt("responseHeaderSize", 32 * 1024);
}
public int getOutputBufferSize(int defaultOutputBufferSize) {
return getSysPropInt("responseBufferSize", defaultOutputBufferSize);
}
public int getIdleTimeout() {
return getSysPropInt("jetty.idleTimeout", 5 * 60 * 1000);
}
public boolean isRelativeRedirectAllowed() {
return getSysPropBool("relativeRedirectAllowed", true);
}
private int getSysPropInt(String suffix, int defaultValue) {
return Integer.parseInt(
getProperty(H2OHttpConfig.SYSTEM_PROP_PREFIX + _scheme + "." + suffix, String.valueOf(defaultValue))
);
}
private boolean getSysPropBool(String suffix, boolean defaultValue) {
return Boolean.parseBoolean(
getProperty(H2OHttpConfig.SYSTEM_PROP_PREFIX + _scheme + "." + suffix, String.valueOf(defaultValue))
);
}
protected String getProperty(String name, String defaultValue) {
return System.getProperty(name, defaultValue);
}
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/Credentials.java
|
package water.webserver.iface;
import org.apache.commons.codec.binary.Base64;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
/**
* Representation of the User-Password pair
*/
public class Credentials {
  private final String _user;
  private final String _password;

  public Credentials(String _user, String _password) {
    this._user = _user;
    this._password = _password;
  }

  /** @return value for an HTTP {@code Authorization} header using the Basic scheme. */
  public String toBasicAuth() {
    return "Basic " + base64EncodeToString(_user + ":" + _password);
  }

  /** @return one line for a Jetty-style hash file: {@code user: MD5:<hex digest>} plus a newline. */
  public String toHashFileEntry() {
    return _user + ": " + credentialMD5digest(_password) + "\n";
  }

  /** WARNING: exposes the plain-text password — for debugging only; never log this in production. */
  public String toDebugString() {
    return "Credentials[_user='" + _user + "', _password='" + _password + "']";
  }

  /**
   * This replaces Jetty's B64Code.encode().
   * Uses the JDK's {@code java.util.Base64} (Java 8+) instead of Apache commons-codec;
   * both produce unchunked standard Base64, so the output is byte-identical and the
   * third-party dependency is no longer needed here.
   */
  private static String base64EncodeToString(String s) {
    final byte[] bytes = s.getBytes(StandardCharsets.ISO_8859_1);
    return java.util.Base64.getEncoder().encodeToString(bytes);
  }

  // following part is copied out of Jetty's class org.eclipse.jetty.util.security.Credential$MD5, because we cannot depend on the library
  private static final String __TYPE = "MD5:";
  private static final Object __md5Lock = new Object();
  private static MessageDigest __md;

  /**
   * This replaces Jetty's Credential.MD5.digest().
   * NOTE: MD5 is cryptographically broken; it is kept only for compatibility with
   * Jetty's hash-file format.
   */
  private static String credentialMD5digest(String password) {
    try {
      byte[] digest;
      // MessageDigest instances are not thread-safe; guard the shared lazily-created one.
      synchronized (__md5Lock) {
        if (__md == null) {
          try {
            __md = MessageDigest.getInstance("MD5");
          } catch (Exception e) {
            throw new IllegalStateException(e);
          }
        }
        __md.reset();
        __md.update(password.getBytes(StandardCharsets.ISO_8859_1));
        digest = __md.digest();
      }
      return __TYPE + toString(digest, 16);
    } catch (Exception e) {
      throw new IllegalStateException(e);
    }
  }

  /** Renders each byte as two lowercase digits in the given base (16 gives hex). */
  private static String toString(byte[] bytes, int base)
  {
    StringBuilder buf = new StringBuilder();
    for (byte b : bytes)
    {
      int bi = 0xff & b;
      int c = '0' + (bi / base) % base;
      if (c > '9')
        c = 'a' + (c - '0' - 10);
      buf.append((char) c);
      c = '0' + bi % base;
      if (c > '9')
        c = 'a' + (c - '0' - 10);
      buf.append((char) c);
    }
    return buf.toString();
  }
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/H2OHttpConfig.java
|
package water.webserver.iface;
/**
* Holds configuration relevant to HTTP server.
*/
public class H2OHttpConfig {
  /**
   * Prefix of hidden system properties, same as in H2O.OptArgs.SYSTEM_PROP_PREFIX.
   */
  public static final String SYSTEM_PROP_PREFIX = "sys.ai.h2o.";
  public String jks;               // path to a Java keystore — presumably; verify against launcher args
  public String jks_pass;
  public String jks_alias;
  public LoginType loginType;
  public String login_conf;
  public String spnego_properties;
  public boolean form_auth;
  public int session_timeout; // parsed value (in minutes)
  public String user_name;
  public String context_path;
  public boolean ensure_daemon_threads = false;

  /**
   * Field-by-field equality over all configuration values.
   * Simplified from the IntelliJ-generated null-check chains to
   * {@code java.util.Objects.equals}; behavior is identical.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    H2OHttpConfig that = (H2OHttpConfig) o;
    return form_auth == that.form_auth
        && session_timeout == that.session_timeout
        && ensure_daemon_threads == that.ensure_daemon_threads
        && java.util.Objects.equals(jks, that.jks)
        && java.util.Objects.equals(jks_pass, that.jks_pass)
        && java.util.Objects.equals(jks_alias, that.jks_alias)
        && loginType == that.loginType
        && java.util.Objects.equals(login_conf, that.login_conf)
        && java.util.Objects.equals(spnego_properties, that.spnego_properties)
        && java.util.Objects.equals(user_name, that.user_name)
        && java.util.Objects.equals(context_path, that.context_path);
  }

  /**
   * Hash consistent with {@link #equals(Object)}. Uses {@code Objects.hashCode}
   * for the nullable fields; values are identical to the original
   * {@code x != null ? x.hashCode() : 0} form.
   */
  @Override
  public int hashCode() {
    int result = java.util.Objects.hashCode(jks);
    result = 31 * result + java.util.Objects.hashCode(jks_pass);
    result = 31 * result + java.util.Objects.hashCode(jks_alias);
    result = 31 * result + java.util.Objects.hashCode(loginType);
    result = 31 * result + java.util.Objects.hashCode(login_conf);
    result = 31 * result + java.util.Objects.hashCode(spnego_properties);
    result = 31 * result + (form_auth ? 1 : 0);
    result = 31 * result + session_timeout;
    result = 31 * result + java.util.Objects.hashCode(user_name);
    result = 31 * result + java.util.Objects.hashCode(context_path);
    result = 31 * result + (ensure_daemon_threads ? 1 : 0);
    return result;
  }

  // NOTE: includes jks_pass in plain text; avoid logging this in production.
  @Override // autogenerated by IntelliJ
  public String toString() {
    return "H2OHttpConfig{" +
        "jks='" + jks + '\'' +
        ", jks_pass='" + jks_pass + '\'' +
        ", jks_alias='" + jks_alias + '\'' +
        ", loginType=" + loginType +
        ", login_conf='" + login_conf + '\'' +
        ", spnego_properties='" + spnego_properties + '\'' +
        ", form_auth=" + form_auth +
        ", session_timeout=" + session_timeout +
        ", user_name='" + user_name + '\'' +
        ", context_path='" + context_path + '\'' +
        ", ensure_daemon_threads=" + ensure_daemon_threads +
        '}';
  }
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/H2OHttpView.java
|
package water.webserver.iface;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedHashMap;
/**
* Exposes part of H2O functionality for the purposes of HTTP server adapter.
* Contains also logic for handling authentication and other functionality, so that it can be easily shared among various server implementations.
*/
public interface H2OHttpView {
  /**
   * @return configuration related to HTTP server
   */
  H2OHttpConfig getConfig();
  /**
   * @return map of servlets with their context paths
   */
  LinkedHashMap<String, Class<? extends HttpServlet>> getServlets();
  /** @return map of websocket servlet classes keyed by context path, analogous to {@link #getServlets()} */
  LinkedHashMap<String, Class<? extends H2OWebsocketServlet>> getWebsockets();
  /**
   * @return custom authentication extensions if any
   */
  Collection<RequestAuthExtension> getAuthExtensions();
  /** Authentication hook; returns true when the request was fully handled here — exact semantics defined by the implementation. */
  boolean authenticationHandler(HttpServletRequest request, HttpServletResponse response) throws IOException;
  /** Gate-keeping hook invoked per request; true presumably means "handled/blocked" — confirm with the implementation. */
  boolean gateHandler(HttpServletRequest request, HttpServletResponse response);
  /** Handles form-login requests for the given target path; true when the request was handled. */
  boolean loginHandler(String target, HttpServletRequest request, HttpServletResponse response) throws IOException;
  /** Handles proxy-login requests for the given target path; true when the request was handled. */
  boolean proxyLoginHandler(String target, HttpServletRequest request, HttpServletResponse response) throws IOException;
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/H2OWebsocketServlet.java
|
package water.webserver.iface;
/** Server-side websocket endpoint: produces a handler for each new connection. */
public interface H2OWebsocketServlet {
  /** Called when a client connects; @return the handler that will receive this connection's events. */
  WebsocketHandler onConnect(WebsocketConnection conn);
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/HttpServerFacade.java
|
package water.webserver.iface;
/**
* Facade for an HTTP server implementation. We typically use Jetty behind this; however, due to use of various major versions,
* we cannot afford anymore to depend on Jetty directly; the changes between its major versions are as significant
* as if it was a completely different webserver.
*
* This interface is supposed to hide all those dependencies.
*/
public interface HttpServerFacade {
  /**
   * @param h2oHttpView a partial view of H2O's functionality
   * @return a new instance of web server adapter
   */
  WebServer createWebServer(H2OHttpView h2oHttpView);
  /**
   * @param h2oHttpView a partial view of H2O's functionality
   * @param credentials credentials the proxy uses when talking to the proxied node — TODO confirm direction (client vs. upstream)
   * @param proxyTo address the proxy forwards requests to — presumably a URL of the target H2O node; verify with implementations
   * @return a new instance of web proxy adapter
   */
  ProxyServer createProxyServer(H2OHttpView h2oHttpView, Credentials credentials, String proxyTo);
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/HttpServerLoader.java
|
package water.webserver.iface;
import java.util.Iterator;
import java.util.ServiceLoader;
/**
* Finds implementation of {@link HttpServerFacade} found on the classpath.
* There must be exactly one present.
*/
public class HttpServerLoader {

  /** The single {@link HttpServerFacade} implementation discovered on the classpath. */
  public static final HttpServerFacade INSTANCE;

  static {
    // Discover implementations via the standard ServiceLoader mechanism and
    // enforce that exactly one is present: zero or multiple are both fatal.
    final Iterator<HttpServerFacade> implementations =
        ServiceLoader.load(HttpServerFacade.class).iterator();
    if (!implementations.hasNext()) {
      throw new IllegalStateException("HTTP Server cannot be loaded: No implementation of HttpServerFacade found on classpath. Please refer to https://github.com/h2oai/h2o-3/discussions/15522 for details.");
    }
    INSTANCE = implementations.next();
    if (implementations.hasNext()) {
      // List every discovered implementation class name in the error message.
      final StringBuilder names = new StringBuilder(INSTANCE.getClass().getName());
      do {
        names.append(", ");
        names.append(implementations.next().getClass().getName());
      } while (implementations.hasNext());
      throw new IllegalStateException("HTTP Server cannot be loaded: Multiple implementations of HttpServerFacade found on classpath: " + names + ". Please refer to https://github.com/h2oai/h2o-3/discussions/15522 for details.");
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/LoginType.java
|
package water.webserver.iface;
/**
* Supported login methods
*/
public enum LoginType {
NONE(null, false),
HASH(null, false),
LDAP("ldaploginmodule", true),
KERBEROS("krb5loginmodule", true),
SPNEGO(null, true),
PAM("pamloginmodule", true);
public final String jaasRealm;
private final boolean checkUserName;
LoginType(final String jaasRealm, boolean checkUserName) {
this.jaasRealm = jaasRealm;
this.checkUserName = checkUserName;
}
public boolean needToCheckUserName() {
return checkUserName;
}
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/ProxyServer.java
|
package water.webserver.iface;
import java.io.IOException;
/**
* All the functionality that we need to call on an existing instance of HTTP proxy.
*/
public interface ProxyServer {
  /**
   * Starts the proxy listening on the given address.
   *
   * @param ip   interface address to bind to
   * @param port TCP port to listen on
   * @throws IOException when the proxy cannot be started (e.g. the port is taken)
   */
  void start(String ip, int port) throws IOException;
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/RequestAuthExtension.java
|
package water.webserver.iface;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* Extension point for HTTP request handling. Managed by ExtensionManager.
*/
public interface RequestAuthExtension {
  /**
   * Extended handler for customizing HTTP request authentication.
   *
   * @param target request target (path) as reported by the servlet container — TODO confirm exact form
   * @param request the incoming HTTP request
   * @param response the response to write to when handling the request
   * @return true if the request should be considered handled, false otherwise
   * @throws IOException on I/O failure while reading the request or writing the response
   * @throws ServletException on servlet-level processing failure
   */
  boolean handle(String target, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException;
  /**
   * @return True if the given extension is enabled. Otherwise false.
   */
  default boolean isEnabled(){
    return true;
  }
  /**
   * @return name of extension. By default, returns (simple) class name.
   */
  default String getName() {
    return getClass().getSimpleName();
  }
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/WebServer.java
|
package water.webserver.iface;
import java.io.IOException;
/**
* All the functionality that we need to call on an existing instance of HTTP server (servlet container).
*/
public interface WebServer {
  /**
   * Starts the server listening on the given address.
   *
   * @param ip   interface address to bind to
   * @param port TCP port to listen on
   * @throws IOException when the server cannot be started
   */
  void start(String ip, int port) throws IOException;
  /** Stops the server. @throws IOException when shutdown fails */
  void stop() throws IOException;
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/WebsocketConnection.java
|
package water.webserver.iface;
import java.io.IOException;
/** A single open websocket connection, capable of sending text messages to the peer. */
public interface WebsocketConnection {
  /** Sends a text message to the remote peer. @throws IOException on transmission failure */
  void sendMessage(String message) throws IOException;
}
|
0
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver
|
java-sources/ai/h2o/h2o-webserver-iface/3.46.0.7/water/webserver/iface/WebsocketHandler.java
|
package water.webserver.iface;
/** Receives events for one websocket connection (see {@code H2OWebsocketServlet#onConnect}). */
public interface WebsocketHandler {
  /** Called when the given connection closes. */
  void onClose(WebsocketConnection connection);
  /** Called for each text message received from the peer. */
  void onMessage(String message);
}
|
0
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/ai/h2o
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/ai/h2o/libtorch/LibtorchBundle.java
|
package ai.h2o.libtorch;
import com.facebook.soloader.nativeloader.NativeLoader;
import com.facebook.soloader.nativeloader.SystemDelegate;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.pytorch.IValue;
import org.pytorch.Module;
import org.pytorch.Tensor;
/**
 * Wires fbjni's {@link NativeLoader} to scijava's native-lib-loader so the bundled
 * libtorch native libraries can be extracted from the jar and loaded.
 */
public class LibtorchBundle {
  static {
    // Install the delegate as soon as this class is touched.
    init();
  }
  /** Installs {@link MySystemDelegate} as the process-wide loader (no-op if one is already set). */
  public static void init() {
    NativeLoader.initIfUninitialized(new MySystemDelegate());
  }
  /** Delegate that extracts/loads libraries via scijava instead of plain System.loadLibrary. */
  static class MySystemDelegate extends SystemDelegate {
    // Libraries already handed to scijava, to avoid redundant extraction attempts.
    // NOTE(review): not synchronized — assumes loadLibrary is not called concurrently; confirm.
    private final Set<String> alreadyLoaded = new HashSet<>();
    @Override
    public boolean loadLibrary(String shortName, int flags) {
      if (alreadyLoaded.contains(shortName)) {
        return false; // already loaded earlier in this JVM
      }
      try {
        org.scijava.nativelib.NativeLoader.loadLibrary(shortName);
        // Problem 1: scijava only extracts the target SO but not its dependencies
        // Problem 2: even if I do that manually, the call crashes (coredumps)
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
      alreadyLoaded.add(shortName);
      return true;
    }
  }
  /**
   * This entry-point only serves for functionality testing, both during build and on the deployment.
   * It is not meant as a regular CLI.
   * @param args - expects a single argument: path to a TorchScript model file
   */
  public static void main(String[] args) {
    if (args.length != 1) {
      throw new RuntimeException("Expected exactly 1 argument");
    }
    final File file = new File(args[0]).getAbsoluteFile();
    System.err.println("Model file: " + file);
    final Module mod = Module.load(file.getPath());
    // Fixed 2x3 int tensor used as a smoke-test input.
    final Tensor data =
        Tensor.fromBlob(
            new int[] {1, 2, 3, 4, 5, 6}, // data
            new long[] {2, 3} // shape
            );
    final IValue result = mod.forward(IValue.from(data), IValue.from(3.0));
    final Tensor output = result.toTensor();
    System.out.println("shape: " + Arrays.toString(output.shape()));
    System.out.println("data: " + Arrays.toString(output.getDataAsFloatArray()));
  }
}
|
0
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/com/facebook
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/com/facebook/jni/DestructorThread.java
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.jni;
import java.lang.ref.PhantomReference;
import java.lang.ref.ReferenceQueue;
import java.util.concurrent.atomic.AtomicReference;
/**
* A thread which invokes the "destruct" routine for objects after they have been garbage collected.
*
* <p>An object which needs to be destructed should create a static subclass of {@link Destructor}.
* Once the referent object is garbage collected, the DestructorThread will callback to the {@link
* Destructor#destruct()} method.
*
* <p>The underlying thread in DestructorThread starts when the first Destructor is constructed and
* then runs indefinitely.
*/
public class DestructorThread {
  /**
   * N.B The Destructor <b>SHOULD NOT</b> refer back to its referent object either explicitly or
   * implicitly (for example, as a non-static inner class). This will create a reference cycle where
   * the referent object will never be garbage collected.
   */
  public abstract static class Destructor extends PhantomReference<Object> {
    // Doubly-linked-list pointers; 'previous' stays null until the destructor
    // thread moves this entry from the stack into the list.
    private Destructor next;
    private Destructor previous;
    public Destructor(Object referent) {
      super(referent, sReferenceQueue);
      // Register on the lock-free stack; the destructor thread transfers it to the list later.
      sDestructorStack.push(this);
    }
    private Destructor() {
      // Sentinel constructor (used by Terminus): no referent, never pushed on the stack.
      super(null, sReferenceQueue);
    }
    /** Callback which is invoked when the original object has been garbage collected. */
    protected abstract void destruct();
  }
  /** A list to keep all active Destructors in memory confined to the Destructor thread. */
  private static final DestructorList sDestructorList;
  /** A thread safe stack where new Destructors are placed before being add to sDestructorList. */
  private static final DestructorStack sDestructorStack;
  private static final ReferenceQueue sReferenceQueue;
  private static final Thread sThread;
  static {
    sDestructorStack = new DestructorStack();
    sReferenceQueue = new ReferenceQueue();
    sDestructorList = new DestructorList();
    sThread =
        new Thread("HybridData DestructorThread") {
          @Override
          public void run() {
            while (true) {
              try {
                // Blocks until the GC enqueues the Destructor of a collected referent.
                Destructor current = (Destructor) sReferenceQueue.remove();
                current.destruct();
                // If current is in the sDestructorStack,
                // transfer all the Destructors in the stack to the list.
                if (current.previous == null) {
                  sDestructorStack.transferAllToList();
                }
                DestructorList.drop(current);
              } catch (InterruptedException e) {
                // Continue. This thread should never be terminated.
              }
            }
          }
        };
    // Daemon: cleanup work must not keep the JVM alive.
    sThread.setDaemon(true);
    sThread.start();
  }
  /** Sentinel Destructor used as the list's end markers; must never actually be destructed. */
  private static class Terminus extends Destructor {
    @Override
    protected void destruct() {
      throw new IllegalStateException("Cannot destroy Terminus Destructor.");
    }
  }
  /** This is a thread safe, lock-free Treiber-like Stack of Destructors. */
  private static class DestructorStack {
    private final AtomicReference<Destructor> mHead = new AtomicReference<>();
    public void push(Destructor newHead) {
      // Standard CAS-retry push onto the singly-linked head.
      Destructor oldHead;
      do {
        oldHead = mHead.get();
        newHead.next = oldHead;
      } while (!mHead.compareAndSet(oldHead, newHead));
    }
    public void transferAllToList() {
      // Detach the entire stack atomically, then move each entry into the list.
      Destructor current = mHead.getAndSet(null);
      while (current != null) {
        Destructor next = current.next;
        sDestructorList.enqueue(current);
        current = next;
      }
    }
  }
  /** A doubly-linked list of Destructors. */
  private static class DestructorList {
    private final Destructor mHead;
    public DestructorList() {
      // Two sentinels (head and tail); real entries are inserted between them.
      mHead = new Terminus();
      mHead.next = new Terminus();
      mHead.next.previous = mHead;
    }
    public void enqueue(Destructor current) {
      // Insert right after the head sentinel; setting 'previous' marks it as list-resident.
      current.next = mHead.next;
      mHead.next = current;
      current.next.previous = current;
      current.previous = mHead;
    }
    private static void drop(Destructor current) {
      // Unlink so the Destructor object itself becomes collectible.
      current.next.previous = current.previous;
      current.previous.next = current.next;
    }
  }
}
|
0
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/com/facebook
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/com/facebook/jni/package-info.java
|
/**
 * This is the workaround proposed in https://github.com/facebookincubator/fbjni/pull/67
*/
package com.facebook.jni;
|
0
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/org/scijava
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/org/scijava/nativelib/BaseJniExtractor.java
|
/*
* #%L
* Native library loader for extracting and loading native libraries from Java.
* %%
* Copyright (C) 2010 - 2021 Board of Regents of the University of
* Wisconsin-Madison and Glencoe Software, Inc.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
// This code is derived from Richard van der Hoff's mx-native-loader project:
// http://opensource.mxtelecom.com/maven/repo/com/wapmx/native/mx-native-loader/1.7/
// See NOTICE.txt for details.
// Copyright 2006 MX Telecom Ltd
package org.scijava.nativelib;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.Enumeration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Richard van der Hoff (richardv@mxtelecom.com)
*/
public abstract class BaseJniExtractor implements JniExtractor {
private static final Logger LOGGER = LoggerFactory.getLogger(
"org.scijava.nativelib.BaseJniExtractor");
protected static final String JAVA_TMPDIR = "java.io.tmpdir";
protected static final String ALTR_TMPDIR = "."+ NativeLibraryUtil.DELIM + "tmplib";
protected static final String TMP_PREFIX = "nativelib-loader_";
private static final String LEFTOVER_MIN_AGE = "org.scijava.nativelib.leftoverMinAgeMs";
private static final long LEFTOVER_MIN_AGE_DEFAULT = 5 * 60 * 1000; // 5 minutes
private Class<?> libraryJarClass;
/**
* We use a resource path of the form META-INF/lib/${mx.sysinfo}/ This way
* native builds for multiple architectures can be packaged together without
* interfering with each other And by setting mx.sysinfo the jvm can pick the
* native libraries appropriate for itself.
*/
private String[] nativeResourcePaths;
/** Creates an extractor that looks up native resources in the jar of this class. */
public BaseJniExtractor() throws IOException {
  init(null);
}
/** Creates an extractor that looks up native resources in the jar containing the given class. */
public BaseJniExtractor(final Class<?> libraryJarClass) throws IOException {
  init(libraryJarClass);
}
// Shared constructor logic: records the jar-providing class, computes the list of
// classpath roots to probe for native resources, and purges stale extractions.
private void init(final Class<?> libraryJarClass) {
  this.libraryJarClass = libraryJarClass;
  final String mxSysInfo = MxSysInfo.getMxSysInfo();
  if (mxSysInfo != null) {
    // Architecture-specific path is probed after "natives/" but before the generic lib dir.
    nativeResourcePaths =
      new String[] { "natives/", "META-INF/lib/" + mxSysInfo + "/", "META-INF/lib/" };
  }
  else {
    nativeResourcePaths = new String[] { "natives/", "META-INF/lib/" };
  }
  // clean up leftover libraries from previous runs
  deleteLeftoverFiles();
}
/**
 * Recursively deletes a directory tree.
 *
 * @param directory root to delete; null is treated as success
 * @return true when everything (including the root) was removed, false on the
 *         first entry that could not be deleted
 */
private static boolean deleteRecursively(final File directory) {
  if (directory == null) return true;
  final File[] children = directory.listFiles();
  if (children == null) return true; // not a directory (or listing failed): nothing to recurse into
  for (final File child : children) {
    final boolean removed;
    if (child.isFile()) {
      removed = child.delete();
    } else if (child.isDirectory()) {
      removed = deleteRecursively(child);
    } else {
      removed = true; // neither regular file nor directory: skip, same as the original loop
    }
    if (!removed) return false;
  }
  return directory.delete();
}
/**
 * Reserves a unique path under the temporary directory and returns it.
 * The placeholder file is deliberately deleted before returning: callers use the
 * unique name as a directory path and create it themselves (inherently racy, but
 * mitigated by the uniqueness of the name).
 *
 * @return a unique, currently non-existent path under the temp directory
 * @throws IOException when the temp directory cannot be created
 */
protected static File getTempDir() throws IOException {
  // creates a temporary directory for hosting extracted files
  // If system tempdir is not available, use tmplib
  File tmpDir = new File(System.getProperty(JAVA_TMPDIR, ALTR_TMPDIR));
  if (!tmpDir.isDirectory()) {
    tmpDir.mkdirs();
    if (!tmpDir.isDirectory())
      throw new IOException("Unable to create temporary directory " + tmpDir);
  }
  // Fix: create the unique name inside the directory validated above. Previously
  // File.createTempFile(prefix, suffix) always used java.io.tmpdir, silently
  // ignoring the ALTR_TMPDIR fallback path that was just checked/created.
  File tempFile = File.createTempFile(TMP_PREFIX, "", tmpDir);
  tempFile.delete();
  return tempFile;
}
/**
 * This is where native dependencies are extracted to (e.g. tmplib/).
 *
 * @return native working dir; shared by all extracted dependency libraries
 */
public abstract File getNativeDir();
/**
 * This is where JNI libraries are extracted to (e.g.
 * tmplib/classloaderName.1234567890000.0/), i.e. a per-instance directory.
 *
 * @return jni working dir
 */
public abstract File getJniDir();
/**
 * Extracts the JNI library {@code libname} (plus any dependencies listed in its
 * ".DEPENDENCIES" descriptor) from the classpath into {@link #getJniDir()}.
 *
 * @param libPath resource directory on the classpath holding the library (may be empty)
 * @param libname platform-neutral library name, mapped via {@link System#mapLibraryName}
 * @return the extracted file, or {@code null} when the resource cannot be found
 */
@Override
public File extractJni(final String libPath, final String libname)
  throws IOException
{
  String mappedlibName = System.mapLibraryName(libname);
  debug("mappedLib is " + mappedlibName);
  /*
   * On Darwin, the default mapping is to .jnilib; but we use .dylibs so that library interdependencies are
   * handled correctly. if we don't find a .jnilib, try .dylib instead.
   */
  URL lib = null;
  // if no class specified look for resources in the jar of this class
  if (null == libraryJarClass) {
    libraryJarClass = this.getClass();
  }
  // foolproof: avoid a doubled delimiter when libPath is empty or already ends with one
  String combinedPath = (libPath.equals("") || libPath.endsWith(NativeLibraryUtil.DELIM) ?
    libPath : libPath + NativeLibraryUtil.DELIM) + mappedlibName;
  // dependencies are extracted before the library itself
  extractDependenciesFor(combinedPath);
  lib = libraryJarClass.getClassLoader().getResource(combinedPath);
  if (null == lib) {
    /*
     * On OS X, the default mapping changed from .jnilib to .dylib as of JDK 7, so
     * we need to be prepared for the actual library and mapLibraryName disagreeing
     * in either direction.
     */
    final String altLibName;
    if (mappedlibName.endsWith(".jnilib")) {
      altLibName =
        mappedlibName.substring(0, mappedlibName.length() - 7) + ".dylib";
    }
    else if (mappedlibName.endsWith(".dylib")) {
      altLibName =
        mappedlibName.substring(0, mappedlibName.length() - 6) + ".jnilib";
    }
    else {
      altLibName = null;
    }
    if (altLibName != null) {
      lib = getClass().getClassLoader().getResource(libPath + altLibName);
      if (lib != null) {
        mappedlibName = altLibName;
      }
    }
  }
  if (null != lib) {
    debug("URL is " + lib.toString());
    debug("URL path is " + lib.getPath());
    return extractResource(getJniDir(), lib, mappedlibName);
  }
  debug("Couldn't find resource " + combinedPath);
  return null;
}
/**
 * Locates the dependency descriptor ("*.DEPENDENCIES" file) and attempts to load all libraries mentioned there.
 * Each line in it, except comments, represents one dependency to be loaded.
 * <p>
 * The process is recursive, i.e. each dependency can bring its own dependencies.
 * <p>
 * Every dependency is expected to reside at the same resource location as the descriptor.
 * Missing libraries are ignored, as we expect the OS to provide them.
 *
 * @param lib name of the library for which we want dependencies to load
 */
private void extractDependenciesFor(String lib) throws IOException {
  // There can be multiple descriptors with the same name on the classpath; process all of them.
  final Enumeration<URL> resources = this.getClass().getClassLoader().getResources(lib + ".DEPENDENCIES");
  while (resources.hasMoreElements()) {
    extractDependencies(resources.nextElement(), lib);
  }
}
// Reads the descriptor line by line ('#' lines are comments); each entry names a
// library in the descriptor's own resource directory. Dependencies of each entry
// are extracted first (recursively), then the entry itself goes to the native dir.
private void extractDependencies(URL dependenciesDesc, String lib) throws IOException {
  debug("Extracting dependencies listed in " + dependenciesDesc);
  BufferedReader reader = null;
  try {
    // Resource directory of the descriptor (everything up to the last '/').
    final String base = lib
      .replace('\\', '/')
      .replaceFirst("/[^/]*$", "/");
    final URLConnection connection = dependenciesDesc.openConnection();
    connection.setUseCaches(false); // avoid serving a stale cached jar entry
    reader = new BufferedReader(new InputStreamReader(connection.getInputStream(), "UTF-8"));
    for (String line; (line = reader.readLine()) != null;) {
      if (line.startsWith("#")) continue;
      final String file = base + line;
      // extract its own deps first
      extractDependenciesFor(file);
      // extract the dep itself
      final URL dep = this.getClass().getClassLoader().getResource(file);
      if (dep == null) {
        // missing deps are tolerated — the OS is expected to provide them
        debug("Not found: " + file);
      } else {
        extractResource(getNativeDir(), dep, line);
      }
    }
    debug("Completed dependencies: " + dependenciesDesc);
  }
  finally {
    if (reader != null) {
      reader.close();
    }
  }
}
/** Extracts every library listed in any "AUTOEXTRACT.LIST" descriptor found under the known resource roots. */
@Override
public void extractRegistered() throws IOException {
  debug("Extracting libraries registered in classloader " +
    this.getClass().getClassLoader());
  for (final String nativeResourcePath : nativeResourcePaths) {
    final Enumeration<URL> resources =
      this.getClass().getClassLoader().getResources(
        nativeResourcePath + "AUTOEXTRACT.LIST");
    while (resources.hasMoreElements()) {
      final URL res = resources.nextElement();
      extractLibrariesFromResource(res);
    }
  }
}
/**
 * Extracts every library named in the given "AUTOEXTRACT.LIST" resource.
 * Each line is a library file name, resolved against the known resource roots
 * in priority order.
 *
 * @param resource URL of the AUTOEXTRACT.LIST descriptor
 * @throws IOException when the descriptor cannot be read, or a listed library
 *                     is missing from the classpath
 */
private void extractLibrariesFromResource(final URL resource)
  throws IOException
{
  debug("Extracting libraries listed in " + resource);
  BufferedReader reader = null;
  try {
    URLConnection connection = resource.openConnection();
    connection.setUseCaches(false); // avoid serving a stale cached jar entry
    reader = new BufferedReader(new InputStreamReader(connection.getInputStream(), "UTF-8"));
    for (String line; (line = reader.readLine()) != null;) {
      URL lib = null;
      // probe each candidate resource root in order; first hit wins
      for (final String nativeResourcePath : nativeResourcePaths) {
        lib =
          this.getClass().getClassLoader().getResource(
            nativeResourcePath + line);
        if (lib != null) break;
      }
      if (lib != null) {
        extractResource(getNativeDir(), lib, line);
      }
      else {
        // fix: the original message lacked the space before "on", yielding
        // "...library <name>on the classpath"
        throw new IOException("Couldn't find native library " + line +
          " on the classpath");
      }
    }
  }
  finally {
    if (reader != null) {
      reader.close();
    }
  }
}
/**
 * Extract a resource to the tmp dir (this entry point is used for unit
 * testing)
 *
 * @param dir the directory to extract the resource to
 * @param resource the resource on the classpath
 * @param outputName the filename to copy to (within the tmp dir)
 * @return the extracted file
 * @throws IOException when the resource cannot be read or the file cannot be written
 */
File extractResource(final File dir, final URL resource,
  final String outputName) throws IOException
{
  // NOTE(review): the 'dir' parameter is ignored — output always goes to getJniDir(),
  // even when callers pass getNativeDir(). This may be intentional (dependencies next
  // to the JNI lib so the dynamic linker finds them) — confirm before relying on 'dir'.
  final File outfile = new File(getJniDir(), outputName);
  if (outfile.exists()) {
    // already extracted in this run; reuse it
    return outfile;
  }
  InputStream in = null;
  try {
    URLConnection connection = resource.openConnection();
    connection.setUseCaches(false);
    in = connection.getInputStream();
    // TODO there's also a getResourceAsStream
    // make a lib file with exactly the same lib name
    debug("Extracting '" + resource + "' to '" +
      outfile.getAbsolutePath() + "'");
    // copy resource stream to temporary file
    FileOutputStream out = null;
    try {
      out = new FileOutputStream(outfile);
      copy(in, out);
    } finally {
      if (out != null) { out.close(); }
    }
    // note that this doesn't always work:
    outfile.deleteOnExit();
    return outfile;
  } finally {
    if (in != null) { in.close(); }
  }
}
/**
* Looks in the temporary directory for leftover versions of temporary shared
* libraries.
* <p>
* If a temporary shared library is in use by another instance it won't
* delete.
* <p>
* An old library will be deleted only if its last modified date is at least
* LEFTOVER_MIN_AGE milliseconds old (default to 5 minutes)
* This was introduced to avoid a possible race condition when two instances (JVMs) run the same unpacking code
* and one of which manage to delete the extracted file of the other before the other gets a chance to load it
* <p>
* Another issue is that createTempFile only guarantees to use the first three
* characters of the prefix, so I could delete a similarly-named temporary
* shared library if I haven't loaded it yet.
*/
void deleteLeftoverFiles() {
final File tmpDirectory = new File(System.getProperty(JAVA_TMPDIR, ALTR_TMPDIR));
final File[] folders = tmpDirectory.listFiles(new FilenameFilter() {
@Override
public boolean accept(final File dir, final String name) {
return name.startsWith(TMP_PREFIX);
}
});
if (folders == null) return;
long leftoverMinAge = getLeftoverMinAge();
for (final File folder : folders) {
// attempt to delete
long age = System.currentTimeMillis() - folder.lastModified();
if (age < leftoverMinAge) {
debug("Not deleting leftover folder " + folder + ": is " + age + "ms old");
continue;
}
debug("Deleting leftover folder: " + folder);
deleteRecursively(folder);
}
}
long getLeftoverMinAge() {
try {
return Long.parseLong(System.getProperty(LEFTOVER_MIN_AGE, String.valueOf(LEFTOVER_MIN_AGE_DEFAULT)));
} catch (NumberFormatException e) {
error("Cannot load leftover minimal age system property", e);
return LEFTOVER_MIN_AGE_DEFAULT;
}
}
/**
* copy an InputStream to an OutputStream.
*
* @param in InputStream to copy from
* @param out OutputStream to copy to
* @throws IOException if there's an error
*/
static void copy(final InputStream in, final OutputStream out)
throws IOException
{
final byte[] tmp = new byte[8192];
int len = 0;
while (true) {
len = in.read(tmp);
if (len <= 0) {
break;
}
out.write(tmp, 0, len);
}
}
  // Convenience wrapper: routes a debug-level message to the class logger.
  private static void debug(final String message) {
    LOGGER.debug(message);
  }
  // Convenience wrapper: routes an error message and its cause to the class logger.
  private static void error(final String message, final Throwable t) {
    LOGGER.error(message, t);
  }
}
|
0
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/org/scijava
|
java-sources/ai/h2o/libtorch-bundle/1.9.0-alpha-3/org/scijava/nativelib/package-info.java
|
/**
* TODO: for now, this hack is embedded, until they approve my PRs:
* - https://github.com/scijava/native-lib-loader/issues/45
* - https://github.com/pkozelka/native-lib-loader/pull/1
* - https://github.com/scijava/native-lib-loader/pull/46
*/
package org.scijava.nativelib;
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/MojoPipeline.java
|
package ai.h2o.mojos.runtime;
import ai.h2o.mojos.runtime.api.BasePipelineListener;
import ai.h2o.mojos.runtime.api.MojoPipelineService;
import ai.h2o.mojos.runtime.api.PipelineConfig;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoColumn;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameBuilder;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.lic.LicenseException;
import ai.h2o.mojos.runtime.readers.MojoReaderBackend;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
import org.joda.time.DateTime;
/**
 * A loaded MOJO scoring pipeline.
 * <p>
 * A pipeline consumes an input {@link MojoFrame} and produces a fresh
 * {@link MojoFrame} holding the transformation results.
 */
public abstract class MojoPipeline implements Serializable {

    private final String uuid;
    private final DateTime timestamp;
    private final String license;

    //----------------------------------------------------------------------------
    // Constructors
    //----------------------------------------------------------------------------

    protected MojoPipeline(String pipelineUuid, DateTime creationTime, String licenseText) {
        this.uuid = pipelineUuid;
        this.timestamp = creationTime;
        this.license = licenseText;
    }

    /**
     * Builds a MojoPipeline from a .mojo file on disk. This used to be the
     * primary way of creating pipeline instances.
     *
     * @param file name of the .mojo file with saved model information
     * @return a new pipeline built according to the description in the .mojo file
     * @throws IOException when the file cannot be read, has an invalid format,
     *         or is not a Mojo-2 file
     * @deprecated use {@link MojoPipelineService#loadPipeline(File, ai.h2o.mojos.runtime.api.PipelineConfig)} instead
     */
    @Deprecated
    public static MojoPipeline loadFrom(String file) throws IOException, LicenseException {
        return MojoPipelineService.loadPipeline(new File(file));
    }

    /**
     * @deprecated use {@link MojoPipelineService#loadPipeline(ReaderBackend, ai.h2o.mojos.runtime.api.PipelineConfig)} instead
     */
    @Deprecated
    public static MojoPipeline loadFrom(MojoReaderBackend mrb) throws IOException, LicenseException {
        return MojoPipelineService.loadPipeline(mrb.internalGetReaderBackend());
    }

    /** @return the pipeline's uuid, if stated in the creation file */
    public String getUuid() {
        return uuid;
    }

    /** @return the creation time recorded in the mojo file */
    public DateTime getCreationTime() {
        return timestamp;
    }

    /** @return the license recorded in the mojo file */
    public String getLicense() {
        return license;
    }

    /** @return a new {@link MojoFrameBuilder} for assembling an input frame */
    public MojoFrameBuilder getInputFrameBuilder() {
        return getFrameBuilder(MojoColumn.Kind.Feature);
    }

    /**
     * Get an instance of an {@link MojoFrameBuilder} that can be used to make an output frame.
     *
     * @param inputFrameBuilder a frame builder potentially containing some shared columns (for feature `exposeInputColumns`)
     * @return a new output frame builder
     */
    public abstract MojoFrameBuilder getOutputFrameBuilder(MojoFrameBuilder inputFrameBuilder);

    protected abstract MojoFrameBuilder getFrameBuilder(MojoColumn.Kind kind);

    /** @return metadata describing the input frame (see {@link MojoFrameMeta}) */
    public MojoFrameMeta getInputMeta() {
        return getMeta(MojoColumn.Kind.Feature);
    }

    /** @return metadata describing the output frame (see {@link MojoFrameMeta}) */
    public MojoFrameMeta getOutputMeta() {
        return getMeta(MojoColumn.Kind.Output);
    }

    protected abstract MojoFrameMeta getMeta(MojoColumn.Kind kind);

    /**
     * Executes the pipeline of transformers as stated in this model's mojo file.
     *
     * @param inputFrameBuilder builder from which the input frame is taken
     * @return a MojoFrame containing the results of the transform pipeline
     */
    public MojoFrame transform(MojoFrameBuilder inputFrameBuilder) {
        final MojoFrame input = inputFrameBuilder.toMojoFrame();
        return transform(input);
    }

    /**
     * Executes the pipeline of transformers as stated in this model's mojo file.
     *
     * @param inputFrame a MojoFrame containing the input data
     * @return a MojoFrame containing the results of the transform pipeline
     * @todo mark this method `final` after https://github.com/h2oai/dai-deployment-templates/pull/172 is merged
     */
    public /*final*/ MojoFrame transform(MojoFrame inputFrame) {
        final MojoFrameMeta resultMeta = getMeta(MojoColumn.Kind.Output);
        final MojoFrame result = MojoFrameBuilder.getEmpty(resultMeta, inputFrame.getNrows());
        return transform(inputFrame, result);
    }

    /**
     * Executes the pipeline of transformers as stated in this model's mojo file.
     *
     * @param inputFrame a MojoFrame containing the input data
     * @param outputFrame a MojoFrame the results are written into
     * @return the `outputFrame` reference
     */
    public abstract MojoFrame transform(MojoFrame inputFrame, MojoFrame outputFrame);

    /**
     * @deprecated use {@link PipelineConfig.Builder#enableShap(boolean)} instead.
     */
    @Deprecated
    public abstract void setShapPredictContrib(boolean enable);

    /**
     * @deprecated use {@link PipelineConfig.Builder#enableShapOriginal(boolean)} instead.
     */
    @Deprecated
    public abstract void setShapPredictContribOriginal(boolean enable);

    /**
     * @deprecated use {@link PipelineConfig.Builder#listener(BasePipelineListener)} instead.
     */
    @Deprecated
    public abstract void setListener(final BasePipelineListener listener);

    /** Writes a human-readable description of the pipeline to the given stream. */
    public abstract void printPipelineInfo(PrintStream out);
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/package-info.java
|
/**
* Entry point package to load MOJO.
*
* The package exposes two main concepts:
*
* - {@link ai.h2o.mojos.runtime.MojoPipeline} represents an interface of MOJO scoring pipeline.
 * - {@link ai.h2o.mojos.runtime.api.MojoPipelineService} provides a factory to instantiate MOJO pipelines
* from different sources represented by subclasses of {@link ai.h2o.mojos.runtime.api.backend.ReaderBackend}.
*/
package ai.h2o.mojos.runtime;
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/BasePipelineListener.java
|
package ai.h2o.mojos.runtime.api;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
/**
 * Listener, called by pipeline while processing the transformation.
 * <p>
 * Every callback is a no-op by default; subclasses override only the ones
 * they care about.
 */
public class BasePipelineListener {
    /**
     * An instance of listener that ignores everything.
     * Declared {@code final} so the shared no-op instance cannot be replaced
     * by client code.
     */
    public static final BasePipelineListener NOOP = new BasePipelineListener();

    /**
     * Called once before every batch.
     *
     * @param globalFrame global frame representing all values in the batch.
     *        Implementation will typically need to remember it, for use in other methods
     *        - {@link #onTransformStep(String, int)} and {@link #onTransformResult(MojoTransform)}
     * @param iindices input column indices for the batch (presumably matching
     *        {@code MojoTransform.iindices} — TODO confirm exact semantics)
     */
    public void onBatchStart(final MojoFrame globalFrame, final int[] iindices) {
        //
    }

    /**
     * @deprecated use {@link #onBatchStart(MojoFrame, int[])} instead
     */
    @Deprecated
    public void onBatchStart(final MojoFrame globalFrame) {
        onBatchStart(globalFrame, new int[0]);
    }

    /**
     * Called once after every batch.
     */
    public void onBatchEnd() {
        //
    }

    /**
     * Expose heading of current transformation.
     *
     * @param transform current transformation
     */
    public void onTransformHead(final MojoTransform transform) {
        //
    }

    /**
     * Step reporting callback.
     * Transformer calls it to expose intermediate values that have changed during recent step.
     * This method should be called once per each changed value during the step (if there are more).
     *
     * @param stepId simple but unique identification of the step; for instance "tree 23" - main rule is: it must be informative
     * @param oindex index of the column that has changed
     */
    public void onTransformStep(final String stepId, int oindex) {
        //
    }

    /**
     * Expose values of current transformation.
     *
     * @param transform current transformation
     */
    public void onTransformResult(final MojoTransform transform) {
        //
    }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/MojoColumnMeta.java
|
package ai.h2o.mojos.runtime.api;
import ai.h2o.mojos.runtime.frame.MojoColumn;
import java.util.ArrayList;
import java.util.List;
/**
 * Represents all metadata about a column.
 * A column may be an input feature, an intermediate result, or an output column.
 *
 * Holds no values - for those, see {@link ai.h2o.mojos.runtime.frame.MojoColumn} and its descendants.
 */
public class MojoColumnMeta {
    private final String columnName;
    private final MojoColumn.Type columnType;

    private MojoColumnMeta(String columnName, MojoColumn.Type columnType) {
        this.columnName = columnName;
        this.columnType = columnType;
    }

    /** Creates a column description with the given name and type. */
    public static MojoColumnMeta create(String columnName, MojoColumn.Type columnType) {
        return new MojoColumnMeta(columnName, columnType);
    }

    /**
     * @deprecated use {@link #create(String, MojoColumn.Type)} instead
     */
    @Deprecated
    public static MojoColumnMeta newInput(String columnName, MojoColumn.Type columnType) {
        return create(columnName, columnType);
    }

    /**
     * @deprecated use {@link #create(String, MojoColumn.Type)} instead
     */
    @Deprecated
    public static MojoColumnMeta newOutput(String name, MojoColumn.Type columnType) {
        return create(name, columnType);
    }

    /**
     * Compatibility helper. You should preferably construct {@link List} of columns as standard java collection, and then pass it to {@link ai.h2o.mojos.runtime.frame.MojoFrameMeta}.
     */
    public static List<MojoColumnMeta> toColumns(final String[] columnNames, final MojoColumn.Type[] columnTypes) {
        if (columnNames.length != columnTypes.length) {
            throw new IllegalArgumentException("columnNames and columnTypes arguments must have the same length");
        }
        final List<MojoColumnMeta> columns = new ArrayList<>();
        for (int i = 0; i < columnNames.length; i++) {
            columns.add(create(columnNames[i], columnTypes[i]));
        }
        return columns;
    }

    /**
     * @deprecated use {@link #toColumns(String[], MojoColumn.Type[])} instead
     */
    @Deprecated
    public static List<MojoColumnMeta> toColumns(final String[] columnNames, final MojoColumn.Type[] columnTypes, final MojoColumn.Kind kind_ignored) {
        return toColumns(columnNames, columnTypes);
    }

    public String getColumnName() {
        return columnName;
    }

    public MojoColumn.Type getColumnType() {
        return columnType;
    }

    @Override
    public String toString() {
        return columnName + ":" + columnType;
    }

    /**
     * It is essential that both this and {@link #hashCode()} remain instance-based, and definitely not name-based.
     * For that reason, the override here is explicit and calls derived code.
     */
    @Override
    public boolean equals(final Object o) {
        // DO NOT CHANGE!
        return super.equals(o);
    }

    /**
     * It is essential that both {@link #equals(Object)} and this remain instance-based, and definitely not name-based.
     * For that reason, the override here is explicit and calls derived code.
     */
    @Override
    public int hashCode() {
        // DO NOT CHANGE!
        return super.hashCode();
    }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/MojoPipelineService.java
|
package ai.h2o.mojos.runtime.api;
import ai.h2o.mojos.runtime.MojoPipeline;
import ai.h2o.mojos.runtime.api.backend.DirReaderBackend;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.api.backend.ZipFileReaderBackend;
import ai.h2o.mojos.runtime.lic.LicenseException;
import ai.h2o.mojos.runtime.utils.ClassLoaderUtils;
import ai.h2o.mojos.runtime.utils.Consts;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.ServiceLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Primary pipeline service.
 *
 * This class takes care of instantiating pipeline based on what's found in a given backend.
 * And, because one backend can contain multiple pipeline formats, the choice can be configured with property <code>sys.ai.h2o.mojos.pipelineFormats</code>.
 * It contains a comma separated list of format provider names, which are defined by their {@link PipelineLoaderFactory#getName()} method.
 */
public class MojoPipelineService {
    private static final Logger log = LoggerFactory.getLogger(MojoPipelineService.class);
    // ordered registry: preferred formats first (insertion order is meaningful)
    private final LinkedHashMap<String, PipelineLoaderFactory> registry = new LinkedHashMap<>();
    private MojoPipelineService(String... pipelineFormats) {
        // gather all providers discovered via the ServiceLoader SPI
        final LinkedHashMap<String, PipelineLoaderFactory> factories = new LinkedHashMap<>();
        final ServiceLoader<PipelineLoaderFactory> loader = ServiceLoader.load(
            PipelineLoaderFactory.class,
            ClassLoaderUtils.getPreferredSpiClassLoader(PipelineLoaderFactory.class));
        for (PipelineLoaderFactory factory : loader) {
            final String name = factory.getName();
            final PipelineLoaderFactory existing = factories.put(name, factory);
            if (existing != null) {
                // duplicate provider names indicate a broken classpath; fail fast
                throw new IllegalStateException(String.format("Pipeline loader '%s' is already registered with class '%s'", name, existing.getClass().getName()));
            }
        }
        // first, register preferred format providers in the order of preference
        for (String pipelineFormat : pipelineFormats) {
            final PipelineLoaderFactory provider = factories.remove(pipelineFormat);
            if (provider != null) {
                registry.put(pipelineFormat, provider);
            } else {
                log.warn("No pipeline format provider for '{}'", pipelineFormat);
            }
        }
        // then add the rest (any providers not listed among the preferences)
        registry.putAll(factories);
    }
    /**
     * Finds the first registered factory whose root resource exists in the backend.
     *
     * @param backend backend to probe
     * @return the matching factory (registry order decides ties)
     * @throws IOException when no registered factory can read the backend
     */
    public PipelineLoaderFactory get(ReaderBackend backend) throws IOException {
        if (registry.isEmpty()) {
            throw new IllegalStateException("No pipeline factory is available");
        }
        for (Map.Entry<String, PipelineLoaderFactory> entry : registry.entrySet()) {
            final PipelineLoaderFactory factory = entry.getValue();
            final String rootResource = factory.getRootResource();
            if (backend.exists(rootResource)) {
                return factory;
            }
        }
        throw new IOException(String.format("None of %d available pipeline factories %s can read this mojo.", registry.size(), registry.keySet()));
    }
    /**
     * The global service to provide pipeline service.
     * NOTE(review): this public static field is not {@code final}, so it can be
     * reassigned by clients — confirm whether that is intentional.
     * @see MojoPipelineService
     */
    public static MojoPipelineService INSTANCE = new MojoPipelineService(Consts.getSysProp("pipelineFormats", "pbuf,toml,klime,h2o3").split(","));
    // Picks a backend type by inspecting the filesystem entry: a directory is
    // read in place, a regular file is treated as a zip archive.
    private static ReaderBackend autodetectBackend(File file) throws IOException {
        if (!file.exists()) {
            throw new FileNotFoundException(file.getAbsolutePath());
        } else if (file.isDirectory()) {
            return DirReaderBackend.open(file);
        } else if (file.isFile()) {
            return ZipFileReaderBackend.open(file);
        } else {
            throw new IOException("Unsupported file type: " + file.getAbsolutePath());
        }
    }
    /**
     * Loads {@link MojoPipeline pipeline} from a file.
     * @param file the file or directory containing pipeline resources
     * @param config pipeline configuration to apply
     * @return pipeline
     */
    public static MojoPipeline loadPipeline(File file, final PipelineConfig config) throws IOException, LicenseException {
        final ReaderBackend backend = autodetectBackend(file);
        return loadPipeline(backend, config);
    }
    /**
     * @deprecated use {@link #loadPipeline(File, PipelineConfig)} instead
     */
    @Deprecated
    public static MojoPipeline loadPipeline(File file) throws IOException, LicenseException {
        return loadPipeline(file, PipelineConfig.DEFAULT);
    }
    /**
     * Loads {@link MojoPipeline pipeline} from a backend.
     * @param backend the backend providing access to pipeline resources
     * @param config pipeline configuration to apply
     * @return pipeline
     */
    public static MojoPipeline loadPipeline(ReaderBackend backend, final PipelineConfig config) throws IOException, LicenseException {
        final PipelineLoaderFactory loaderFactory = INSTANCE.get(backend);
        final PipelineLoader loader = loaderFactory.createLoader(backend, null, config);
        try {
            // This check is to ensure that h2o implementations are always protected against simple overriding (and avoiding AccessManager check)
            // It will fail only in our CI; customer will never experience it.
            final Method method = loader.getClass().getMethod("load");
            final int modifiers = method.getModifiers();
            if (!Modifier.isFinal(modifiers)) {
                throw new IllegalStateException(String.format("Internal error: Method %s#%s() is required to be declared final", loader.getClass().getName(), method.getName()));
            }
            // In future, we might also want detect if AccessManager was used during call to load.
            return loader.load();
        } catch (NoSuchMethodException e) {
            // load() is declared on the PipelineLoader interface, so this is unreachable in practice
            throw new IllegalStateException(e);
        }
    }
    /**
     * @deprecated use {@link #loadPipeline(ReaderBackend, PipelineConfig)} instead
     */
    @Deprecated
    public static MojoPipeline loadPipeline(ReaderBackend backend) throws IOException, LicenseException {
        return loadPipeline(backend, PipelineConfig.DEFAULT);
    }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/MojoTransformMeta.java
|
package ai.h2o.mojos.runtime.api;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
/**
 * Temporary representation of a transformer - before it is fully loaded.
 */
public class MojoTransformMeta extends MojoTransform {
    private final Object desc;
    private final int transformationIndex;
    private MojoTransform transform;

    /**
     * WARNING: Do not use this constructor.
     * Only for internal use of mojo2 implementation.
     * Subject to change without notice.
     */
    public MojoTransformMeta(String name, Object desc, int[] iindices, int[] oindices, final String id, int transformationIndex, final MojoTransformationGroup transformationGroup) {
        super(iindices, oindices);
        this.desc = desc;
        this.transformationIndex = transformationIndex;
        setName(name);
        setId(id);
        setTransformationGroup(transformationGroup);
    }

    /**
     * Only for internal use - subject to change without notice
     */
    public MojoTransform getTransform() {
        return transform;
    }

    /**
     * Only for internal use - subject to change without notice
     */
    public void setTransform(MojoTransform transform) {
        this.transform = transform;
    }

    /**
     * Description, can be multi-line.
     * @return object whose {@link #toString()} has the description
     */
    public Object getDesc() {
        return desc;
    }

    public int[] getInputIndices() {
        // the loaded transform's indices may differ when a feature map has been applied
        return transform == null ? iindices : transform.iindices;
    }

    public int[] getOutputIndices() {
        return oindices;
    }

    /**
     * @return index of transformation operation in the pipeline, before removing unused part
     */
    public int getTransformationIndex() {
        return transformationIndex;
    }

    @Override
    public void transform(final MojoFrame frame) {
        throw new UnsupportedOperationException("This is not meant to perform any operation");
    }

    @Override
    public String toString() {
        return "{{" + super.toString() + "}}";
    }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/MojoTransformationGroup.java
|
package ai.h2o.mojos.runtime.api;
import java.util.Map;
import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A named group of transformations, identified by its {@code id}.
 */
public class MojoTransformationGroup {
    private static final Logger log = LoggerFactory.getLogger(MojoTransformationGroup.class);
    private final String id;
    private final String name;

    public MojoTransformationGroup(final String id, final String name) {
        this.id = id;
        this.name = name;
    }

    /**
     * Looks a group up by id, registering a placeholder group in {@code groups}
     * when the id is unknown (note: this mutates the passed-in map).
     *
     * @param groups known groups, keyed by id
     * @param groupId id to resolve; {@code null} yields {@code null}
     * @return the resolved (or newly created placeholder) group
     */
    public static MojoTransformationGroup findGroup(final Map<String, MojoTransformationGroup> groups, final String groupId) {
        if (groupId == null) return null;
        MojoTransformationGroup group = groups.get(groupId);
        if (group == null) {
            log.warn("Undefined group ID: {}", groupId); // this will unlikely ever happen (corrupted mojo only)
            group = new MojoTransformationGroup(groupId, String.format("#undefined#%s", groupId));
            groups.put(groupId, group);
        }
        return group;
    }

    public String getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final MojoTransformationGroup that = (MojoTransformationGroup) o;
        // Compare ids by value: the previous `id == that.id` used reference
        // identity, which only held for interned strings and was inconsistent
        // with hashCode() (Objects.hash(id) is value-based).
        return Objects.equals(id, that.id);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id);
    }

    @Override
    public String toString() {
        return String.format("G:%s{%s}", id, name);
    }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/PipelineConfig.java
|
package ai.h2o.mojos.runtime.api;
/**
 * Immutable configuration of the mojo pipeline.
 * <p>
 * NOTE(review): immutability is by convention only — {@link Builder} extends
 * this class and {@link Builder#build()} returns {@code this}, so every built
 * config (including {@link #DEFAULT}) is in fact a still-mutable Builder.
 * Confirm whether that is intentional before relying on immutability.
 */
public class PipelineConfig {
    // package-private fields, written by Builder methods below
    boolean shapEnabled;
    boolean shapOriginal;
    BasePipelineListener listener = BasePipelineListener.NOOP;
    boolean withPredictionInterval;
    String[] exposedColumnNames;
    // Shared default configuration (all flags off, NOOP listener).
    public static final PipelineConfig DEFAULT;
    static {
        DEFAULT = PipelineConfig.builder()
            .build()
        ;
    }
    /** Entry point for assembling a configuration. */
    public static PipelineConfig.Builder builder() {
        return new PipelineConfig.Builder();
    }
    private PipelineConfig() {
        // create in building mode
    }
    public boolean isShapEnabled() {
        return shapEnabled;
    }
    public boolean isShapOriginal() {
        return shapOriginal;
    }
    public BasePipelineListener getListener() {
        return listener;
    }
    public boolean isWithPredictionInterval() {
        return withPredictionInterval;
    }
    // NOTE(review): returns the internal array without a defensive copy, so a
    // caller could mutate the configuration's state — confirm acceptability.
    public String[] getExposedColumnNames() {
        return exposedColumnNames;
    }
    public static class Builder extends PipelineConfig {
        /** Finalizes the configuration; returns this instance (no copy is made). */
        public PipelineConfig build() {
            if (shapOriginal) {
                // original-SHAP implies SHAP itself
                shapEnabled = true;
            }
            return this;
        }
        /**
         * Allows prediction contribution columns (computed by SHAP algo) to appear in the transformation result.
         * Current implementation removes the standard transformation result. This will be improved in future.
         *
         * @param shapEnabled - currently required to be true.
         * @see #enableShapOriginal(boolean)
         */
        public PipelineConfig.Builder enableShap(final boolean shapEnabled) {
            this.shapEnabled = shapEnabled;
            return this;
        }
        /**
         * If true, switches the pipeline to produce so called "original" SHAP contributions,
         * ie. an estimation how does each pipeline's output contribute to changes in each pipeline's input.
         * And also bias per each output.
         *
         * @param shapOriginal whether to produce "original" SHAP contributions
         * @see #enableShap(boolean)
         */
        public PipelineConfig.Builder enableShapOriginal(final boolean shapOriginal) {
            if (shapOriginal) {
                enableShap(true);
            }
            this.shapOriginal = shapOriginal;
            return this;
        }
        /**
         * Configures a listener that can report the progress of computation.
         * Useful primarily for debugging and trouble-shooting purposes.
         * <p>
         * Only one listener at a time is currently supported.
         */
        public PipelineConfig.Builder listener(final BasePipelineListener listener) {
            this.listener = listener;
            return this;
        }
        /**
         * Enables exposing prediction interval per each output value.
         */
        public PipelineConfig.Builder withPredictionInterval(final boolean withPredictionInterval) {
            this.withPredictionInterval = withPredictionInterval;
            return this;
        }
        /**
         * @param exposedColumnNames column names that should be part of output frame, no matter if they participate in computation graph.
         */
        public PipelineConfig.Builder exposedColumnNames(final String... exposedColumnNames) {
            this.exposedColumnNames = exposedColumnNames;
            return this;
        }
    }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/PipelineLoader.java
|
package ai.h2o.mojos.runtime.api;
import ai.h2o.mojos.runtime.MojoPipeline;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.lic.LicenseException;
import java.io.IOException;
import java.util.List;
/**
 * Representation of a pipeline as it is being loaded from a backend.
 */
public interface PipelineLoader {
    /**
     * @return metadata of the pipeline's input frame
     * @deprecated use {@link #getColumns()} instead
     */
    @Deprecated
    MojoFrameMeta getInput();
    /**
     * @return metadata of the pipeline's output frame
     * @deprecated use {@link #getColumns()} instead
     */
    @Deprecated
    MojoFrameMeta getOutput();
    /**
     * @return list of all columns defined by pipeline
     */
    List<MojoColumnMeta> getColumns();
    /**
     * @return list of all transformations
     */
    List<MojoTransformMeta> getTransformations();
    /**
     * @return pipeline on which prediction can be computed
     * @throws IOException when pipeline resources cannot be read
     * @throws LicenseException when the license check fails
     */
    MojoPipeline load() throws IOException, LicenseException;
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/PipelineLoaderFactory.java
|
package ai.h2o.mojos.runtime.api;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.lic.LicenseException;
import java.io.IOException;
/** SPI factory that creates {@link PipelineLoader}s for one pipeline format. */
public interface PipelineLoaderFactory {
    /**
     * Creates the loader with a given backend.
     * @param backend backend providing access to pipeline resources
     * @param optionalMainFile primary file to load; if null, the one from {@link #getRootResource()} must be used.
     * Can be occasionally used with different values, for instance for nested pipelines.
     * @param config configuration of the pipeline
     * @return the pipeline loader; never returns null
     * @throws IOException when pipeline resources cannot be read
     * @throws LicenseException when the license check fails
     */
    PipelineLoader createLoader(ReaderBackend backend, String optionalMainFile, final PipelineConfig config) throws IOException, LicenseException;
    /**
     * Same as {@link #createLoader(ReaderBackend, String, PipelineConfig)} but uses {@link PipelineConfig#DEFAULT default} configuration.
     */
    default PipelineLoader createLoader(ReaderBackend backend, String optionalMainFile) throws IOException, LicenseException {
        return this.createLoader(backend, optionalMainFile, PipelineConfig.DEFAULT);
    }
    /**
     * @return simple, constant identification of the loader; like "protobuf" or "toml". Used for debugging,
     * and explicit specification of loader detection order - see {@link MojoPipelineService}.
     */
    String getName();
    /**
     * Primary root resource. It is used a) for determining if that backend can be used to {@link #createLoader(ReaderBackend, String) create a loader} with this factory,
     * and b) if so, it is passed to the loader as the primary file to load.
     * @return path to primary file inside a {@link ReaderBackend}
     */
    String getRootResource();
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/backend/DirReaderBackend.java
|
package ai.h2o.mojos.runtime.api.backend;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Access to directory-based fileset (an unpacked mojo on disk). */
public class DirReaderBackend implements ReaderBackend {
    private static final Logger log = LoggerFactory.getLogger(DirReaderBackend.class);
    private final File dir;
    // NOTE(review): this cache is read in getResourceInfo() but never written
    // to here — the computed ResourceInfo is not stored back, so every call
    // recomputes. Confirm whether ResourceInfo.Cache is populated elsewhere
    // or whether a put is missing.
    private final ResourceInfo.Cache cache = new ResourceInfo.Cache();
    private DirReaderBackend(File dir) {
        this.dir = dir;
    }
    /**
     * Opens the given directory as a reader backend.
     *
     * @param dir directory containing the fileset
     * @return backend rooted at the directory's canonical path
     * @throws IOException when {@code dir} is not an existing directory or canonicalization fails
     */
    public static ReaderBackend open(File dir) throws IOException {
        log.info("Opening mojo directory: {}", dir);
        if (!dir.isDirectory()) {
            throw new FileNotFoundException(dir.getAbsolutePath());
        }
        return new DirReaderBackend(dir.getCanonicalFile());
    }
    @Override
    public ResourceInfo getResourceInfo(final String resourceName) throws IOException {
        ResourceInfo info = cache.get(resourceName);
        if (info == null) {
            final File file = new File(dir, resourceName);
            if (! file.isFile()) {
                throw new FileNotFoundException(file.getAbsolutePath());
            }
            info = new ResourceInfo(file.length(), "FILESIZE:" + file.length()); // remember, it's considered weak anyway
        }
        return info;
    }
    @Override
    public InputStream getInputStream(String resourceName) throws FileNotFoundException {
        final File file = new File(dir, resourceName);
        if (! file.isFile()) {
            throw new FileNotFoundException(file.getAbsolutePath());
        }
        return new FileInputStream(file);
    }
    @Override
    public boolean exists(String resourceName) {
        final File file = new File(dir, resourceName);
        return file.isFile();
    }
    @Override
    public Collection<String> list() {
        final List<String> result = new ArrayList<>();
        listFiles(result, "", dir.listFiles());
        return result;
    }
    /**
     * Collects subtree file names, recursively, sorted by name.
     * Names use '/'-separated paths relative to the backend root.
     */
    private void listFiles(List<String> result, String prefix, File[] files) {
        if (files == null) return;
        Arrays.sort(files);
        for (File file : files) {
            if (file.isFile()) {
                result.add(prefix + file.getName());
            } else if (!file.isDirectory()) {
                // we ignore strange file types, like devices pipes etc.
                continue;
            }
            // recurse into subdirectories
            // (for a regular file, file.listFiles() is null, making this a no-op)
            listFiles(result, prefix + file.getName() + "/", file.listFiles());
        }
    }
    @Override
    public void close() {}
    @Override
    public String toString() {
        return String.format("%s[%s]", getClass().getSimpleName(), dir);
    }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/backend/MemoryReaderBackend.java
|
package ai.h2o.mojos.runtime.api.backend;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.security.NoSuchAlgorithmException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Access to in-memory fileset; primarily for testing */
public class MemoryReaderBackend implements ReaderBackend {
  private static final Logger log = LoggerFactory.getLogger(MemoryReaderBackend.class);

  /** Resource name -> raw bytes. */
  private final Map<String, byte[]> files;
  /** Lazily populated per-resource metadata cache. */
  private final ResourceInfo.Cache cache = new ResourceInfo.Cache();

  private MemoryReaderBackend(Map<String, byte[]> files) {
    this.files = files;
  }

  /** Wraps the given name-to-bytes map as a reader backend. */
  public static ReaderBackend open(Map<String, byte[]> files) {
    return new MemoryReaderBackend(files);
  }

  /**
   * Read zipped files from inputstream. Useful when the container is nested in a java resource.
   */
  public static ReaderBackend fromZipStream(final InputStream is) throws IOException {
    log.info("Opening mojo stream: {}", is);
    final HashMap<String, byte[]> contents = new HashMap<>();
    try (final ZipInputStream zin = new ZipInputStream(is)) {
      for (ZipEntry entry = zin.getNextEntry(); entry != null; entry = zin.getNextEntry()) {
        if (entry.getSize() > Integer.MAX_VALUE) {
          throw new IOException("File is too large: " + entry.getName());
        }
        if (!entry.isDirectory()) {
          final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
          ReaderBackendUtils.copy(zin, buffer, 8 * 1024);
          contents.put(entry.getName(), buffer.toByteArray());
        }
        zin.closeEntry();
      }
    }
    return MemoryReaderBackend.open(contents);
  }

  /** Adds (or replaces) a resource in the backing map. */
  public void put(final String resourceName, final byte[] bytes) {
    files.put(resourceName, bytes);
  }

  @Override
  public ResourceInfo getResourceInfo(final String resourceName) throws IOException {
    final ResourceInfo cached = cache.get(resourceName);
    if (cached != null) {
      return cached;
    }
    final byte[] data = files.get(resourceName);
    if (data == null) {
      throw new FileNotFoundException(resourceName);
    }
    try {
      // bytes are already in memory anyway, so computing a strong hash here is cheap
      final ResourceInfo info = new ResourceInfo(data.length, "MD5:" + ResourceInfo.computeMD5(data));
      cache.put(resourceName, info);
      return info;
    } catch (NoSuchAlgorithmException e) {
      throw new IOException(e);
    }
  }

  @Override
  public InputStream getInputStream(String resourceName) throws FileNotFoundException {
    final byte[] data = files.get(resourceName);
    if (data == null) {
      throw new FileNotFoundException(resourceName);
    }
    return new ByteArrayInputStream(data);
  }

  @Override
  public boolean exists(String resourceName) {
    return files.containsKey(resourceName);
  }

  @Override
  public Collection<String> list() {
    return files.keySet();
  }

  @Override
  public void close() {}

  @Override
  public String toString() {
    return String.format("%s[%s]", getClass().getSimpleName(), files.size());
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/backend/ReaderBackend.java
|
package ai.h2o.mojos.runtime.api.backend;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
/** Represents access to a storage */
public interface ReaderBackend extends Closeable {

  /**
   * @param resourceName name of the contained resource
   * @return size and weak-hash metadata of the resource
   * @throws IOException if the resource does not exist or cannot be read
   */
  ResourceInfo getResourceInfo(String resourceName) throws IOException;

  /**
   * @param resourceName name of the contained resource
   * @return binary representation of the resource in form of a stream
   * @throws IOException if the resource does not exist or cannot be opened
   */
  InputStream getInputStream(String resourceName) throws IOException;

  /**
   * @param resourceName name of the contained resource
   * @return true if such resource exists
   */
  boolean exists(String resourceName);

  /**
   * @return list of all contained resources
   */
  Collection<String> list();
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/backend/ReaderBackendUtils.java
|
package ai.h2o.mojos.runtime.api.backend;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
/**
 * Utilities for working with {@link ReaderBackend} contents.
 *
 * Note: in Java8, some of these will be best suited as default interface methods in {@link ReaderBackend}.
 */
public class ReaderBackendUtils {
  public static final int BUFFER_SIZE = 16 * 1024;

  // not instantiable
  private ReaderBackendUtils() {}

  /**
   * @return inputstream wrapped as reader, for text processing
   */
  public static BufferedReader asReader(InputStream is) throws IOException {
    // NOTE(review): this uses the platform default charset; if the contained text is always
    // UTF-8, an explicit charset should be passed here -- confirm before changing behavior.
    return new BufferedReader(new InputStreamReader(is));
  }

  /**
   * Read whole stream and return its bytes.
   *
   * @param is stream to drain (not closed by this method)
   * @return bytes from the stream
   * @throws IOException if reading fails
   */
  public static byte[] getBytes(InputStream is) throws IOException {
    try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
      copy(is, baos, BUFFER_SIZE);
      return baos.toByteArray();
    }
  }

  /**
   * Extracts subtree of the storage to filesystem.
   *
   * @param backend source of the resources
   * @param subdir the root directory (of the contained files) to be exported
   * @param dest where to place the exported files
   * @throws IOException if a resource cannot be read or written, a directory cannot be
   *         created, or a resource name would escape {@code dest} (path traversal)
   */
  public static void export(ReaderBackend backend, String subdir, File dest) throws IOException {
    final String prefix = subdir + "/";
    final String destRoot = dest.getCanonicalPath() + File.separator;
    for (String name : backend.list()) {
      if (!name.startsWith(prefix)) continue;
      final File file = new File(dest, name);
      // security: resource names may come from an untrusted archive; refuse names
      // (e.g. containing "..") that would resolve outside the destination directory
      if (!file.getCanonicalPath().startsWith(destRoot)) {
        throw new IOException("Refusing to extract outside of destination directory: " + name);
      }
      final File parent = file.getParentFile();
      parent.mkdirs(); // may legitimately return false when the directory already exists
      if (!parent.isDirectory()) {
        throw new IOException("Cannot create directory: " + parent);
      }
      try (final OutputStream os = new FileOutputStream(file);
           final InputStream is = backend.getInputStream(name)) {
        copy(is, os, BUFFER_SIZE);
      }
    }
  }

  /**
   * This is copied from <code>org.codehaus.plexus.util.copy(final InputStream input, final OutputStream output)</code>.
   * Avoiding additional dependency for just one simple method.
   *
   * Copy bytes from an <code>InputStream</code> to an <code>OutputStream</code>.
   *
   * @param bufferSize Size of internal buffer to use.
   */
  public static void copy(final InputStream input, final OutputStream output, final int bufferSize)
      throws IOException {
    final byte[] buffer = new byte[bufferSize];
    int n;
    while (0 <= (n = input.read(buffer))) {
      output.write(buffer, 0, n);
    }
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/backend/ResourceInfo.java
|
package ai.h2o.mojos.runtime.api.backend;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
/**
 * Basic information about a resource in {@link ReaderBackend}.
 */
public final class ResourceInfo {
  /** Resource size in bytes. */
  public final long size;
  /** Content fingerprint, prefixed with its scheme (e.g. "MD5:", "ZIPCRC:", "FILESIZE:"). */
  public final String weakHash;

  public ResourceInfo(final long size, final String weakHash) {
    this.size = size;
    this.weakHash = weakHash;
  }

  /**
   * Computes the MD5 digest of the given bytes as 32 uppercase hex characters
   * (zero-padded on the left).
   */
  public static String computeMD5(final byte[] bytes) throws NoSuchAlgorithmException {
    final MessageDigest digest = MessageDigest.getInstance("MD5");
    digest.update(bytes, 0, bytes.length);
    final String hexDigits = "0123456789ABCDEF";
    final StringBuilder hex = new StringBuilder(32);
    for (final byte b : digest.digest()) {
      hex.append(hexDigits.charAt((b >>> 4) & 0xF));
      hex.append(hexDigits.charAt(b & 0xF));
    }
    return hex.toString();
  }

  /** Simple name-keyed cache of resource metadata. */
  static class Cache extends HashMap<String, ResourceInfo> {}
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/backend/SubtreeReaderBackend.java
|
package ai.h2o.mojos.runtime.api.backend;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * This reader operates on a subtree of another {@link ReaderBackend}.
 */
public class SubtreeReaderBackend implements ReaderBackend {
  /** Backend this view delegates to. */
  private final ReaderBackend delegate;
  /** Subtree prefix, always terminated by '/'. */
  private final String prefix;

  private SubtreeReaderBackend(ReaderBackend delegate, String subdir) {
    this.delegate = delegate;
    this.prefix = subdir.endsWith("/") ? subdir : subdir + "/";
  }

  /** Creates a view over the given subtree of {@code parent}. */
  public static SubtreeReaderBackend from(ReaderBackend parent, String subdir) {
    return new SubtreeReaderBackend(parent, subdir);
  }

  @Override
  public ResourceInfo getResourceInfo(final String resourceName) throws IOException {
    return delegate.getResourceInfo(prefix + resourceName);
  }

  @Override
  public InputStream getInputStream(String resourceName) throws IOException {
    return delegate.getInputStream(prefix + resourceName);
  }

  @Override
  public boolean exists(String resourceName) {
    return delegate.exists(prefix + resourceName);
  }

  @Override
  public Collection<String> list() {
    final List<String> names = new ArrayList<>();
    for (final String fullName : delegate.list()) {
      if (!fullName.startsWith(prefix)) {
        continue;
      }
      // expose names relative to the subtree root
      names.add(fullName.substring(prefix.length()));
    }
    return names;
  }

  @Override
  public void close() {}

  @Override
  public String toString() {
    return String.format("%s.%s[%s]", delegate.toString(), getClass().getSimpleName(), prefix);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/api/backend/ZipFileReaderBackend.java
|
package ai.h2o.mojos.runtime.api.backend;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Access to zip-compressed fileset */
public class ZipFileReaderBackend implements ReaderBackend {
  private static final Logger log = LoggerFactory.getLogger(ZipFileReaderBackend.class);

  private final ZipFile zipFile;
  /** Lazily populated per-resource metadata cache. */
  private final ResourceInfo.Cache cache = new ResourceInfo.Cache();

  private ZipFileReaderBackend(ZipFile zipFile) {
    this.zipFile = zipFile;
  }

  /**
   * Opens a zip archive as a reader backend.
   *
   * @param zippedMojoFile the zip archive to read
   * @throws IOException if the file cannot be opened as a zip archive
   */
  public static ReaderBackend open(File zippedMojoFile) throws IOException {
    log.info("Opening mojo file: {}", zippedMojoFile);
    return new ZipFileReaderBackend(new ZipFile(zippedMojoFile));
  }

  @Override
  public ResourceInfo getResourceInfo(final String resourceName) throws IOException {
    ResourceInfo info = cache.get(resourceName);
    if (info == null) {
      final ZipEntry entry = zipFile.getEntry(resourceName);
      if (entry == null) {
        throw new FileNotFoundException(resourceName);
      }
      // NOTE: ZipEntry.getSize()/getCrc() return -1 when unknown for the entry.
      info = new ResourceInfo(entry.getSize(), "ZIPCRC:" + entry.getCrc());
      cache.put(resourceName, info); // fix: the computed info was never stored, so the cache never hit
    }
    return info;
  }

  @Override
  public InputStream getInputStream(String resourceName) throws IOException {
    final ZipEntry entry = zipFile.getEntry(resourceName);
    if (entry == null) {
      throw new FileNotFoundException(resourceName);
    }
    return zipFile.getInputStream(entry);
  }

  @Override
  public boolean exists(String resourceName) {
    return zipFile.getEntry(resourceName) != null;
  }

  @Override
  public Collection<String> list() {
    final List<String> result = new ArrayList<>(zipFile.size());
    final Enumeration<? extends ZipEntry> entries = zipFile.entries();
    while (entries.hasMoreElements()) {
      final ZipEntry zipEntry = entries.nextElement();
      result.add(zipEntry.getName());
    }
    return result;
  }

  @Override
  public void close() throws IOException {
    zipFile.close();
  }

  @Override
  public String toString() {
    return String.format("%s[%s]", getClass().getSimpleName(), zipFile.getName());
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/MojoColumn.java
|
package ai.h2o.mojos.runtime.frame;
import ai.h2o.mojos.runtime.utils.MojoDateTime;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Container for values across a column in a {@link MojoFrame}.
 * <p>
 * MojoColumn are merely wrappers of an array of data, meaning that a column instance does not have a name property.
 * Columns are only "named" when they are in a MojoFrame. These structures are immutable in terms of size, but the
 * values in its underlying array may be modified.
 */
public abstract class MojoColumn implements Serializable {
  private static final Logger log = LoggerFactory.getLogger(MojoColumn.class);

  /**
   * @deprecated
   */
  @Deprecated
  public abstract String debug();

  /**
   * Get the type of data the column instance contains
   *
   * @return The column type
   */
  public abstract Type getType();

  /**
   * Get the array reference that the column instance is wrapping.
   *
   * @return The array that is wrapped by the column. This value should be cast into an array with a type corresponding
   *         to the column instance's `Type` value
   */
  public abstract Object getData();

  /**
   * Retrieve the data in a column as an array of Strings.
   *
   * @return The column data in String format
   */
  public abstract String[] getDataAsStrings();

  /**
   * Get the length of the array wrapped by the column instance
   *
   * @return The length of the internal array
   */
  abstract public int size();

  /** Resizes the column to hold {@code n} values (package-private). */
  abstract void resize(int n);

  /** Fills the column from a list of already-parsed values (package-private). */
  abstract void fillFromParsedListData(List values);

  /**
   * MojoColumn Type Enum
   * ====================
   * <p>
   * An enum for determining the type of array a MojoColumn instance is wrapping.
   */
  public enum Type {
    // Booleans are backed by a byte array; Byte.MIN_VALUE serves as the NA sentinel.
    Bool(byte.class, Byte.MIN_VALUE, false, false, new Class[]{Boolean.class, Byte.class, Short.class, Integer.class, Float.class, Long.class, Double.class}) {
      @Override
      protected Object castFromJavaType(Object value) {
        // NOTE(review): a Boolean input is converted to byte 1/0, but a numeric input is
        // converted to Boolean true/false -- the return types are inconsistent; confirm
        // what downstream code expects before changing this.
        if (value instanceof Boolean) {
          return (Boolean) value ? (byte) 1 : (byte) 0;
        } else {
          byte b = ((Number) value).byteValue();
          // it's Byte type, check if it's either 1 or 0
          if (b == 1) {
            return true;
          } else if (b == 0) {
            return false;
          } else {
            throw new IllegalArgumentException("Only 1 or 0 is allowed when storing data into Boolean column from numeric types.");
          }
        }
      }
      @Override
      protected Object parseImpl(String val) {
        return parseBoolean(val) ? (byte) 1 : (byte) 0;
      }
      @Override
      public boolean isNA(Object val) {
        return val instanceof Byte && (((Byte) val).byteValue() == ((Byte) ona).byteValue());
      }
    },
    Int32(int.class, Integer.MIN_VALUE, true, false, new Class[]{Byte.class, Short.class, Integer.class}) {
      @Override
      protected Object castFromJavaType(Object value) {
        // To ensure the backend type is consistent
        if (value instanceof Number) {
          return ((Number) value).intValue();
        }
        return value;
      }
      @Override
      protected Integer parseImpl(String val) {
        try {
          return Integer.valueOf(val);
        } catch (NumberFormatException nfe) {
          // fallback: accept double-like or boolean strings, truncated to int (logged)
          try {
            final int result = (int) parseDoubleOrBoolean(val);
            log.warn("Int32: value '{}' parsed as {}", val, result);
            return result;
          } catch (NumberFormatException tmp) {
            throw nfe; // report the original integer-parse failure
          }
        }
      }
      @Override
      public boolean isNA(Object val) {
        return val instanceof Integer && (((Integer) val).intValue() == ((Integer) ona).intValue());
      }
    },
    Int64(long.class, Long.MIN_VALUE, true, false, new Class[]{Byte.class, Short.class, Integer.class, Long.class}) {
      @Override
      protected Long parseImpl(String val) {
        try {
          return Long.valueOf(val);
        } catch (NumberFormatException nfe) {
          // fallback: accept double-like or boolean strings, truncated to long (logged)
          try {
            final long result = (long) parseDoubleOrBoolean(val);
            log.warn("Int64: value '{}' parsed as {}", val, result);
            return result;
          } catch (NumberFormatException tmp) {
            throw nfe; // report the original long-parse failure
          }
        }
      }
      @Override
      public boolean isNA(Object val) {
        return val instanceof Long && (((Long) val).longValue() == ((Long) ona).longValue());
      }
    },
    Float32(float.class, Float.NaN, true, true, new Class[]{Byte.class, Short.class, Integer.class, Float.class, Double.class}) {
      @Override
      protected Float parseImpl(String val) {
        try {
          return Float.valueOf(val);
        } catch (NumberFormatException nfe) {
          try {
            return (float) parseDoubleOrBoolean(val);
          } catch (NumberFormatException tmp) {
            throw nfe;
          }
        }
      }
      @Override
      protected Object castFromJavaType(Object value) {
        if (value instanceof Number) {
          return ((Number) value).floatValue();
        }
        return value;
      }
      @Override
      public boolean isNA(Object val) {
        // NaN is the NA sentinel for floating-point columns
        return val instanceof Float && Float.isNaN((Float) val);
      }
    },
    Float64(double.class, Double.NaN, true, true, new Class[]{Byte.class, Short.class, Integer.class, Float.class, Long.class, Double.class}) {
      @Override
      protected Double parseImpl(String val) {
        return parseDoubleOrBoolean(val);
      }
      @Override
      protected Object castFromJavaType(Object value) {
        if (value instanceof Number) {
          return ((Number) value).doubleValue();
        }
        return value;
      }
      @Override
      public boolean isNA(Object val) {
        // NaN is the NA sentinel for floating-point columns
        return val instanceof Double && Double.isNaN((Double) val);
      }
    },
    Str(String.class, null, false, false, new Class[]{String.class, Character.class}) {
      @Override
      protected Object castFromJavaType(Object value) {
        return value != null ? value.toString() : null;
      }
      @Override
      public Object parse(String val) {
        // strip one pair of surrounding double quotes, if present
        if (val != null && val.length() > 1) {
          // TODO: Is this necessary?
          int eidx = val.length() - 1;
          if (val.charAt(0) == '"' && val.charAt(eidx) == '"') {
            val = val.substring(1, eidx);
          }
        }
        return val;
      }
      @Override
      protected Object parseImpl(String val) {
        // unreachable: parse(String) is fully overridden above
        throw new UnsupportedOperationException("This method should not be called!");
      }
      @Override
      public boolean isNA(Object val) {
        // reference comparison against the null sentinel
        return val == ona;
      }
    },
    Time64(MojoDateTime.class, null, false, false, new Class[]{java.sql.Timestamp.class, java.sql.Date.class}) {
      @Override
      protected Object castFromJavaType(Object value) {
        return value != null ? MojoDateTime.parse(value.toString()) : null;
      }
      @Override
      protected Object parseImpl(String val) {
        return MojoDateTime.parse(val);
      }
      @Override
      public boolean isNA(Object val) {
        // reference comparison against the null sentinel
        return val == ona;
      }
    };

    /**
     * This map holds all recognized representations of {@link Type#Bool} values.
     * All other strings are considered parsing error.
     */
    private static final Map<String, Boolean> MOJO2_BOOL_STRINGS = new LinkedHashMap<>();
    static {
      MOJO2_BOOL_STRINGS.put("true", Boolean.TRUE);
      MOJO2_BOOL_STRINGS.put("True", Boolean.TRUE);
      MOJO2_BOOL_STRINGS.put("TRUE", Boolean.TRUE);
      MOJO2_BOOL_STRINGS.put("1", Boolean.TRUE);
      MOJO2_BOOL_STRINGS.put("1.0", Boolean.TRUE);
      MOJO2_BOOL_STRINGS.put("false", Boolean.FALSE);
      MOJO2_BOOL_STRINGS.put("False", Boolean.FALSE);
      MOJO2_BOOL_STRINGS.put("FALSE", Boolean.FALSE);
      MOJO2_BOOL_STRINGS.put("0", Boolean.FALSE);
      MOJO2_BOOL_STRINGS.put("0.0", Boolean.FALSE);
    }

    // Result of parsing a null/empty string; initialized to parse(null), which yields `ona`.
    public final Object NULL;
    // Java class backing this column type's data array.
    public final Class<?> javaclass;
    // Sentinel value representing a missing value (NA) for this type; null for object-backed types.
    public final Object ona;
    public final boolean isnumeric;
    public final boolean isfloat;
    // Java types whose values may be stored into a column of this type (see isAssignableFrom).
    final HashSet<Class> assignableFromJavaTypes;

    Type(Class<?> javaclass, Object ona, boolean isnumeric, boolean isfloat, Class[] assignableFromJavaTypes) {
      this.javaclass = javaclass;
      this.ona = ona;
      this.NULL = parse(null);
      this.isnumeric = isnumeric;
      this.isfloat = isfloat;
      this.assignableFromJavaTypes = new HashSet<>(Arrays.asList(assignableFromJavaTypes));
    }

    /**
     * A custom implementation of parsing boolean values. Unlike {@link Boolean#parseBoolean(String)}, this method
     * throws a {@link NumberFormatException} if the String argument doesn't match any valid value.
     * <p>
     * Valid values are defined in {@link #MOJO2_BOOL_STRINGS} constant.
     *
     * @param val The String to be used to retrieve a boolean value
     * @return The boolean value `val` falls under
     */
    private static boolean parseBoolean(String val) {
      final Boolean result = MOJO2_BOOL_STRINGS.get(val.trim());
      if (result == null) {
        throw new NumberFormatException(String.format("For input string: '%s'", val));
      }
      return result;
    }

    private static double parseDoubleOrBoolean(String s) {
      try {
        return Double.parseDouble(s);
      } catch (NumberFormatException e) {
        try {
          final double result = parseBoolean(s) ? 1.0 : 0.0;
          log.warn("Bool value '{}' parsed as '{}' (double)", s, result); // very doubtful fallback, discussed at https://github.com/h2oai/mojo2/pull/1145
          return result;
        } catch (NumberFormatException ignored) {
          throw e; // report the original double-parse failure
        }
      }
    }

    /**
     * Parses a string into this type's value representation;
     * null or empty input yields the NA sentinel {@code ona}.
     */
    public Object parse(String val) {
      if (val == null || val.isEmpty()) {
        return ona;
      } else {
        return parseImpl(val);
      }
    }

    /** @return true if values of the given Java class may be stored into this column type */
    public boolean isAssignableFrom(Class<?> javaClazz) {
      return assignableFromJavaTypes.contains(javaClazz);
    }

    private void failIfNotAssignableFrom(Class<?> javaClazz) {
      if (!isAssignableFrom(javaClazz)) {
        throw new ClassCastException(String.format("Mojo column of type %s can be assigned Java values only from the following types: %s , Java class on the input was: %s",
            this.name(),
            this.assignableFromJavaTypes.toString(),
            javaClazz.getSimpleName()));
      }
    }

    /** Converts an accepted Java value into this type's backing representation; identity by default. */
    protected Object castFromJavaType(Object value) {
      return value;
    }

    /** Validates the value's Java class and converts it; null passes through unchanged. */
    protected Object fromJavaClass(Object value) {
      if (value != null) {
        failIfNotAssignableFrom(value.getClass());
        return castFromJavaType(value);
      } else {
        return null;
      }
    }

    /** Type-specific string parsing; only called with non-null, non-empty input (see parse). */
    protected abstract Object parseImpl(String val);

    /** @return true if the given value is this type's NA sentinel */
    public abstract boolean isNA(Object val);
  }

  /** Role of a column within a pipeline. */
  public enum Kind {
    Feature,
    Output,
    Interim,
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/MojoColumnFactory.java
|
package ai.h2o.mojos.runtime.frame;
/**
 * Column factory.
 *
 * Creates {@link MojoColumn} instances based on a given column type.
 */
public interface MojoColumnFactory {
  /**
   * Creates an instance of {@link MojoColumn} based on given `type` and `nrows`.
   *
   * @param type type of column to create.
   * @param nrows number of rows which column represents.
   * @return a new column of the requested type sized to {@code nrows} rows
   */
  MojoColumn create(MojoColumn.Type type, int nrows);
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/MojoColumnFactoryService.java
|
package ai.h2o.mojos.runtime.frame;
import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;
import ai.h2o.mojos.runtime.utils.ClassLoaderUtils;
/**
 * Service to obtain instance of {@link MojoColumnFactory}.
 *
 * The service is using Java SPI to obtain all classes
 * implementing {@link MojoColumnFactory} interface.
 *
 * https://en.wikipedia.org/wiki/Initialization-on-demand_holder_idiom#Example_Java_Implementation
 */
final class MojoColumnFactoryService {

  /** Lazy-initialization holder (initialization-on-demand holder idiom). */
  private static class InstanceHolder {
    // fix: the holder idiom requires the field to be final so it is assigned exactly once
    // under the class-initialization guarantee and never reassigned afterwards
    private static final MojoColumnFactoryService INSTANCE = new MojoColumnFactoryService();
  }

  public static MojoColumnFactoryService getInstance() {
    return InstanceHolder.INSTANCE;
  }

  private final MojoColumnFactory mojoColumnFactory;
  // Lookup failures are deferred: construction never throws, the error surfaces on first use.
  private final RuntimeException error;

  private MojoColumnFactoryService() {
    ServiceLoader<MojoColumnFactory> loader = ServiceLoader.load(
        MojoColumnFactory.class,
        ClassLoaderUtils.getPreferredSpiClassLoader(MojoColumnFactory.class));
    MojoColumnFactory[] factories = getAll(loader);
    if (factories.length == 0) {
      mojoColumnFactory = null;
      error = new RuntimeException("Cannot find MOJO column factory implementation! Check the classpath if it contains mojo2-runtime-impl!");
    } else if (factories.length > 1) {
      mojoColumnFactory = null;
      error = new RuntimeException("Found multiple MOJO column factories implementation backends, but expected only one! Check the classpath if it contains mojo2-runtime-impl!");
    } else {
      error = null;
      mojoColumnFactory = factories[0];
    }
  }

  /**
   * @return the single discovered factory
   * @throws RuntimeException if zero or multiple implementations were found on the classpath
   */
  public MojoColumnFactory getMojoColumnFactory() {
    if (mojoColumnFactory != null) {
      return mojoColumnFactory;
    } else {
      throw error;
    }
  }

  /**
   * Get all implementors of {@link MojoColumnFactory} interface.
   *
   * @return array of objects implementing {@link MojoColumnFactory} interface. Empty array
   *         if no implementor is found.
   */
  private static MojoColumnFactory[] getAll(ServiceLoader<MojoColumnFactory> loader) {
    List<MojoColumnFactory> l = new ArrayList<>();
    for (MojoColumnFactory mcf : loader) {
      l.add(mcf);
    }
    return l.toArray(new MojoColumnFactory[0]);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/MojoFrame.java
|
package ai.h2o.mojos.runtime.frame;
/**
 * Container for {@link MojoColumn}s as well as some additional meta data. MojoFrames are mostly immutable; it's column/row
 * count, column references, and associated names for said references cannot be modified once created. The data of a
 * individual column can be modified, however, through the `getColumnData` method.
 *
 * MojoFrames can only be constructed through a MojoFrameBuilder (see {@link MojoFrameBuilder} for more details).
 */
public class MojoFrame {
  private final MojoFrameMeta meta;
  private final MojoColumn[] columns;
  private final int rowCount;

  /**
   * @deprecated
   */
  @Deprecated
  public void debug() {
    for (int col = 0; col < columns.length; col++) {
      System.out.printf("%s\n %d: %s\n", meta.getColumnName(col), col, columns[col].debug());
    }
  }

  MojoFrame(MojoFrameMeta meta, MojoColumn[] columns, int nrows) {
    // Since a frame should be built by a Frame builder we shouldn't need to ensure that nrows
    // matches the length of each column
    this.meta = meta;
    this.columns = columns;
    this.rowCount = nrows;
  }

  public MojoFrameMeta getMeta() {
    return meta;
  }

  //----------------------------------------------------------------------------
  // Public API
  //----------------------------------------------------------------------------

  /**
   * Get the number of columns in the frame
   * @return The number of columns in the frame
   */
  public int getNcols() {
    return columns.length;
  }

  /**
   * Get the number of rows of each column in the MojoFrame
   * @return The number of rows in the frame
   */
  public int getNrows() {
    return rowCount;
  }

  /**
   * Get the names associated with each column in the frame.
   * @return An array containing the given column names for each index
   */
  public String[] getColumnNames() {
    final String[] names = new String[columns.length];
    for (int col = 0; col < names.length; col++) {
      names[col] = meta.getColumnName(col);
    }
    return names;
  }

  /**
   * Get the name of a column at a particular index
   * @param index position of the column
   * @return The name of the column at the given index
   */
  public String getColumnName(int index) {
    return meta.getColumnName(index);
  }

  /**
   * Get the type of a column at a particular index
   * @param index position of the column
   * @return The type of the column at the given index
   */
  public MojoColumn.Type getColumnType(int index) {
    return meta.getColumnType(index);
  }

  /**
   * Get the types of each column in the frame
   * @return An array containing the column types for each index
   */
  public MojoColumn.Type[] getColumnTypes() {
    final MojoColumn.Type[] types = new MojoColumn.Type[columns.length];
    for (int col = 0; col < types.length; col++) {
      types[col] = columns[col].getType();
    }
    return types;
  }

  /**
   * Get the column instance at a particular index
   * @param index position of the column
   * @return The column instance at the given index
   */
  public MojoColumn getColumn(int index) {
    return columns[index];
  }

  /**
   * Get the data stored in the column at a particular index
   * @param index position of the column
   * @return The data of a column at the given index. This will be an array of whatever java type the column's Type
   *         is intended to represent (see {@link MojoColumn.Type})
   */
  public Object getColumnData(int index) {
    return columns[index].getData();
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/MojoFrameBuilder.java
|
package ai.h2o.mojos.runtime.frame;
import ai.h2o.mojos.runtime.api.MojoColumnMeta;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The builder is used for constructing a {@link MojoFrame}. A frame is constructed by the following procedure:
* <p>
* 1. Get a MojoRowBuilder instance from the frame builder
* 2. Construct a row from the MojoRowBuilder
* 3. Append the resulting row to the frame builder
* 4. Repeat steps 1-3 until all rows are constructed
* 5. Construct the MojoFrame
* <p>
* See {@link MojoRowBuilder}
*/
public class MojoFrameBuilder {
private static final Logger log = LoggerFactory.getLogger(MojoFrameBuilder.class);
private final Map<String, StringConverter> stringConvertersMap;
public static final StringConverter DEFAULT_CONVERTER = new StringConverter() {
@Override
public Object convert(String s, MojoColumn.Type outputType) {
return outputType.parse(s);
}
};
private final MojoFrameMeta _meta;
private final Set<String> _missingValues;
private final MojoColumnBuilder[] _columnBuilders;
private final StringConverter[] _stringConverters;
/**
 * Constructor for a MojoFrameBuilder.
 *
 * No missing-value strings and no custom string converters are registered.
 *
 * @param frameMeta The meta data for the resulting frame (see {@link MojoFrameMeta})
 */
public MojoFrameBuilder(final MojoFrameMeta frameMeta) {
  this(frameMeta, Collections.<String>emptyList(), Collections.<String, StringConverter>emptyMap());
}
/**
 * Constructor for a MojoFrameBuilder.
 *
 * No custom string converters are registered.
 *
 * @param frameMeta The meta data for the resulting frame (see {@link MojoFrameMeta})
 * @param missingValues List of string values which are interpreted as missing value.
 */
public MojoFrameBuilder(final MojoFrameMeta frameMeta, final Set<String> missingValues) {
  this(frameMeta, missingValues, Collections.<String, StringConverter>emptyMap());
}
/**
 * Constructor for a MojoFrameBuilder.
 *
 * @param frameMeta The meta data for the resulting frame (see {@link MojoFrameMeta})
 * @param missingValues List of string values which are interpreted as missing value.
 * @param stringConverters A `Map` that associates column names to their respective {@link StringConverter}.
 *                         `DEFAULT_CONVERTER` is used if a column's name is not found in the `Map`
 */
public MojoFrameBuilder(MojoFrameMeta frameMeta, Collection<String> missingValues,
                        Map<String, StringConverter> stringConverters) {
  this(frameMeta, missingValues, stringConverters, null);
}
/**
 * Shared-columns-aware constructor backing all public constructors.
 *
 * @param frameMeta frame layout (column names and types)
 * @param missingValues string values interpreted as missing; null is treated as empty
 * @param stringConverters per-column string converters; may be null when {@code shared} is
 *                         given (the shared builder's converters are reused), otherwise a
 *                         null map is treated as empty
 * @param shared optional builder whose column builders/converters are reused for every
 *               column of the same name instead of being created anew
 */
private MojoFrameBuilder(MojoFrameMeta frameMeta, Collection<String> missingValues,
                         Map<String, StringConverter> stringConverters, MojoFrameBuilder shared) {
  _meta = frameMeta;
  if (missingValues != null) {
    _missingValues = new HashSet<>(missingValues);
  } else {
    _missingValues = new HashSet<>(0);
  }
  _columnBuilders = new MojoColumnBuilder[frameMeta.size()];
  _stringConverters = new StringConverter[frameMeta.size()];
  int i = 0;
  if (shared != null && stringConverters == null) {
    stringConverters = shared.stringConvertersMap;
  }
  if (stringConverters == null) {
    // robustness fix: a caller passing a null map (without a shared builder) used to NPE
    // at getOrDefault below; treat null the same as an empty map
    stringConverters = Collections.emptyMap();
  }
  log.trace("Constructing MojoFrameBuilder[{}]", frameMeta.size()); // typo fix: was "Contructing"
  for (final MojoColumnMeta column : frameMeta.getColumns()) {
    final String name = column.getColumnName();
    if (shared != null) {
      // if shared contains the same name, use its info
      final Integer sharedColumnIndex = shared._meta.indexOf(name);
      if (sharedColumnIndex != null) {
        log.trace("shared: {} ({})", name, column.getColumnType());
        _columnBuilders[i] = shared._columnBuilders[sharedColumnIndex];
        _stringConverters[i] = shared._stringConverters[sharedColumnIndex];
        i++;
        continue;
      }
      log.trace("not shared: {} ({})", name, column.getColumnType());
    }
    _columnBuilders[i] = new MojoColumnBuilder(column.getColumnType());
    _stringConverters[i] = stringConverters.getOrDefault(name, DEFAULT_CONVERTER);
    i++;
  }
  this.stringConvertersMap = stringConverters;
}
/**
 * Use when you need some output columns to just expose input columns.
 *
 * The new builder reuses {@code shared}'s missing-value set and, for every column whose
 * name exists in {@code shared}, its column builder and string converter.
 *
 * @param frameMeta output columns descriptor
 * @param shared the input frame builder to use for matching columns (= same name and type)
 * @return output frame builder
 */
public static MojoFrameBuilder withSharedColumns(MojoFrameMeta frameMeta, MojoFrameBuilder shared) {
  return new MojoFrameBuilder(frameMeta, shared._missingValues, null, shared);
}
/**
 * Create a MojoFrame with `nrows` rows based on the meta data provided. The values in this frame will all be NA.
 *
 * @param meta The meta data of the frame to be constructed
 * @param nrows The number of rows
 * @return A new MojoFrame filled with NA values
 */
public static MojoFrame getEmpty(MojoFrameMeta meta, int nrows) {
  final MojoColumnFactory factory = MojoColumnFactoryService.getInstance().getMojoColumnFactory();
  final int ncols = meta.size();
  final MojoColumn[] columns = new MojoColumn[ncols];
  for (int col = 0; col < ncols; col++) {
    columns[col] = factory.create(meta.getColumnType(col), nrows);
  }
  return new MojoFrame(meta, columns, nrows);
}
/**
 * Creates a MojoFrame from an array of MojoColumns as specified by the provided meta data.
 * Validates that the column count matches the meta, and that each non-null column agrees
 * with column 0 on row count and with the meta on type.
 *
 * @param meta the meta data to be used as a template
 * @param columns the columns to be used in the resulting frame
 * @return a new MojoFrame
 */
public static MojoFrame fromColumns(MojoFrameMeta meta, MojoColumn[] columns) {
  if (columns.length != meta.size()) {
    throw new IllegalArgumentException(String.format(
        "Number of columns(%d) does not match size of frame meta (%d)", columns.length, meta.size()));
  }
  if (columns.length == 0) {
    return new MojoFrame(meta, columns, 0);
  }
  final int nrows = columns[0].size();
  final String firstColumnName = meta.getColumnName(0);
  int i = 0;
  for (MojoColumnMeta columnMeta : meta.getColumns()) {
    final MojoColumn column = columns[i];
    if (column != null) {
      if (column.size() != nrows) {
        throw new IllegalArgumentException(String.format(
            "Number of rows in columns %d ('%s') and 0 ('%s') do not match (%d != %d)",
            i, columnMeta.getColumnName(), firstColumnName, column.size(), nrows));
      }
      if (column.getType() != columnMeta.getColumnType()) {
        throw new IllegalArgumentException(String.format(
            "Type of column %d ('%s') does not match frame meta: %s != %s",
            i, columnMeta.getColumnName(), column.getType(), columnMeta.getColumnType()));
      }
    }
    i++;
  }
  return new MojoFrame(meta, columns, nrows);
}
/**
 * Appends a row built from the current state of a MojoRowBuilder, then resets the builder
 * so it can be reused for the next row.
 *
 * @param rowBuilder the MojoRowBuilder containing the row to be constructed and appended
 * @return the given MojoRowBuilder instance with its state reset
 */
public MojoRowBuilder addRow(MojoRowBuilder rowBuilder) {
  final MojoRow row = rowBuilder.toMojoRow();
  addRow(row);
  rowBuilder.clear();
  return rowBuilder;
}
// Appends a single row by pushing each value onto its column builder.
// Throws IllegalArgumentException when the row width differs from the frame width.
void addRow(MojoRow row) {
  final Object[] rowValues = row.getValues();
  if (rowValues.length != _columnBuilders.length) {
    throw new IllegalArgumentException("Row argument does not have the same column count as frame");
  }
  int col = 0;
  for (MojoColumnBuilder builder : _columnBuilders) {
    builder.pushValue(rowValues[col]);
    col++;
  }
}
/**
 * Creates a new, non-strict MojoRowBuilder for constructing rows for this frame builder.
 * Each call creates a fresh instance.
 *
 * @return a MojoRowBuilder for constructing rows for this frame builder
 */
public MojoRowBuilder getMojoRowBuilder() {
  final boolean strictMode = false;
  return getMojoRowBuilder(strictMode);
}
/**
 * Creates a new MojoRowBuilder for constructing rows for this frame builder.
 * Each call creates a fresh instance.
 *
 * @param strictMode whether the created MojoRowBuilder should be in "strict" mode (see {@link MojoRowBuilder})
 * @return a MojoRowBuilder for constructing rows for this frame builder
 */
public MojoRowBuilder getMojoRowBuilder(boolean strictMode) {
  final Map<String, Integer> namesMap = _meta.getColumnNamesMap();
  final MojoColumn.Type[] types = _meta.getColumnTypes();
  return new MojoRowBuilder(namesMap, types, _missingValues, _stringConverters, strictMode);
}
/**
 * Creates a MojoFrame from the current state of this builder. The row count is taken
 * from the first column builder (0 when there are no columns).
 *
 * @return the constructed MojoFrame
 */
public MojoFrame toMojoFrame() {
  if (_columnBuilders.length == 0) {
    return toMojoFrame(0);
  }
  return toMojoFrame(_columnBuilders[0].size());
}
// Materializes the accumulated columns into a MojoFrame, resizing each column to nrows.
public MojoFrame toMojoFrame(int nrows) {
  final MojoColumn[] built = new MojoColumn[_columnBuilders.length];
  for (int col = 0; col < built.length; col++) {
    final MojoColumn column = _columnBuilders[col].toMojoColumn();
    column.resize(nrows);
    built[col] = column;
  }
  return new MojoFrame(_meta, built, nrows);
}
// Accumulates values for a single column and materializes them into a MojoColumn.
private static class MojoColumnBuilder {
  // Target type of the column being built.
  private final MojoColumn.Type colType;
  // Values collected so far; nulls are stored as the type-specific NA sentinel.
  private final List<Object> buffered = new ArrayList<>();

  MojoColumnBuilder(MojoColumn.Type type) {
    this.colType = type;
  }

  void pushValue(Object value) {
    buffered.add(value != null ? value : colType.NULL);
  }

  MojoColumn toMojoColumn() {
    final MojoColumnFactory factory = MojoColumnFactoryService.getInstance().getMojoColumnFactory();
    final MojoColumn column = factory.create(colType, buffered.size());
    column.fillFromParsedListData(buffered);
    return column;
  }

  int size() {
    return buffered.size();
  }
}
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/MojoFrameMeta.java
|
package ai.h2o.mojos.runtime.frame;
import ai.h2o.mojos.runtime.api.MojoColumnMeta;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * MojoFrame meta data
 * ===========================
 * <p>
 * Container for meta data of a {@link MojoFrame}. This immutable class is used as a template
 * for constructing MojoFrames in addition to providing names to the columns of an existing
 * MojoFrame.
 */
public class MojoFrameMeta implements Serializable {
  // Ordered column descriptors backing this meta object (stored as-is, not copied).
  private final List<MojoColumnMeta> columns;
  // Name -> index lookup; LinkedHashMap keeps insertion order for getColumnNames().
  private final Map<String, Integer> columnNameToIndex = new LinkedHashMap<>();
  // Column-identity -> index lookup; only populated by the full constructor.
  private final Map<MojoColumnMeta, Integer> columnToIndex = new LinkedHashMap<>();

  /**
   * @deprecated use {@link #MojoFrameMeta(List)}, optionally with help of {@link MojoColumnMeta#toColumns(String[], MojoColumn.Type[], MojoColumn.Kind)} instead.
   */
  @Deprecated
  public MojoFrameMeta(final String[] names, final MojoColumn.Type[] types) {
    this(MojoColumnMeta.toColumns(names, types, MojoColumn.Kind.Output));
  }

  public MojoFrameMeta(final List<MojoColumnMeta> columns) {
    this.columns = columns;
    int pos = 0;
    for (MojoColumnMeta col : columns) {
      columnNameToIndex.put(col.getColumnName(), pos);
      columnToIndex.put(col, pos);
      pos++;
    }
  }

  /**
   * With this constructor, the full set of columns is used but only the columns at the given
   * indices can be retrieved by name.
   * NOTE(review): columnToIndex is not populated here, so {@link #indexOf(MojoColumnMeta)}
   * returns null for metas built this way — confirm this is intended.
   *
   * @param columns full list of column descriptors
   * @param indices indices of the columns addressable by name
   */
  public MojoFrameMeta(final List<MojoColumnMeta> columns, Collection<Integer> indices) {
    this.columns = columns;
    for (int index : indices) {
      columnNameToIndex.put(columns.get(index).getColumnName(), index);
    }
  }

  /**
   * Resolves an array of column names to their indices.
   *
   * @param exposedColumnNames column names to convert (null yields null)
   * @return indices of the named columns
   */
  public int[] namesToIndices(final String[] exposedColumnNames) {
    if (exposedColumnNames == null) {
      return null;
    }
    final int[] resolved = new int[exposedColumnNames.length];
    for (int i = 0; i < resolved.length; i++) {
      resolved[i] = getColumnIndex(exposedColumnNames[i]);
    }
    return resolved;
  }

  /**
   * @param indices column indices to keep
   * @return frame meta containing only the selected indices
   */
  public MojoFrameMeta subFrame(int[] indices) {
    final List<MojoColumnMeta> selected = new ArrayList<>();
    for (int index : indices) {
      selected.add(getColumns().get(index));
    }
    return new MojoFrameMeta(selected);
  }

  /**
   * Makes a MojoFrameMeta instance with no columns.
   *
   * @return a MojoFrameMeta instance with no columns
   */
  public static MojoFrameMeta getEmpty() {
    return new MojoFrameMeta(Collections.<MojoColumnMeta>emptyList());
  }

  /**
   * Gets the index of the column named {@code columnName}.
   *
   * @param columnName the name of the column
   * @return the index of the column
   * @throws IllegalArgumentException when the name is unknown
   */
  public int getColumnIndex(String columnName) {
    final Integer index = columnNameToIndex.get(columnName);
    if (index == null) {
      throw new IllegalArgumentException(String.format("Column '%s' was not found in this frame with %d columns.", columnName, size()));
    }
    return index;
  }

  /**
   * @deprecated Lookup by name is dangerous, as names are not unique. Use {@link #indexOf(MojoColumnMeta)} instead.
   */
  @Deprecated
  public Integer indexOf(String columnName) {
    return columnNameToIndex.get(columnName);
  }

  // Returns the index of the given column object, or null when unknown (see indices constructor).
  public Integer indexOf(MojoColumnMeta column) {
    return columnToIndex.get(column);
  }

  /**
   * Gets the name of the column at a particular index.
   *
   * @param index the index of the column
   * @return the name of the column
   */
  public String getColumnName(int index) {
    return columns.get(index).getColumnName();
  }

  /**
   * Gets the type of the column at a particular index.
   *
   * @param index the index of a column
   * @return the type of the column
   */
  public MojoColumn.Type getColumnType(int index) {
    return columns.get(index).getColumnType();
  }

  /**
   * Gets the type of the column named {@code columnName}.
   *
   * @param columnName the name of the column
   * @return the type of the column
   */
  public MojoColumn.Type getColumnType(String columnName) {
    return getColumnType(getColumnIndex(columnName));
  }

  /**
   * Checks whether meta data exists for a column with a particular name.
   *
   * @param columnName the name of the column
   * @return true when the name exists in this instance, false otherwise
   */
  public boolean contains(String columnName) {
    return columnNameToIndex.containsKey(columnName);
  }

  /**
   * Gets the number of columns in this instance.
   *
   * @return the number of columns in this instance
   */
  public int size() {
    return columns.size();
  }

  // Exposes the internal name->index map to the frame/row builders.
  protected Map<String, Integer> getColumnNamesMap() {
    return columnNameToIndex;
  }

  /**
   * @deprecated use {@link #getColumns()} instead
   */
  @Deprecated
  public String[] getColumnNames() {
    // Intentionally backed by the name map (not the column list): with the indices
    // constructor only the exposed names are returned.
    return columnNameToIndex.keySet().toArray(new String[0]);
  }

  /**
   * @deprecated use {@link #getColumns()} instead
   */
  @Deprecated
  public MojoColumn.Type[] getColumnTypes() {
    final MojoColumn.Type[] types = new MojoColumn.Type[columns.size()];
    for (int i = 0; i < types.length; i++) {
      types[i] = columns.get(i).getColumnType();
    }
    return types;
  }

  public List<MojoColumnMeta> getColumns() {
    return columns;
  }

  @Override
  public String toString() {
    return niceToString(columns);
  }

  // Renders a compact summary, e.g. "MojoFrameMeta{cols:5;3xFloat64,2xStr}".
  static String niceToString(final List<MojoColumnMeta> columns) {
    final StringBuilder out = new StringBuilder("MojoFrameMeta{cols:");
    out.append(columns.size());
    final Map<String, Integer> countsByType = new LinkedHashMap<>();
    for (MojoColumnMeta col : columns) {
      countsByType.merge(col.getColumnType().toString(), 1, Integer::sum);
    }
    char sep = ';';
    for (Map.Entry<String, Integer> entry : countsByType.entrySet()) {
      out.append(String.format("%s%dx%s", sep, entry.getValue(), entry.getKey()));
      sep = ',';
    }
    return out.append("}").toString();
  }

  @SuppressWarnings("unused")
  public static String debugIndicesToNames(List<MojoColumnMeta> columns, int[] indices) {
    return Arrays.stream(indices)
        .mapToObj(idx -> idx < columns.size() ? columns.get(idx).getColumnName() : "IndexTooBig(" + idx + ")")
        .collect(Collectors.joining(","));
  }

  @SuppressWarnings("unused")
  public static String debugIndicesToNames(List<MojoColumnMeta> columns, Collection<Integer> indices) {
    return indices.stream()
        .map(idx -> idx < columns.size() ? columns.get(idx).getColumnName() : "IndexTooBig(" + idx + ")")
        .collect(Collectors.joining(","));
  }

  @SuppressWarnings("unused")
  public String debugIndicesToNames(int[] indices) {
    return debugIndicesToNames(columns, indices);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/MojoRowBuilder.java
|
package ai.h2o.mojos.runtime.frame;
import java.util.Collection;
import java.util.Map;
/**
 * The builder is used to build rows for a {@link MojoFrameBuilder}. Rows are constructed by adding values in String
 * or typed form to a builder instance. The rows can then be added to a MojoFrameBuilder by calling
 * {@link MojoFrameBuilder#addRow(MojoRowBuilder)}.
 * {@code MojoRowBuilder}s can be initialized in a "strict" mode, where an exception is thrown whenever there is an
 * attempt to set a value to a column name that is not defined in the builder. Additionally, "strict" {@code MojoRowBuilder}s
 * will throw an exception if there is an attempt to create a row without every column being set a value beforehand.
 */
public class MojoRowBuilder {
  // Column name -> index of the column in the row.
  private final Map<String, Integer> columnNamesMap;
  // Target type of each column, by index.
  private final MojoColumn.Type[] columnTypes;
  // String tokens interpreted as missing (NA) by setValue(int, String).
  private final Collection<String> missingValues;
  // Per-column converters used to parse string input into the column type.
  private final StringConverter[] stringConverters;
  // When true, unknown names and incomplete rows raise exceptions.
  private final boolean strict;
  private Object[] values;
  private boolean[] setValues;

  MojoRowBuilder(Map<String, Integer> columnNamesMap, MojoColumn.Type[] columnTypes, Collection<String> missingValues,
      StringConverter[] stringConverters, boolean strict) {
    this.columnNamesMap = columnNamesMap;
    this.columnTypes = columnTypes;
    this.missingValues = missingValues;
    this.stringConverters = stringConverters;
    this.strict = strict;
    values = new Object[this.columnTypes.length];
    setValues = new boolean[values.length];
  }

  /**
   * Resolves a column name to its index, applying the strict-mode contract shared by all
   * name-based setters: unknown name returns null in lenient mode and throws in strict mode.
   * (Deduplicates the lookup previously repeated in every setX(String, ...) method.)
   *
   * @param name the column name to resolve
   * @return the column index, or null when the name is unknown and this builder is lenient
   * @throws IllegalArgumentException in strict mode when the name is unknown
   */
  private Integer resolveIndex(String name) {
    final Integer idx = columnNamesMap.get(name);
    if (idx == null && isStrict()) {
      // typo fix: message previously read "does not exist is this MojoRowBuilder"
      throw new IllegalArgumentException("Column \"" + name + "\" does not exist in this MojoRowBuilder");
    }
    return idx;
  }

  /**
   * Set a value to the position associated with column {@code name} in the row.
   * If this row builder instance is in "strict" mode, an {@link IllegalArgumentException} is thrown if {@code name}
   * is not found in this builder. Otherwise nothing happens.
   * <p>
   * The {@code value} is specified as a string and the call will try to convert the value to
   * the actual column type.
   *
   * @param name The name of the column to where the value should be set
   * @param value The value to be set
   */
  public MojoRowBuilder setValue(String name, String value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setValue(idx, value);
    }
    return this;
  }

  /**
   * Set a value to an index in the row. A null value or a value listed among the configured
   * missing-value tokens is stored as NA; anything else is parsed by the column's converter.
   *
   * @param idx The index where the value should be set
   * @param value The value to be set
   * @throws ArrayIndexOutOfBoundsException when the index is out of range
   */
  public MojoRowBuilder setValue(int idx, String value) {
    if (idx < 0 || idx >= values.length) {
      // typo fix: message previously read "is out the scope"
      throw new ArrayIndexOutOfBoundsException("Index " + idx + " is out of the scope of this MojoRowBuilder");
    }
    final Object convertedValue;
    if (value == null || missingValues.contains(value)) {
      convertedValue = null;
    } else {
      convertedValue = stringConverters[idx].convert(value, columnTypes[idx]);
    }
    values[idx] = convertedValue;
    setValues[idx] = true;
    return this;
  }

  /** Sets a Boolean value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setBool(String name, Boolean value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setBool(idx, value);
    }
    return this;
  }

  /** Sets a Character value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setChar(String name, Character value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setChar(idx, value);
    }
    return this;
  }

  /** Sets a Byte value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setByte(String name, Byte value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setByte(idx, value);
    }
    return this;
  }

  /** Sets a Short value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setShort(String name, Short value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setShort(idx, value);
    }
    return this;
  }

  /** Sets an Integer value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setInt(String name, Integer value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setInt(idx, value);
    }
    return this;
  }

  /** Sets a Long value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setLong(String name, Long value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setLong(idx, value);
    }
    return this;
  }

  /** Sets a Float value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setFloat(String name, Float value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setFloat(idx, value);
    }
    return this;
  }

  /** Sets a Double value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setDouble(String name, Double value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setDouble(idx, value);
    }
    return this;
  }

  /** Sets a String value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setString(String name, String value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setString(idx, value);
    }
    return this;
  }

  /** Sets a Date value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setDate(String name, java.sql.Date value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setDate(idx, value);
    }
    return this;
  }

  /** Sets a Timestamp value by column name; see {@link #setValue(String, String)} for strict-mode behavior. */
  public MojoRowBuilder setTimestamp(String name, java.sql.Timestamp value) {
    final Integer idx = resolveIndex(name);
    if (idx != null) {
      setTimestamp(idx, value);
    }
    return this;
  }

  // Shared implementation of the typed index-based setters: converts the boxed Java value
  // via the column type and marks the slot as set.
  private MojoRowBuilder setJavaValue(int idx, Object value) {
    if (idx < 0 || idx >= values.length) {
      throw new ArrayIndexOutOfBoundsException("Index " + idx + " is out of the scope of this MojoRowBuilder");
    }
    values[idx] = columnTypes[idx].fromJavaClass(value);
    setValues[idx] = true;
    return this;
  }

  public MojoRowBuilder setBool(int idx, Boolean value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setByte(int idx, Byte value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setShort(int idx, Short value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setChar(int idx, Character value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setInt(int idx, Integer value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setLong(int idx, Long value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setFloat(int idx, Float value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setDouble(int idx, Double value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setString(int idx, String value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setDate(int idx, java.sql.Date value) {
    return setJavaValue(idx, value);
  }

  public MojoRowBuilder setTimestamp(int idx, java.sql.Timestamp value) {
    return setJavaValue(idx, value);
  }

  /**
   * Set the entire row to `values`.
   * <p>
   * The parameter `values` needs to contain actual objects matching the types of columns.
   *
   * @param values The array of values to be set into the row.
   */
  public MojoRowBuilder setValues(Object[] values) {
    if (values.length != this.values.length)
      throw new IllegalArgumentException("Length of values argument does not match size of MojoRowBuilder! " +
          "Expected: " + this.values.length + ", but got: " + values.length);
    System.arraycopy(values, 0, this.values, 0, this.values.length);
    // Fix: mark every column as set, so strict-mode toMojoRow() accepts a row populated
    // via this bulk setter (previously the flags stayed false and toMojoRow() threw).
    for (int i = 0; i < setValues.length; i++) {
      setValues[i] = true;
    }
    return this;
  }

  // Materializes the current state as a MojoRow. In strict mode, throws when any column
  // has not been assigned, listing all unset indices.
  MojoRow toMojoRow() {
    if (isStrict()) {
      StringBuilder missing = null;
      for (int i = 0; i < setValues.length; i++) {
        if (!setValues[i]) {
          if (missing == null) {
            missing = new StringBuilder("[").append(i);
          } else {
            missing.append(", ").append(i);
          }
        }
      }
      if (missing != null) {
        missing.append(']');
        throw new IllegalStateException("Columns at indices " + missing + " have not been set");
      }
    }
    return new MojoRow(values);
  }

  /**
   * Clear the state of the row builder
   */
  public void clear() {
    values = new Object[values.length];
    setValues = new boolean[setValues.length];
  }

  /**
   * Get the number of values a row resulting from this builder would have
   *
   * @return The number of values
   */
  public int size() {
    return values.length;
  }

  /**
   * Determine if this row builder is in "strict" mode.
   * A "strict" row builder will throw an exception if a value is attempted to be set to a column whose name is not
   * associated with an index. Additionally, {@link #toMojoRow()} will throw an exception unless all values have been
   * set.
   *
   * @return {@code true} if this row builder instance is in "strict" mode; {@code false} otherwise.
   */
  public boolean isStrict() {
    return strict;
  }

  /**
   * Determine whether the column associated with name {@code name} has had a value set.
   * Unlike the setters, an unknown name always throws here, regardless of strict mode.
   *
   * @param name The name of the column
   * @return {@code true} if the column associated with name {@code name} has had a value set; {@code false} otherwise.
   */
  public boolean isSet(String name) {
    Integer idx = columnNamesMap.get(name);
    if (idx != null) {
      return isSet(idx);
    }
    // typo fix: message previously read "does not exist is this MojoRowBuilder"
    throw new IllegalArgumentException("Column \"" + name + "\" does not exist in this MojoRowBuilder");
  }

  /**
   * Determine whether the column at index {@code idx} has had a value set.
   *
   * @param idx The index of the column
   * @return {@code true} if the column at index {@code idx} has had a value set; {@code false} otherwise.
   */
  public boolean isSet(int idx) {
    if (idx >= 0 && idx < size()) {
      return setValues[idx];
    }
    throw new ArrayIndexOutOfBoundsException("Index " + idx + " is out of the scope of this MojoRowBuilder");
  }

  /**
   * Determine if there is a column associated with name {@code name}.
   *
   * @param name the column name to look up
   * @return {@code true} if there exists a column associated with name {@code name}; {@code false} otherwise.
   */
  public boolean containsName(String name) {
    return columnNamesMap.containsKey(name);
  }
}
// Package-private carrier for a single row's values, handed from MojoRowBuilder
// to MojoFrameBuilder. The array is held by reference, not copied.
class MojoRow {
  private final Object[] values;

  MojoRow(Object[] values) {
    this.values = values;
  }

  public int size() {
    return values.length;
  }

  Object[] getValues() {
    return values;
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/StringConverter.java
|
package ai.h2o.mojos.runtime.frame;
/**
 * {@code StringConverter} is an interface for converting {@code String} values into a value
 * matching a {@link MojoColumn.Type}. Marked {@code @FunctionalInterface} so implementations
 * can be supplied as lambdas or method references.
 */
@FunctionalInterface
public interface StringConverter {
  /**
   * Convert a `String` into the designated output type.
   * @param s The `String` to convert
   * @param outputType The type into which `s` should be converted
   * @return The converted Object with a type corresponding to `outputType`
   */
  Object convert(String s, MojoColumn.Type outputType);
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/frame/package-info.java
|
/**
* The package exposes classes to support frame and row creation.
*
 * The package exposes the following concepts:
*
* - {@link ai.h2o.mojos.runtime.frame.MojoFrame}
* - {@link ai.h2o.mojos.runtime.frame.MojoFrameBuilder}
* - {@link ai.h2o.mojos.runtime.frame.MojoRowBuilder}
*
* The {@link ai.h2o.mojos.runtime.frame.MojoFrame} represents data which are
* transformed by {@link ai.h2o.mojos.runtime.MojoPipeline}. The frame consists of
* columns {@link ai.h2o.mojos.runtime.frame.MojoColumn},
* each column has defined type {@link ai.h2o.mojos.runtime.frame.MojoColumn.Type}.
*
* The {@link ai.h2o.mojos.runtime.frame.MojoFrame}
* is constructed with help of {@link ai.h2o.mojos.runtime.frame.MojoFrameBuilder}.
 * The builder which helps to create an input frame for a given MOJO pipeline can be obtained by calling
* {@link ai.h2o.mojos.runtime.MojoPipeline#getInputFrameBuilder()}.
*
* The frame builder provides a method {@link ai.h2o.mojos.runtime.frame.MojoFrameBuilder#getMojoRowBuilder()}
* to obtain an instance of {@link ai.h2o.mojos.runtime.frame.MojoRowBuilder}. The row builder helps
 * to create a representation of a row. The row constructed by the row builder is appended to the frame builder
* by calling method {@link ai.h2o.mojos.runtime.frame.MojoFrameBuilder#addRow(ai.h2o.mojos.runtime.frame.MojoRowBuilder)}.
*
* The row can be constructed in two ways:
*
* - by passing a string representation of value for a particular column to method {@link ai.h2o.mojos.runtime.frame.MojoRowBuilder#setValue(int, java.lang.String)}.
 * This method internally transforms the string representation into the actual column type
 * - by using strongly typed methods `setX(int columnIndex, X value)` where X is a Java type compatible with the MOJO column type.
 * For example, {@link ai.h2o.mojos.runtime.frame.MojoRowBuilder#setBool(int, java.lang.Boolean)} allows setting a boolean value for a given column.
*
 * Note: Instances of {@link ai.h2o.mojos.runtime.frame.MojoRowBuilder} can be reused after calling {@link ai.h2o.mojos.runtime.frame.MojoRowBuilder#clear()}
*
* ---
*
* Example:
*
* Given a pipeline `modelPipeline` we can construct an input frame in the following way:
*
* ```java
* // Get an input frame builder for given modelPipeline
* MojoFrameBuilder frameBuilder = modelPipeline.getInputFrameBuilder();
*
* // Create a new row builder
* MojoRowBuilder rowBuilder = frameBuilder.getMojoRowBuilder();
* rowBuilder.setValue("AGE", "68");
* rowBuilder.setValue("RACE", "2");
* rowBuilder.setValue("DCAPS", "2");
* rowBuilder.setValue("VOL", "0");
* rowBuilder.setValue("GLEASON", "6");
* frameBuilder.addRow(rowBuilder);
*
* // Create a frame which can be transformed by MOJO pipeline
* MojoFrame iframe = frameBuilder.toMojoFrame();
*
* // Transform frame by the given modelPipeline:
* MojoFrame oframe = modelPipeline.transform(iframe);
* ```
*/
package ai.h2o.mojos.runtime.frame;
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/InvalidLicenseException.java
|
package ai.h2o.mojos.runtime.lic;
/**
* The exception reports an invalid license.
*
* The invalid license means that MOJO runtime was
* not able to load, parse or interpret license file.
*/
public class InvalidLicenseException extends LicenseException {
  /** Reports an invalid license with no underlying cause. */
  public InvalidLicenseException() {
    this(null);
  }

  /** Reports an invalid license caused by {@code e}. */
  public InvalidLicenseException(Exception e) {
    super("Invalid license", e);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/InvalidSignatureException.java
|
package ai.h2o.mojos.runtime.lic;
/**
* The exception reports an invalid MOJO signature.
*
* The invalid MOJO signature means, that the signature file
* ({@link MojoSignatureConsts#MOJO_SIGNATURE_FILENAME }) is present,
* but the signature does not match expected signature of
* MOJO watermark file ({@link MojoSignatureConsts#MOJO_WATERMARK_FILENAME}).
*
* That means that the signature or watermark file was modified.
*/
public class InvalidSignatureException extends LicenseException {
  /** Reports a signature/watermark mismatch with no underlying cause. */
  public InvalidSignatureException() {
    this(null);
  }

  /** Reports a signature/watermark mismatch caused by {@code e}. */
  public InvalidSignatureException(Exception e) {
    super("Invalid MOJO signature! It seems like MOJO was modified!", e);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/InvalidWatermarkException.java
|
package ai.h2o.mojos.runtime.lic;
/**
* The exception reports an invalid MOJO watermark (non-perpetual MOJO).
*
* The invalid watermark means that the MOJO is not marked as perpetual.
*/
public class InvalidWatermarkException extends LicenseException {
  /** Reports a non-perpetual MOJO with no underlying cause. */
  public InvalidWatermarkException() {
    this(null);
  }

  /** Reports a non-perpetual MOJO, caused by {@code e}. */
  public InvalidWatermarkException(Exception e) {
    super("Invalid MOJO watermark! This is not a perpetual MOJO!", e);
  }
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/LicenseConsts.java
|
package ai.h2o.mojos.runtime.lic;
import java.io.File;
/**
 * The default configuration properties used by the license manager
 * to look for a license. Values may be supplied via environment variables,
 * JVM system properties, or the default file locations below.
 */
public class LicenseConsts {
  /** The default name of the license file. */
  public static final String LICENSE_FILENAME = "license.sig";
  /** Name of the environment variable which holds the license key directly. */
  public static final String ENV_PROP_LICENSE_KEY = "DRIVERLESS_AI_LICENSE_KEY";
  /** Name of the environment variable which holds the location of the license file.
   *
   * For example, `export DRIVERLESS_AI_LICENSE_FILE=/opt/dai/license/license.sig`
   */
  public static final String ENV_PROP_LICENSE_FILE = "DRIVERLESS_AI_LICENSE_FILE";
  /**
   * Name of the JVM system property which holds the license key directly.
   */
  public static final String SYS_PROP_LICENSE_KEY = "ai.h2o.mojos.runtime.license.key";
  /**
   * Name of the JVM system property which holds the location of the license file.
   *
   * For example, `java -Dai.h2o.mojos.runtime.license.file=/opt/dai/license/license.key -cp...`
   */
  public static final String SYS_PROP_LICENSE_FILE = "ai.h2o.mojos.runtime.license.file";
  /** Name of the JVM system property which overrides the default license file name ({@link #LICENSE_FILENAME}).
   *
   */
  public static final String SYS_PROP_LICENSE_FILENAME = "ai.h2o.mojos.runtime.license.filename";
  /** Default name of the folder where DAI stores configuration or license files (under the user's home). */
  public static final String DEFAULT_DAI_FOLDER = System.getProperty("user.home") + File.separator + ".driverlessai";
  /** Default location of the DAI license: ~/.driverlessai/license.sig (file name overridable via {@link #SYS_PROP_LICENSE_FILENAME}). */
  public static final String DEFAULT_DAI_LICENSE_FILE_LOCATION = DEFAULT_DAI_FOLDER + File.separator + System.getProperty(SYS_PROP_LICENSE_FILENAME, LICENSE_FILENAME);
}
|
0
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime
|
java-sources/ai/h2o/mojo2-runtime-api/2.8.9/ai/h2o/mojos/runtime/lic/LicenseException.java
|
package ai.h2o.mojos.runtime.lic;
/**
* A generic license problem.
*/
/**
 * A generic license problem; base type for all MOJO licensing failures.
 * Constructed with suppression enabled and a non-writable stack trace,
 * keeping instances lightweight.
 */
public class LicenseException extends Exception {
  public LicenseException(String msg, Exception e) {
    // enableSuppression = true, writableStackTrace = false.
    super(msg, e, true, false);
  }

  @Override
  public String toString() {
    // Render as the bare message, without the usual class-name prefix.
    return getMessage();
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.