| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/utils/MathUtils.java
|
package hex.genmodel.utils;
/**
* Copied (partially) from water.util.MathUtils
*/
public class MathUtils {
/**
* Euler–Mascheroni constant (also called Euler's constant)
*/
public static final double EULER_MASCHERONI_CONSTANT = 0.5772156649;
// Section 4.2: Error bound on recursive sum from Higham, Accuracy and Stability of Numerical Algorithms, 2nd Ed.
// |E_n| <= (n-1) * u * \sum_{i=1}^n |x_i| + O(u^2)
public static boolean equalsWithinRecSumErr(double actual, double expected, int n, double absum) {
return Math.abs(actual - expected) <= (n-1) * Math.ulp(actual) * absum;
}
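// Illustrative check (added for clarity, not in the original source; values hypothetical):
// a left-to-right sum of n doubles is accepted when it sits within the Higham bound above, e.g.
//   equalsWithinRecSumErr(0.1 + 0.2 + 0.3, 0.6, 3, 0.6)  // -> true: the error is ~1 ulp, the bound is 2 * ulp * 0.6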
/**
* Fast calculation of log base 2 for integers.
* @return log base 2 of n
*/
public static int log2(int n) {
if (n <= 0) throw new IllegalArgumentException();
return 31 - Integer.numberOfLeadingZeros(n);
}
public static int log2(long n) {
return 63 - Long.numberOfLeadingZeros(n);
}
public static int combinatorial(int top, int bottom) {
int denom = 1;
int numer = 1;
for (int index = 1; index <= bottom; index++) {
numer *= (top - index + 1);
denom *= index;
}
return (numer/denom);
}
/**
* Compute the harmonic number, estimated via the natural logarithm
*/
public static double harmonicNumberEstimation(long value) {
if (value <= 0)
return 0;
return Math.log(value) + EULER_MASCHERONI_CONSTANT;
}
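// Illustrative accuracy (added for clarity): the true H_100 is about 5.18738, while
//   harmonicNumberEstimation(100) = ln(100) + 0.5772156649 = 4.60517 + 0.57722 = 5.18239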
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/utils/ParseUtils.java
|
package hex.genmodel.utils;
/**
* Helper functions for parsing the serialized model.
*/
public class ParseUtils {
public static double[] parseArrayOfDoubles(String input) {
if (!(input.startsWith("[") && input.endsWith("]")))
throw new NumberFormatException("Array should be enclosed in square brackets");
String[] parts = input.substring(1, input.length()-1).split(",");
double[] res = new double[parts.length];
for (int i = 0; i < parts.length; i++)
res[i] = Double.parseDouble(parts[i].trim());
return res;
}
public static long[] parseArrayOfLongs(String input) {
if (!(input.startsWith("[") && input.endsWith("]")))
throw new NumberFormatException("Array should be enclosed in square brackets");
String[] parts = input.substring(1, input.length()-1).split(",");
long[] res = new long[parts.length];
for (int i = 0; i < parts.length; i++)
res[i] = Long.parseLong(parts[i].trim());
return res;
}
public static int[] parseArrayOfInts(String input) {
if (!(input.startsWith("[") && input.endsWith("]")))
throw new NumberFormatException("Array should be enclosed in square brackets");
String[] parts = input.substring(1, input.length()-1).split(",");
int[] res = new int[parts.length];
for (int i = 0; i < parts.length; i++)
res[i] = Integer.parseInt(parts[i].trim());
return res;
}
// Note: the return type need not match the type of defVal; the parsed result can have a different type.
// E.g., using new Long[0] as the default value to obtain a result of type Long[], the parsed value is of type long[];
// using new long[0] as the default value to obtain a result of type long[], the parsed value is of type int[].
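// Illustrative calls (added for clarity; inputs hypothetical):
//   tryParse("[1, 2]", new Long[0])     -> long[]{1L, 2L} (via parseArrayOfLongs)
//   tryParse("[1, 2]", new long[0])     -> int[]{1, 2}    (falls through; parseArrayOfInts succeeds first)
//   tryParse("3.14", Double.valueOf(0)) -> Double 3.14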
public static Object tryParse(String input, Object defVal) {
if (input.equals("null")) return defVal;
if (input.equals("true")) return true;
if (input.equals("false")) return false;
if (defVal != null && !(defVal.getClass().isArray())) {
if (defVal instanceof Boolean) // parse according to data type of defVal
return Boolean.valueOf(input);
else if (defVal instanceof Long)
return Long.valueOf(input);
else if (defVal instanceof Integer)
return Integer.valueOf(input);
else if (defVal instanceof Float)
return Float.valueOf(input);
else if (defVal instanceof Double)
return Double.valueOf(input);
}
if (defVal != null && (defVal.getClass().isArray())) {
if (defVal instanceof Long[])
return parseArrayOfLongs(input);
else if (defVal instanceof Integer[])
return parseArrayOfInts(input);
else if (defVal instanceof Double[])
return parseArrayOfDoubles(input);
}
if ("[]".equals(input) && (defVal != null) && defVal.getClass().isArray())
return defVal;
try { return Integer.parseInt(input); }
catch (NumberFormatException e) {
if ((defVal instanceof Number) && !(defVal instanceof Double || defVal instanceof Float || defVal instanceof Long))
throw e; // integer number expected but couldn't be parsed
}
try { return Long.parseLong(input); }
catch (NumberFormatException e) {
if (defVal instanceof Number && !(defVal instanceof Double))
throw e; // number expected but couldn't be parsed
}
try { return Double.parseDouble(input); }
catch (NumberFormatException e) {
if (defVal instanceof Number)
throw e; // number expected but couldn't be parsed
}
try { return parseArrayOfInts(input); }
catch (NumberFormatException e) {
if (defVal instanceof int[]) throw e; // int array expected
}
try { return parseArrayOfLongs(input); }
catch (NumberFormatException e) {
if (defVal instanceof long[]) throw e; // long array expected
}
try { return parseArrayOfDoubles(input); }
catch (NumberFormatException e) {
if (defVal instanceof double[]) throw e; // double array expected
}
return input;
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/utils/StringEscapeUtils.java
|
package hex.genmodel.utils;
import java.io.StringWriter;
public class StringEscapeUtils {
/**
* Escapes new line characters of a given string.
* It also escapes backslash characters.
* @param str string to be escaped
* @return escaped string
*/
public static String escapeNewlines(String str) {
final int len = str.length();
StringWriter out = new StringWriter(len * 2);
for (int i = 0; i < len; i++) {
char c = str.charAt(i);
switch (c) {
case '\\':
out.write('\\');
out.write('\\');
break;
case '\n':
out.write('\\');
out.write('n');
break;
case '\r':
out.write('\\');
out.write('r');
break;
default:
out.write(c);
}
}
return out.toString();
}
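// Illustrative round trip (added for clarity; Java string-literal notation):
//   escapeNewlines("a\nb\\c")      -> "a\\nb\\\\c"
//   unescapeNewlines("a\\nb\\\\c") -> "a\nb\\c"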
/**
* Inverse function to {@link #escapeNewlines(String)}
* @param str escaped string
* @return unescaped
*/
public static String unescapeNewlines(String str) {
boolean hadSlash = false;
final int len = str.length();
StringWriter out = new StringWriter(len);
for (int i = 0; i < len; i++) {
char c = str.charAt(i);
if (hadSlash) {
switch (c) {
case 'n':
out.write('\n');
break;
case 'r':
out.write('\r');
break;
case '\\':
out.write('\\');
break;
default:
out.write(c);
}
hadSlash = false;
} else {
if (c == '\\')
hadSlash = true;
else
out.write(c);
}
}
return out.toString();
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/package-info.java
|
/**
* Shared code between the H2O runtime and generated POJO models.
*/
package water;
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/genmodel/AbstractBuildVersion.java
|
package water.genmodel;
abstract public class AbstractBuildVersion {
abstract public String branchName();
abstract public String lastCommitHash();
abstract public String describe();
abstract public String projectVersion();
abstract public String compiledOn();
abstract public String compiledBy();
@Override public String toString() {
return "H2O-GENMODEL v"+projectVersion()+ " ("+branchName()+" - "+lastCommitHash()+")";
}
public String buildNumber() {
String pv = projectVersion();
if (pv.equals(UNKNOWN_VERSION_MARKER)) {
return UNKNOWN_VERSION_MARKER;
}
String[] split_pv = pv.split("\\.");
String bn = split_pv[split_pv.length-1];
return(bn);
}
/** Dummy version of H2O. */
private static final String UNKNOWN_VERSION_MARKER = "(unknown)";
public static final AbstractBuildVersion UNKNOWN_VERSION = new AbstractBuildVersion() {
@Override public String projectVersion() { return UNKNOWN_VERSION_MARKER; }
@Override public String lastCommitHash() { return UNKNOWN_VERSION_MARKER; }
@Override public String describe() { return UNKNOWN_VERSION_MARKER; }
@Override public String compiledOn() { return UNKNOWN_VERSION_MARKER; }
@Override public String compiledBy() { return UNKNOWN_VERSION_MARKER; }
@Override public String branchName() { return UNKNOWN_VERSION_MARKER; }
};
private String getValue(String name) {
switch (name) {
case "projectVersion":
return projectVersion();
case "lastCommitHash":
return lastCommitHash();
case "describe":
return describe();
case "compiledOn":
return compiledOn();
case "compiledBy":
return compiledBy();
case "branchName":
return branchName();
default:
return null;
}
}
public static AbstractBuildVersion getBuildVersion() {
AbstractBuildVersion abv = AbstractBuildVersion.UNKNOWN_VERSION;
try {
Class klass = Class.forName("water.genmodel.BuildVersion");
java.lang.reflect.Constructor constructor = klass.getConstructor();
abv = (AbstractBuildVersion) constructor.newInstance();
} catch (Exception ignore) { }
return abv;
}
public static void main(String[] args) {
if (args.length == 0) {
args = new String[]{"projectVersion"};
}
AbstractBuildVersion buildVersion = getBuildVersion();
System.out.print(buildVersion.getValue(args[0]));
for (int i = 1; i < args.length; i++) {
System.out.print(' ');
System.out.print(buildVersion.getValue(args[i]));
}
System.out.println();
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/genmodel/BuildVersion.java
|
package water.genmodel;
public class BuildVersion extends AbstractBuildVersion {
public String branchName() { return "rel-3.46.0"; }
public String lastCommitHash() { return "479065fe4d09bf8f364b6cab45b54f9324ee1c1d"; }
public String describe() { return "479065fe4d"; }
public String projectVersion() { return "3.46.0.7"; }
public String compiledOn() { return "2025-03-27 16:00:19"; }
public String compiledBy() { return "root"; }
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/genmodel/IGeneratedModel.java
|
package water.genmodel;
import hex.genmodel.CategoricalEncoding;
/**
* A generic interface to access generated models.
*/
public interface IGeneratedModel {
/** Returns model's unique identifier. */
public String getUUID();
/** Returns the number of columns used as input for training (i.e., excluding the response and offset columns). */
public int getNumCols();
/** The names of the columns used in the model: the input columns followed by the response column. */
public String[] getNames();
/** The original names of the columns used in the model: the input columns followed by the response column. */
public String[] getOrigNames();
/** The name of the response column. */
@Deprecated
public String getResponseName();
/** The name of the offset column. */
public String getOffsetName();
/** Returns the index of the response column inside getDomains(). */
public int getResponseIdx();
/** Get the number of classes in the given column.
* Returns a number greater than zero if the column is categorical,
* or -1 if the column is numeric. */
public int getNumClasses(int i);
/** Return the number of classes in the response column.
*
* @return number of response classes
* @throws java.lang.UnsupportedOperationException if called on a non-classifier model.
*/
public int getNumResponseClasses();
public CategoricalEncoding getCategoricalEncoding();
/** @return true if this model represents a classifier, else it is used for regression. */
public boolean isClassifier();
/** @return true if this model represents an AutoEncoder. */
public boolean isAutoEncoder();
/** Gets the domain of the given column.
* @param name column name
* @return domain for the given column, or null if the column is numeric.
*/
public String[] getDomainValues(String name);
/**
* Returns domain values for the i-th column.
* @param i index of column
* @return domain for the given categorical column, or null if the column contains numeric values
*/
public String[] getDomainValues(int i);
/** Returns domain values for all columns including response column. */
public String[][] getDomainValues();
/** Returns original domain values for all columns including response column. */
public String[][] getOrigDomainValues();
/** Returns original Eigen encoder projections array for all columns. */
public double[] getOrigProjectionArray();
/** Returns the index of the column with the given name, or -1 if the column is not found. */
public int getColIdx(String name);
/** Maps the given column's categorical value to the integer used by this model.
* Returns -1 if the mapping is not found. */
public int mapEnum(int colIdx, String categoricalValue);
/**
* Returns the expected size of preds array which is passed to `predict(double[], float[])` function.
* @return expected size of preds array
*/
public int getPredsSize();
/** Predict the given row and return prediction.
*
* @param data row holding the data. Ordering should follow ordering of columns returned by getNames()
* @param preds allocated array to hold a prediction
* @return returned preds parameter filled by prediction
* @deprecated use method IGenModel#score0
@Deprecated
public float[] predict(double[] data, float[] preds);
*/
/** Predict the given row and return prediction using given number of iterations (e.g., number of trees from forest).
*
* @param data row holding the data. Ordering should follow ordering of columns returned by getNames()
* @param preds allocated array to hold a prediction
* @param maxIters maximum number of iterations to use during predicting process
* @return returned preds parameter filled by prediction
public float[] predict(double[] data, float[] preds, int maxIters);
*/
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/genmodel/package-info.java
|
/**
* Deprecated (see hex.genmodel instead).
*/
package water.genmodel;
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/H2OPredictor.java
|
package water.util;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import com.google.gson.reflect.TypeToken;
import hex.genmodel.GenModel;
import hex.genmodel.easy.exception.PredictException;
import hex.genmodel.easy.prediction.AbstractPrediction;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowData;
import hex.genmodel.MojoModel;
import java.io.*;
import java.lang.reflect.Type;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Created by magnus on 5/5/16.
*/
public class H2OPredictor {
private static final boolean DEBUG = false;
private static boolean useLabels = false;
private String[] labels = null;
private EasyPredictModelWrapper model = null;
private static final Gson gson = new Gson();
private final Type MapType = new TypeToken<Map<String, Object>>(){}.getType();
public H2OPredictor(String ojoFileName, String modelName) {
if (DEBUG) System.out.printf("init ojoFileName %s modelName %s\n", ojoFileName, modelName);
try {
if (ojoFileName == null)
throw new Exception("file name can't be null");
else if (ojoFileName.endsWith(".jar")) {
loadPojo(ojoFileName, modelName);
}
else if (ojoFileName.endsWith(".zip"))
loadMojo(ojoFileName);
else
throw new Exception("unknown model archive type");
if (useLabels)
labels = model.getResponseDomainValues();
}
catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
private GenModel loadClassFromJar(String jarFileName, String modelName) throws Exception {
if (DEBUG) System.out.println("jar " + jarFileName + " model " + modelName);
if (!new File(jarFileName).isFile()) {
throw new FileNotFoundException("Can't read " + jarFileName);
}
try {
URL url = new File(jarFileName).toURI().toURL();
ClassLoader loader = URLClassLoader.newInstance(
new URL[]{url},
getClass().getClassLoader()
);
String packagePrefix = "";
String className = packagePrefix + modelName;
Class<?> clazz = loader.loadClass(className);
Class<? extends GenModel> modelClass = clazz.asSubclass(GenModel.class);
return modelClass.newInstance();
}
catch (MalformedURLException e) {
throw new Exception("Can't use Jar file" + jarFileName);
}
catch (ClassNotFoundException e) {
throw new Exception("Can't find model " + modelName + " in jar file " + jarFileName);
}
catch (InstantiationException e) {
throw new Exception("Can't instantiate model " + modelName + " from jar file " + jarFileName);
}
catch (IllegalAccessException e) {
throw new Exception("Can't access model " + modelName + " in jar file " + jarFileName);
}
}
private void loadPojo(String jarFileName, String modelName)
throws Exception {
GenModel rawModel = loadClassFromJar(jarFileName, modelName);
model = new EasyPredictModelWrapper(rawModel);
}
private void loadMojo(String zipFileName)
throws Exception {
GenModel rawModel = MojoModel.load(zipFileName);
model = new EasyPredictModelWrapper(rawModel);
}
private RowData jsonToRowData(String json) {
try {
return gson.fromJson(json, RowData.class);
}
catch (JsonSyntaxException e) {
throw new JsonSyntaxException("Malformed JSON");
}
}
private RowData[] jsonToRowDataArray(String json) {
try {
return gson.fromJson(json, RowData[].class);
}
catch (JsonSyntaxException e) {
throw new JsonSyntaxException("Malformed JSON Array");
}
}
private String predictRow(RowData row) throws PredictException {
if (model == null)
throw new PredictException("No model loaded");
if (gson == null)
throw new PredictException("Gson not available");
if (row == null)
throw new PredictException("No row data");
AbstractPrediction pr = model.predict(row);
String json = gson.toJson(pr);
if (useLabels) {
Map<String, Object> map = gson.fromJson(json, MapType);
map.put("responseDomainValues", labels);
json = gson.toJson(map);
}
return json;
}
public static String predict3(String ojoFileName, String modelName, String jsonArgs) {
if (DEBUG)
System.out.printf("predict3 ojoFileName %s modelName %s jsonArgs %s\n", ojoFileName, modelName, jsonArgs);
try {
H2OPredictor p = new H2OPredictor(ojoFileName, modelName);
if (ojoFileName == null)
throw new Exception("file name can't be null");
else if (ojoFileName.endsWith(".jar"))
p.loadPojo(ojoFileName, modelName);
else if (ojoFileName.endsWith(".zip"))
p.loadMojo(ojoFileName);
else
throw new Exception("unknown model archive type");
if (jsonArgs == null || jsonArgs.length() == 0)
throw new Exception("empty json argument");
// check if argument is a file name or json
char first = jsonArgs.trim().charAt(0);
boolean isJson = first == '{' || first == '[';
if (DEBUG) {
System.out.println("first " + first);
System.out.println("isJson " + isJson);
}
if (!isJson) {
// argument is a file name
byte[] bytes = readFile(jsonArgs);
jsonArgs = new String(bytes);
first = jsonArgs.trim().charAt(0);
isJson = first == '{' || first == '[';
}
if (DEBUG) System.out.println("jsonArgs " + jsonArgs);
String result = "";
if (first == '[') {
RowData[] rows = p.jsonToRowDataArray(jsonArgs);
result += "[ ";
for (RowData row : rows) {
if (DEBUG) System.out.println("rowdata\t" + row);
if (!result.trim().endsWith("["))
result += ", ";
result += p.predictRow(row);
}
result += " ]";
}
else {
RowData row = p.jsonToRowData(jsonArgs);
if (DEBUG) System.out.println("rowdata\t" + row);
result = p.predictRow(row);
}
return result;
}
catch (final Exception e) {
Map<String, String> map = new HashMap<String, String>();
map.put("error", stackTraceToString(e));
String s = gson.toJson(map);
return s;
}
}
public String pred(String jsonArgs) {
try {
return predictRow(jsonToRowData(jsonArgs));
}
catch (Exception e) {
return "{ \"error\": \"" + stackTraceToString(e) + "\" }";
}
}
public static String predict2(String ojoFileName, String jsonArgs) {
String modelName = ojoFileName.replace(".zip", "").replace(".jar", "");
int index = modelName.lastIndexOf(File.separatorChar);
if (index != -1) modelName = modelName.substring(index + 1);
return predict3(ojoFileName, modelName, jsonArgs);
}
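// Illustrative command lines (added for clarity; file and model names are hypothetical):
//   java water.util.H2OPredictor model.zip '{"x": 1.5}'              // MOJO, single row -> predict2
//   java water.util.H2OPredictor model.jar MyPojoModel '[{"x": 1}]'  // POJO jar, row array -> predict3
//   java water.util.H2OPredictor -l model.zip rows.json              // -l adds response domain labels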
private static byte[] readFile(String filePath) throws IOException {
StringBuffer fileData = new StringBuffer();
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(filePath));
char[] buf = new char[1024];
int numRead = 0;
while ((numRead = reader.read(buf)) != -1) {
String readData = String.valueOf(buf, 0, numRead);
fileData.append(readData);
}
}
finally{
if (reader != null)
reader.close();
}
return fileData.toString().getBytes();
}
private static String stackTraceToString(Throwable e) {
ByteArrayOutputStream os = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(os);
e.printStackTrace(ps);
String s = os.toString();
try {
ps.close();
os.close();
}
catch (IOException e1) {
return "Can't get stack trace from throwable " + e.getMessage();
}
return s;
}
public static void main(String[] args) {
if (DEBUG) System.out.println("args\t" + Arrays.toString(args));
// -l option means add labels to output
if (args.length > 0 && args[0].equals("-l")) {
useLabels = true;
args = Arrays.copyOfRange(args, 1, args.length);
}
String result = "";
if (args.length == 2)
result = predict2(args[0], args[1].replaceAll("\\\\", ""));
else if (args.length == 3)
result = predict3(args[0], args[1], args[2].replaceAll("\\\\", ""));
else
result = "{ \"error\": \"Neeed 2 or 3 args have " + args.length + ", \"usage\": \"mojoFile jsonString or: jarFile modelName jsonString\" } ";
System.out.println(result);
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/JavaVersionUtils.java
|
package water.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public enum JavaVersionUtils {
JAVA_VERSION, // current version
JAVA_8(8), // mostly for tests to check differences in behaviors
JAVA_9(9);
public static final int UNKNOWN = -1;
private final int majorVersion;
JavaVersionUtils() {
this(System.getProperty("java.version"));
}
JavaVersionUtils(String javaVersion) {
this(parseMajor(javaVersion));
}
JavaVersionUtils(int majorVersion) {
this.majorVersion = majorVersion;
}
public int getMajor() {
return majorVersion;
}
public boolean isKnown() {
return majorVersion!=UNKNOWN;
}
static int parseMajor(String version) {
if(version!=null) {
final Pattern pattern = Pattern.compile("1\\.([0-9]*).*|([0-9][0-9]*).*");
final Matcher matcher = pattern.matcher(version);
if(matcher.matches()) {
return Integer.parseInt(matcher.group(matcher.group(1)!=null?1:2));
}
}
return UNKNOWN;
}
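// Illustrative parses (added for clarity): both pre-9 and JEP 223 version strings are handled.
//   parseMajor("1.8.0_292") -> 8   (matches the "1.x" scheme, group 1)
//   parseMajor("11.0.2")    -> 11  (matches the plain major scheme, group 2)
//   parseMajor(null)        -> UNKNOWN (-1)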
/**
*
* @return True if current Java version uses unified logging (JEP 158), otherwise false.
*/
public boolean useUnifiedLogging(){
// Unified logging enabled since version 9, enforced in version 10.
return isKnown() && getMajor() >= 9;
}
public String getVerboseGCFlag() {
return useUnifiedLogging() ? "-Xlog:gc=info" : "-verbose:gc";
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/ModelUtils.java
|
package water.util;
import java.util.Arrays;
import java.util.Random;
/**
* Shared static code to support modeling, prediction, and scoring.
*
* <p>Used by interpreted models as well as by generated model code.</p>
*
* <p><strong>WARNING:</strong> The class should have no other H2O dependencies
* since it is provided for generated code as h2o-model.jar which contains
* only a few files.</p>
*
*/
public class ModelUtils {
/**
* Sample out-of-bag rows at the given rate with the help of the given sampler.
* It returns an array of sampled rows. The first element of the array holds the
* number of sampled rows. The returned array can be larger than the number of
* sampled elements.
*
* @param nrows number of rows to sample from.
* @param rate sampling rate
* @param sampler random "dice"
* @return an array containing the numbers of sampled rows. The first element holds the number of sampled rows.
* The array length can be greater than the number of sampled rows.
*/
public static int[] sampleOOBRows(int nrows, float rate, Random sampler) {
return sampleOOBRows(nrows, rate, sampler, new int[2+Math.round((1f-rate)*nrows*1.2f+0.5f)]);
}
/**
* In-situ version of {@link #sampleOOBRows(int, float, Random)}.
*
* @param oob an initial array to hold sampled rows. Can be internally reallocated.
* @return an array containing sampled rows.
*
* @see #sampleOOBRows(int, float, Random)
*/
public static int[] sampleOOBRows(int nrows, float rate, Random sampler, int[] oob) {
int oobcnt = 0; // Number of oob rows
Arrays.fill(oob, 0);
for(int row = 0; row < nrows; row++) {
if (sampler.nextFloat() >= rate) { // it is out-of-bag row
oob[1+oobcnt++] = row;
if (1+oobcnt>=oob.length) oob = Arrays.copyOf(oob, Math.round(1.2f*nrows+0.5f)+2);
}
}
oob[0] = oobcnt;
return oob;
}
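// Illustrative usage (added for clarity; values hypothetical): unpacking the packed result.
//   int[] oob = ModelUtils.sampleOOBRows(1000, 0.66f, new java.util.Random(42));
//   int count = oob[0]; // number of sampled rows
//   for (int i = 1; i <= count; i++) { int row = oob[i]; /* process out-of-bag row */ }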
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/ParseTime.java
|
package water.util;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
public class ParseTime {
/**
* Factory to create a formatter from a strptime pattern string.
* This models the commonly supported features of strftime from POSIX
* (where it can).
* <p>
* The format may contain locale specific output, and this will change as
* you change the locale of the formatter.
* Call DateTimeFormatter.withLocale(Locale) to switch the locale.
* For example:
* <pre>
* DateTimeFormat.forPattern(pattern).withLocale(Locale.FRANCE).print(dt);
* </pre>
*
* @param pattern pattern specification
* @return the formatter
* @throws IllegalArgumentException if the pattern is invalid
*/
public static DateTimeFormatter forStrptimePattern(String pattern) {
if (pattern == null || pattern.length() == 0)
throw new IllegalArgumentException("Empty date time pattern specification");
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
parseToBuilder(builder, pattern);
DateTimeFormatter formatter = builder.toFormatter();
return formatter;
}
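// Illustrative usage (added for clarity; pattern and date are hypothetical):
//   org.joda.time.format.DateTimeFormatter f = ParseTime.forStrptimePattern("%Y-%m-%d %H:%M:%S");
//   long millis = f.parseMillis("2019-01-02 03:04:05");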
/**
* Parses the given pattern and appends the rules to the given
* DateTimeFormatterBuilder. See strptime man page for valid patterns.
*
* @param pattern pattern specification
* @throws IllegalArgumentException if the pattern is invalid
*/
private static void parseToBuilder(DateTimeFormatterBuilder builder, String pattern) {
int length = pattern.length();
int[] indexRef = new int[1];
for (int i=0; i<length; i++) {
indexRef[0] = i;
String token = parseToken(pattern, indexRef);
i = indexRef[0];
int tokenLen = token.length();
if (tokenLen == 0) {
break;
}
char c = token.charAt(0);
if (c == '%' && token.charAt(1) != '%') {
c = token.charAt(1);
switch(c) {
case 'a':
builder.appendDayOfWeekShortText();
break;
case 'A':
builder.appendDayOfWeekText();
break;
case 'b':
case 'h':
builder.appendMonthOfYearShortText();
break;
case 'B':
builder.appendMonthOfYearText();
break;
case 'c':
builder.appendDayOfWeekShortText();
builder.appendLiteral(' ');
builder.appendMonthOfYearShortText();
builder.appendLiteral(' ');
builder.appendDayOfMonth(2);
builder.appendLiteral(' ');
builder.appendHourOfDay(2);
builder.appendLiteral(':');
builder.appendMinuteOfHour(2);
builder.appendLiteral(':');
builder.appendSecondOfMinute(2);
builder.appendLiteral(' ');
builder.appendYear(4,4);
break;
case 'C':
builder.appendCenturyOfEra(1,2);
break;
case 'd':
builder.appendDayOfMonth(2);
break;
case 'D':
builder.appendMonthOfYear(2);
builder.appendLiteral('/');
builder.appendDayOfMonth(2);
builder.appendLiteral('/');
builder.appendTwoDigitYear(2019);
break;
case 'e':
builder.appendOptional(DateTimeFormat.forPattern("' '").getParser());
builder.appendDayOfMonth(2);
break;
case 'F':
builder.appendYear(4,4);
builder.appendLiteral('-');
builder.appendMonthOfYear(2);
builder.appendLiteral('-');
builder.appendDayOfMonth(2);
break;
case 'g':
case 'G':
break; //for output only, accepted and ignored for input
case 'H':
builder.appendHourOfDay(2);
break;
case 'I':
builder.appendClockhourOfHalfday(2);
break;
case 'j':
builder.appendDayOfYear(3);
break;
case 'k':
builder.appendOptional(DateTimeFormat.forPattern("' '").getParser());
builder.appendHourOfDay(2);
break;
case 'l':
builder.appendOptional(DateTimeFormat.forPattern("' '").getParser());
builder.appendClockhourOfHalfday(2);
break;
case 'm':
builder.appendMonthOfYear(2);
break;
case 'M':
builder.appendMinuteOfHour(2);
break;
case 'n':
break;
case 'p':
builder.appendHalfdayOfDayText();
break;
case 'r':
builder.appendClockhourOfHalfday(2);
builder.appendLiteral(':');
builder.appendMinuteOfHour(2);
builder.appendLiteral(':');
builder.appendSecondOfMinute(2);
builder.appendLiteral(' ');
builder.appendHalfdayOfDayText();
break;
case 'R':
builder.appendHourOfDay(2);
builder.appendLiteral(':');
builder.appendMinuteOfHour(2);
break;
case 'S':
builder.appendSecondOfMinute(2);
break;
case 't':
break;
case 'T':
builder.appendHourOfDay(2);
builder.appendLiteral(':');
builder.appendMinuteOfHour(2);
builder.appendLiteral(':');
builder.appendSecondOfMinute(2);
break;
/* case 'U': //FIXME Joda does not support US week start (Sun), this will be wrong
builder.appendWeekOfYear(2);
break;
case 'u':
builder.appendDayOfWeek(1);
break;*/
case 'V':
break; //accepted and ignored
/* case 'w': //FIXME Joda does not support US week start (Sun), this will be wrong
builder.appendDayOfWeek(1);
break;
case 'W':
builder.appendWeekOfYear(2);
break;*/
case 'x':
builder.appendTwoDigitYear(2019);
builder.appendLiteral('/');
builder.appendMonthOfYear(2);
builder.appendLiteral('/');
builder.appendDayOfMonth(2);
break;
/* case 'X': //Results differ between OSX and Linux
builder.appendHourOfDay(2);
builder.appendLiteral(':');
builder.appendMinuteOfHour(2);
builder.appendLiteral(':');
builder.appendSecondOfMinute(2);
break;*/
case 'y': //POSIX 2004 & 2008 says 69-99 -> 1900s, 00-68 -> 2000s
builder.appendTwoDigitYear(2019);
break;
case 'Y':
builder.appendYear(4,4);
break;
case 'z':
builder.appendTimeZoneOffset(null, "z", false, 2, 2);
break;
case 'Z':
break; //for output only, accepted and ignored for input
default: // No match, ignore
builder.appendLiteral('\'');
builder.appendLiteral(token);
throw new IllegalArgumentException(token + " is not accepted as a parse token, treating as a literal");
}
} else {
if (c == '\'') {
String sub = token.substring(1);
if (sub.length() > 0) {
// Create copy of sub since otherwise the temporary quoted
// string would still be referenced internally.
builder.appendLiteral(new String(sub));
}
} else throw new IllegalArgumentException("Unexpected token encountered parsing format string: " + c);
}
}
}
/**
* Parses an individual token.
*
* @param pattern the pattern string
* @param indexRef a single element array, where the input is the start
* location and the output is the location after parsing the token
* @return the parsed token
*/
private static String parseToken(String pattern, int[] indexRef) {
StringBuilder buf = new StringBuilder();
int i = indexRef[0];
int length = pattern.length();
char c = pattern.charAt(i);
if (c == '%' && i + 1 < length && pattern.charAt(i+1) != '%') {
//Grab pattern tokens
c = pattern.charAt(++i);
//0 is ignored for input, and this ignores alternative religious eras
if ((c == '0' || c == 'E') && i + 1 < length) c = pattern.charAt(++i);
buf.append('%');
buf.append(c);
} else { // Grab all else as text
buf.append('\''); // mark literals with ' in first place
buf.append(c);
for (i++; i < length;i++) {
c = pattern.charAt(i);
if (c == '%' ) { // consume literal % otherwise break
if (i + 1 < length && pattern.charAt(i + 1) == '%') i++;
else { i--; break; }
}
buf.append(c);
}
}
indexRef[0] = i;
return buf.toString();
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/package-info.java
|
/**
* Deprecated (see hex.util instead).
*/
package water.util;
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/ExactComparator.java
|
package water.util.comparison.string;
/*
Copyright 2023 Lars Marius Garshol
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Original code: https://github.com/larsga/Duke/blob/duke-1.2/src/main/java/no/priv/garshol/duke/comparators/ExactComparator.java
public class ExactComparator implements StringComparator {
public boolean isTokenized() {
return false;
}
public double compare(String v1, String v2) {
return v1.equals(v2) ? 1.0 : 0.0;
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/H2OJaroWinklerComparator.java
|
/**
* This class is identical to
* https://github.com/larsga/Duke/blob/master/duke-core/src/main/java/no/priv/garshol/duke/comparators/JaroWinkler.java
*
* however it is not included in the last available Duke release. Once a new Duke version is released with this
* fixed class in it, we can remove this class.
*
*/
package water.util.comparison.string;
import java.util.ArrayList;
import java.util.List;
/**
* An implementation of the Jaro-Winkler string similarity measure.
* The implementation follows the description in the paper "Evaluating
* String Comparator Performance for Record Linkage", by William
* E. Yancey, RESEARCH REPORT SERIES (Statistics #2005-05), US Bureau
* of the Census. http://www.census.gov/srd/papers/pdf/rrs2005-05.pdf
*/
public class H2OJaroWinklerComparator implements StringComparator {
public double compare(String s1, String s2) {
return similarity(s1, s2);
}
public boolean isTokenized() {
return true; // I guess?
}
/**
* Returns normalized score, with 0.0 meaning no similarity at all,
* and 1.0 meaning full equality.
*/
public static double similarity(String s1, String s2) {
if (s1.equals(s2))
return 1.0;
// ensure that s1 is shorter than or same length as s2
if (s1.length() > s2.length()) {
String tmp = s2;
s2 = s1;
s1 = tmp;
}
/*
* this list of Boolean values is used for avoiding duplicated count of
* common characters in S2
*/
List<Boolean> isCommonCharInS2 = new ArrayList<Boolean>();
for (int i=0; i<s2.length(); i++) {
isCommonCharInS2.add(false);
}
// (1) find the number of characters the two strings have in common.
// note that matching characters can only be half the length of the
// longer string apart.
int maxdist = (int) Math.floor(s2.length() / 2);
int c = 0; // count of common characters
int t = 0; // count of transpositions
int prevpos = -1;
for (int ix = 0; ix < s1.length(); ix++) {
char ch = s1.charAt(ix);
// now try to find it in s2
for (int ix2 = Math.max(0, ix - maxdist);
ix2 < Math.min(s2.length(), ix + maxdist);
ix2++) {
if (ch == s2.charAt(ix2) && !isCommonCharInS2.get(ix2)) {
c++; // we found a common character
isCommonCharInS2.set(ix2, true);
if (prevpos != -1 && ix2 < prevpos)
t++; // moved back before earlier
prevpos = ix2;
break;
}
}
}
// we don't divide t by 2 because as far as we can tell, the above
// code counts transpositions directly.
// System.out.println("c: " + c);
// System.out.println("t: " + t);
// System.out.println("c/m: " + (c / (double) s1.length()));
// System.out.println("c/n: " + (c / (double) s2.length()));
// System.out.println("(c-t)/c: " + ((c - t) / (double) c));
// we might have to give up right here
if (c == 0)
return 0.0;
// first compute the score
double score = ((c / (double) s1.length()) +
(c / (double) s2.length()) +
((c - t) / (double) c)) / 3.0;
// (2) common prefix modification
int p = 0; // length of prefix
int last = Math.min(4, s1.length());
for (; p < last && s1.charAt(p) == s2.charAt(p); p++)
;
score = score + ((p * (1 - score)) / 10);
// (3) longer string adjustment
// I'm confused about this part. Winkler's original source code includes
// it, and Yancey's 2005 paper describes it. However, Winkler's list of
// test cases in his 2006 paper does not include this modification. So
// is this part of Jaro-Winkler, or is it not? Hard to say.
//
// if (s1.length() >= 5 && // both strings at least 5 characters long
// c - p >= 2 && // at least two common characters besides prefix
// c - p >= ((s1.length() - p) / 2)) // fairly rich in common chars
// {
// System.out.println("ADJUSTED!");
// score = score + ((1 - score) * ((c - (p + 1)) /
// ((double) ((s1.length() + s2.length())
// - (2 * (p - 1))))));
// }
// (4) similar characters adjustment
// the same holds for this as for (3) above.
return score;
}
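// Worked example (added for clarity): similarity("MARTHA", "MARHTA")
//   c = 6 common characters, t = 1 transposition, common prefix p = 3
//   score = (6/6 + 6/6 + 5/6) / 3 = 0.9444; with prefix bonus: 0.9444 + 3*(1 - 0.9444)/10 = 0.9611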
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/JaccardIndexComparator.java
|
package water.util.comparison.string;
/*
Copyright 2023 Lars Marius Garshol
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Original code: https://github.com/larsga/Duke/blob/duke-1.2/src/main/java/no/priv/garshol/duke/comparators/JaccardIndexComparator.java
public class JaccardIndexComparator implements StringComparator {
private StringComparator subcomp;
public JaccardIndexComparator() {
this.subcomp = new ExactComparator();
}
public void setComparator(StringComparator comp) {
this.subcomp = comp;
}
public boolean isTokenized() {
return true;
}
public double compare(String s1, String s2) {
if (s1.equals(s2))
return 1.0;
// tokenize
String[] t1 = StringUtils.split(s1);
String[] t2 = StringUtils.split(s2);
// FIXME: we assume t1 and t2 do not have internal duplicates
// ensure that t1 is shorter than or same length as t2
if (t1.length > t2.length) {
String[] tmp = t2;
t2 = t1;
t1 = tmp;
}
// find best matches for each token in t1
double intersection = 0;
double union = t1.length + t2.length;
for (int ix1 = 0; ix1 < t1.length; ix1++) {
double highest = 0;
for (int ix2 = 0; ix2 < t2.length; ix2++)
highest = Math.max(highest, subcomp.compare(t1[ix1], t2[ix2]));
// INV: the best match for t1[ix1] in t2 has similarity highest
intersection += highest;
union -= highest; // we reduce the union by this similarity
}
return intersection / union;
}
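// Worked example (added for clarity; default ExactComparator as subcomparator):
//   compare("a b", "a c"): t1={a,b}, t2={a,c}; intersection = 1, union = 4 - 1 = 3 -> 1/3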
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/LevenshteinDistanceComparator.java
|
package water.util.comparison.string;
import static org.apache.commons.lang.math.IEEE754rUtils.min;
/**
* Non-weighted, case-sensitive Levenshtein distance implementation inspired by Simmetrics library.
* This class is a modified version of the Levenshtein metric from the Simmetrics library.
* Double precision instead of 32-bit float is used. Dependence on Guava has been removed.
*
* Copyright H2O.ai Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
public class LevenshteinDistanceComparator implements StringComparator {
@Override
public boolean isTokenized() {
return true;
}
@Override
public double compare(String a, String b) {
if (a.isEmpty() && b.isEmpty()) {
return 1D;
}
return 1D - (distance(a, b) / Math.max(a.length(), b.length()));
}
/**
* Computes a case-sensitive Levenshtein distance of two strings
*
* @param a First compared instance of {@link String}
* @param b Second compared instance of {@link String}
* @return Computed distance between two given strings
*/
private double distance(final String a, final String b) {
if (a.isEmpty())
return b.length();
if (b.isEmpty())
return a.length();
if (a.equals(b))
return 0;
final int aLength = b.length();
final int bLength = a.length();
double[] v0 = new double[aLength + 1];
double[] v1 = new double[aLength + 1];
// initialize v0 (the previous row of distances)
// this row is A[0][i]: edit distance for an empty a
// the distance is just the number of characters to delete from b
for (int i = 0; i < v0.length; i++) {
v0[i] = i * 1D;
}
for (int i = 0; i < bLength; i++) {
// first element of v1 is A[i+1][0]
// edit distance is delete (i+1) chars from s to match empty b
v1[0] = (i + 1) * 1D;
for (int j = 0; j < aLength; j++) {
v1[j + 1] = min(v1[j] + 1D,
v0[j + 1] + 1D,
v0[j]
+ (a.charAt(i) == b.charAt(j) ? 0D
: 1D));
}
final double[] swap = v0;
v0 = v1;
v1 = swap;
}
// latest result is in v1, which was swapped with v0
return v0[aLength];
}
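// Worked example (added for clarity): compare("kitten", "sitting")
//   distance = 3 (k->s, e->i, insert g), longer length = 7
//   similarity = 1 - 3/7 = 0.5714...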
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/LongestCommonSubstring.java
|
package water.util.comparison.string;
/*
Copyright 2023 Lars Marius Garshol
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Original code: https://github.com/larsga/Duke/blob/duke-1.2/src/main/java/no/priv/garshol/duke/comparators/LongestCommonSubstring.java
public class LongestCommonSubstring implements StringComparator {
private int minlen = 2;
private Formula formula = Formula.OVERLAP;
public double compare(String s1, String s2) {
// a couple of quick cutoffs
if (s1.equals(s2))
return 1.0;
if (Math.min(s1.length(), s2.length()) == 0)
return 0.0;
// the results of the algorithm depends on the order of the input
// strings. therefore need a sub-method for this computation
return (compare_(s1, s2) + compare_(s2, s1)) / 2.0;
}
// FIXME: speed this up by using a one-dimensional array
private double compare_(String s1, String s2) {
// before we begin, note the length of the strings
int shortlen = Math.min(s1.length(), s2.length());
int longlen = Math.max(s1.length(), s2.length());
int removed = 0; // total length of common substrings
while (true) {
// first, we identify the longest common substring
int longest = 0;
int longesti = 0;
int longestj = 0;
int[][] matrix = new int[s1.length()][s2.length()];
for (int i = 0; i < s1.length(); i++) {
for (int j = 0; j < s2.length(); j++) {
if (s1.charAt(i) == s2.charAt(j)) {
if (i == 0 || j == 0)
matrix[i][j] = 1;
else
matrix[i][j] = matrix[i - 1][j - 1] + 1;
if (matrix[i][j] > longest) {
longest = matrix[i][j];
longesti = i;
longestj = j;
}
} else
matrix[i][j] = 0;
}
}
longesti++; // this solves an off-by-one problem
longestj++; // this solves an off-by-one problem
// at this point we know the length of the longest common
// substring, and also its location, since it ends at indexes
// longesti and longestj.
if (longest < minlen)
break; // all remaining common substrings are too short, so we stop
// now we slice away the common substrings
s1 = s1.substring(0, longesti - longest) + s1.substring(longesti);
s2 = s2.substring(0, longestj - longest) + s2.substring(longestj);
removed += longest;
}
return formula.compute(removed, shortlen, longlen);
}
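// Worked example (added for clarity): compare("abcdef", "abcxyz") with the default OVERLAP formula.
//   The longest common substring "abc" (length 3) is sliced away; nothing of length >= minlen remains.
//   removed = 3, shortlen = 6 -> 3/6 = 0.5 in both directions, so the result is 0.5.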
public boolean isTokenized() {
return true;
}
public void setMinimumLength(int minlen) {
this.minlen = minlen;
}
public int getMinimumLength() {
return this.minlen;
}
public void setFormula(Formula formula) {
this.formula = formula;
}
public Formula getFormula() {
return formula;
}
/**
* Represents the different formulas we can use to compute similarity.
*/
public enum Formula {
OVERLAP {
public double compute(int removed, int shortlen, int longlen) {
return removed / (double) shortlen;
}
}, DICE {
public double compute(int removed, int shortlen, int longlen) {
return 2*removed / (double) (shortlen + longlen);
}
}, JACCARD {
public double compute(int removed, int shortlen, int longlen) {
return removed / (double) (shortlen + longlen - removed);
}
};
public double compute(int removed, int shortlen, int longlen) {
throw new IllegalStateException("Unknown formula: " + this);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/QGramComparator.java
|
package water.util.comparison.string;
import java.util.Set;
import java.util.HashSet;
/*
Copyright 2023 Lars Marius Garshol
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Original code: https://github.com/larsga/Duke/blob/duke-1.2/src/main/java/no/priv/garshol/duke/comparators/QGramComparator.java
public class QGramComparator implements StringComparator {
private Formula formula;
private Tokenizer tokenizer;
private int q;
public QGramComparator() {
this.formula = Formula.OVERLAP;
this.tokenizer = Tokenizer.BASIC;
this.q = 2;
}
public boolean isTokenized() {
return true;
}
public double compare(String s1, String s2) {
if (s1.equals(s2))
return 1.0;
Set<String> q1 = tokenizer.qgrams(s1, q);
Set<String> q2 = tokenizer.qgrams(s2, q);
if (q1.isEmpty() || q2.isEmpty())
return 0.0; // division will fail
int common = 0;
for (String gram : q1)
if (q2.contains(gram))
common++;
return formula.compute(common, q1, q2);
}
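// Worked example (added for clarity; defaults: BASIC tokenizer, q = 2, OVERLAP formula):
//   compare("gail", "gale"): q1 = {ga, ai, il}, q2 = {ga, al, le}; common = 1 -> 1/min(3, 3) = 0.333...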
/**
* Sets the value of q, that is, the size of the q-grams.
*/
public void setQ(int q) {
this.q = q;
}
/**
* Tells the comparator what formula to use to compute the actual
* similarity.
*/
public void setFormula(Formula formula) {
this.formula = formula;
}
/**
* Tells the comparator what tokenizer to use to produce q-grams.
*/
public void setTokenizer(Tokenizer tokenizer) {
this.tokenizer = tokenizer;
}
private static String pad(String s, int q, boolean front) {
StringBuffer buf = new StringBuffer(q);
if (!front)
buf.append(s);
for (int ix = 0; ix < q - s.length(); ix++)
buf.append('.');
if (front)
buf.append(s);
return buf.toString();
}
/**
* Represents the different formulas we can use to compute similarity.
*/
public enum Formula {
OVERLAP {
public double compute(int common, Set<String> q1, Set<String> q2) {
return (double) common / Math.min((double) q1.size(), (double) q2.size());
}
}, JACCARD {
public double compute(int common, Set<String> q1, Set<String> q2) {
return (double) common / (double) (q1.size() + q2.size() - common);
}
}, DICE {
public double compute(int common, Set<String> q1, Set<String> q2) {
return (double) (2.0 * common) / (double) (q1.size() + q2.size());
}
};
public double compute(int common, Set<String> q1, Set<String> q2) {
throw new IllegalStateException("Unknown formula: " + this);
}
}
/**
* Represents the different ways we can tokenize a string into a set
* of q-grams for a given q.
*/
public enum Tokenizer {
/**
* Produces basic q-grams, so that 'gail' -> 'ga', 'ai', 'il'.
*/
BASIC {
public Set<String> qgrams(String s, int q) {
Set<String> grams = new HashSet();
for (int ix = 0; ix < s.length() - q + 1; ix++)
grams.add(s.substring(ix, ix + q));
return grams;
}
},
/**
* Produces positional q-grams, so that 'gail' -> 'ga1', 'ai2', 'il3'.
*/
POSITIONAL {
public Set<String> qgrams(String s, int q) {
Set<String> grams = new HashSet();
for (int ix = 0; ix < s.length() - q + 1; ix++)
grams.add(s.substring(ix, ix + q) + ix);
return grams;
}
},
/**
* Produces q-grams with padding, so that 'gail' -> '.g', 'ga', 'ai',
* 'il', 'l.'.
*/
ENDS {
public Set<String> qgrams(String s, int q) {
Set<String> grams = new HashSet();
for (int ix = 1; ix < q; ix++)
grams.add(pad(s.substring(0, ix), q, true));
for (int ix = 0; ix < s.length() - q + 1; ix++)
grams.add(s.substring(ix, ix + q));
for (int ix = 1; ix < q; ix++)
grams.add(pad(s.substring(s.length() - ix), q, false));
return grams;
}
};
public Set<String> qgrams(String s, int q) {
throw new IllegalStateException("Uknown tokenizer: " + this);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/SoundexComparator.java
|
package water.util.comparison.string;
/*
Copyright 2023 Lars Marius Garshol
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Original code: https://github.com/larsga/Duke/blob/duke-1.2/src/main/java/no/priv/garshol/duke/comparators/SoundexComparator.java
public class SoundexComparator implements StringComparator {
// this table is keyed 0-25 (for 'a' to 'z') to the numeric value to put
// in the key. 0 means the letter is to be omitted.
private static char[] number = buildTable();
public double compare(String s1, String s2) {
if (s1.equals(s2))
return 1.0;
if (soundex(s1).equals(soundex(s2)))
return 0.9;
return 0.0;
}
public boolean isTokenized() {
return true; // I guess?
}
/**
* Produces the Soundex key for the given string.
*/
public static String soundex(String str) {
if (str.length() < 1)
return ""; // no soundex key for the empty string (could use 000)
char[] key = new char[4];
key[0] = str.charAt(0);
int pos = 1;
char prev = '0';
for (int ix = 1; ix < str.length() && pos < 4; ix++) {
char ch = str.charAt(ix);
int charno;
if (ch >= 'A' && ch <= 'Z')
charno = ch - 'A';
else if (ch >= 'a' && ch <= 'z')
charno = ch - 'a';
else
continue;
if (number[charno] != '0' && number[charno] != prev)
key[pos++] = number[charno];
prev = number[charno];
}
for ( ; pos < 4; pos++)
key[pos] = '0';
return new String(key);
}
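// Worked example (added for clarity): soundex("Robert") -> "R163"
//   'R' is kept verbatim; 'o' and 'e' map to '0' and are dropped; 'b' -> '1', 'r' -> '6', 't' -> '3'.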
/**
* Builds the mapping table.
*/
private static char[] buildTable() {
char[] table = new char[26];
for (int ix = 0; ix < table.length; ix++)
table[ix] = '0';
table['B' - 'A'] = '1';
table['P' - 'A'] = '1';
table['F' - 'A'] = '1';
table['V' - 'A'] = '1';
table['C' - 'A'] = '2';
table['S' - 'A'] = '2';
table['K' - 'A'] = '2';
table['G' - 'A'] = '2';
table['J' - 'A'] = '2';
table['Q' - 'A'] = '2';
table['X' - 'A'] = '2';
table['Z' - 'A'] = '2';
table['D' - 'A'] = '3';
table['T' - 'A'] = '3';
table['L' - 'A'] = '4';
table['M' - 'A'] = '5';
table['N' - 'A'] = '5';
table['R' - 'A'] = '6';
return table;
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/StringComparator.java
|
package water.util.comparison.string;
/*
Copyright 2023 Lars Marius Garshol
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Original code: https://github.com/larsga/Duke/blob/duke-1.2/src/main/java/no/priv/garshol/duke/Comparator.java
public interface StringComparator {
public boolean isTokenized();
public double compare(String v1, String v2);
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/StringComparatorFactory.java
|
package water.util.comparison.string;
public class StringComparatorFactory {
public static StringComparator makeComparator(String measure) {
switch (measure) {
case "jaccard":
case "JaccardIndex":
return new JaccardIndexComparator();
case "jw":
case "JaroWinkler":
return new H2OJaroWinklerComparator();
case "lv":
case "Levenshtein":
return new LevenshteinDistanceComparator();
case "lcs":
case "LongestCommonSubstring":
return new LongestCommonSubstring();
case "qgram":
case "QGram":
return new QGramComparator();
case "soundex":
case "Soundex":
return new SoundexComparator();
default:
throw new IllegalArgumentException("Unknown comparator: " + measure);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/water/util/comparison/string/StringUtils.java
|
package water.util.comparison.string;
/*
Copyright 2023 Lars Marius Garshol
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Original code: https://github.com/larsga/Duke/blob/duke-1.2/src/main/java/no/priv/garshol/duke/utils/StringUtils.java
public class StringUtils {
public static String[] split(String str) {
String[] tokens = new String[(int) (str.length() / 2) + 1];
int start = 0;
int tcount = 0;
boolean prevws = false;
int ix;
for (ix = 0; ix < str.length(); ix++) {
if (str.charAt(ix) == ' ') {
if (!prevws && ix > 0)
tokens[tcount++] = str.substring(start, ix);
prevws = true;
start = ix + 1;
} else
prevws = false;
}
if (!prevws && start != ix)
tokens[tcount++] = str.substring(start);
String[] tmp = new String[tcount];
for (ix = 0; ix < tcount; ix++)
tmp[ix] = tokens[ix];
return tmp;
}
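// Illustrative behavior (added for clarity): split tokenizes on spaces, collapsing runs.
//   split("a  b c ") -> {"a", "b", "c"}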
public static String join(String[] pieces) {
StringBuilder tmp = new StringBuilder();
for (int ix = 0; ix < pieces.length; ix++) {
if (ix != 0)
tmp.append(" ");
tmp.append(pieces[ix]);
}
return tmp.toString();
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/DWImageConverter.java
|
package hex.genmodel.algos.deepwater;
import hex.genmodel.GenModel;
import hex.genmodel.easy.CategoricalEncoder;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowToRawDataConverter;
import hex.genmodel.easy.exception.PredictException;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Map;
public class DWImageConverter extends RowToRawDataConverter {
private final DeepwaterMojoModel _dwm;
DWImageConverter(DeepwaterMojoModel m, Map<String, Integer> modelColumnNameToIndexMap, Map<Integer, CategoricalEncoder> domainMap,
EasyPredictModelWrapper.ErrorConsumer errorConsumer, EasyPredictModelWrapper.Config config) {
super(m, modelColumnNameToIndexMap, domainMap, errorConsumer, config);
_dwm = m;
}
@Override
protected boolean convertValue(String columnName, Object o, CategoricalEncoder catEncoder, int targetIndex, double[] rawData) throws PredictException {
BufferedImage img = null;
if (o instanceof String) {
String s = ((String) o).trim();
// URL to an image was given
boolean isURL = s.matches("^(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|]");
try {
img = isURL ? ImageIO.read(new URL(s)) : ImageIO.read(new File(s));
} catch (IOException e) {
throw new PredictException("Couldn't read image from " + s);
}
} else if (o instanceof byte[]) {
// Read the image from raw bytes
InputStream is = new ByteArrayInputStream((byte[]) o);
try {
img = ImageIO.read(is);
} catch (IOException e) {
throw new PredictException("Couldn't interpret raw bytes as an image.");
}
}
if (img != null) {
int W = _dwm._width;
int H = _dwm._height;
int C = _dwm._channels;
float[] _destData = new float[W * H * C];
try {
GenModel.img2pixels(img, W, H, C, _destData, 0, _dwm._meanImageData);
} catch (IOException e) {
throw new PredictException("Couldn't vectorize image.", e);
}
// Fill the caller's buffer in place: reassigning the rawData parameter to a
// new array would be invisible to the caller.
for (int i = 0; i < _destData.length; ++i)
  rawData[i] = _destData[i];
return true;
} else
return super.convertValue(columnName, o, catEncoder, targetIndex, rawData);
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/DWTextConverter.java
|
package hex.genmodel.algos.deepwater;
import hex.genmodel.GenModel;
import hex.genmodel.easy.CategoricalEncoder;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowToRawDataConverter;
import hex.genmodel.easy.exception.PredictException;
import java.util.Map;
public class DWTextConverter extends RowToRawDataConverter {
DWTextConverter(GenModel m, Map<String, Integer> modelColumnNameToIndexMap, Map<Integer, CategoricalEncoder> domainMap,
EasyPredictModelWrapper.ErrorConsumer errorConsumer, EasyPredictModelWrapper.Config config) {
super(m, modelColumnNameToIndexMap, domainMap, errorConsumer, config);
}
@Override
protected boolean convertValue(String columnName, Object o, CategoricalEncoder catEncoder, int targetIndex, double[] rawData) throws PredictException {
if (o instanceof String) {
throw new PredictException("MOJO scoring for text classification is not yet implemented.");
}
return super.convertValue(columnName, o, catEncoder, targetIndex, rawData);
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/DeepwaterMojoModel.java
|
package hex.genmodel.algos.deepwater;
import deepwater.backends.BackendModel;
import deepwater.backends.BackendParams;
import deepwater.backends.BackendTrain;
import deepwater.backends.RuntimeOptions;
import deepwater.datasets.ImageDataSet;
import hex.genmodel.ConverterFactoryProvidingModel;
import hex.genmodel.GenModel;
import hex.genmodel.MojoModel;
import hex.genmodel.algos.deepwater.caffe.DeepwaterCaffeBackend;
import hex.genmodel.easy.CategoricalEncoder;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowToRawDataConverter;
import java.io.File;
import java.util.Map;
public class DeepwaterMojoModel extends MojoModel implements ConverterFactoryProvidingModel {
public String _problem_type;
public int _mini_batch_size;
public int _height;
public int _width;
public int _channels;
public int _nums;
public int _cats;
public int[] _catOffsets;
public double[] _normMul;
public double[] _normSub;
public double[] _normRespMul;
public double[] _normRespSub;
public boolean _useAllFactorLevels;
transient byte[] _network;
transient byte[] _parameters;
public transient float[] _meanImageData;
BackendTrain _backend; //interface provider
BackendModel _model; //pointer to C++ process
ImageDataSet _imageDataSet; //interface provider
RuntimeOptions _opts;
BackendParams _backendParams;
DeepwaterMojoModel(String[] columns, String[][] domains, String responseColumn) {
super(columns, domains, responseColumn);
}
/**
* Corresponds to `hex.DeepWater.score0()`
*/
@Override
public final double[] score0(double[] doubles, double offset, double[] preds) {
assert(doubles != null) : "doubles are null";
float[] floats;
int cats = _catOffsets == null ? 0 : _catOffsets[_cats];
if (_nums > 0) {
floats = new float[_nums + cats]; //TODO: use thread-local storage
GenModel.setInput(doubles, floats, _nums, _cats, _catOffsets, _normMul, _normSub, _useAllFactorLevels, true);
} else {
floats = new float[doubles.length];
for (int i=0; i<floats.length; ++i) {
floats[i] = (float) doubles[i] - (_meanImageData == null ? 0 : _meanImageData[i]);
}
}
float[] predFloats = _backend.predict(_model, floats);
assert(_nclasses == predFloats.length) : "nclasses " + _nclasses + " predFloats.length " + predFloats.length;
if (_nclasses > 1) {
for (int i = 0; i < predFloats.length; ++i)
preds[1 + i] = predFloats[i];
if (_balanceClasses)
GenModel.correctProbabilities(preds, _priorClassDistrib, _modelClassDistrib);
preds[0] = GenModel.getPrediction(preds, _priorClassDistrib, doubles, _defaultThreshold);
} else {
if (_normRespMul!=null && _normRespSub!=null)
preds[0] = predFloats[0] * _normRespMul[0] + _normRespSub[0];
else
preds[0] = predFloats[0];
}
return preds;
}
@Override
public double[] score0(double[] row, double[] preds) {
return score0(row, 0.0, preds);
}
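/**
 * Resolves a backend implementation by reflection. Returns null when the
 * requested backend ("mxnet", "tensorflow", or "caffe") is not available on
 * the classpath or, for Caffe, not installed under CAFFE_H2O_DIR.
 */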
static public BackendTrain createDeepWaterBackend(String backend) {
try {
// For Caffe, only instantiate if installed at the right place
File f = new File(DeepwaterCaffeBackend.CAFFE_H2O_DIR);
if (backend.equals("caffe") && f.exists() && f.isDirectory())
return new DeepwaterCaffeBackend();
if (backend.equals("mxnet"))
backend="deepwater.backends.mxnet.MXNetBackend";
else if (backend.equals("tensorflow"))
backend = "deepwater.backends.tensorflow.TensorflowBackend";
// else if (backend.equals("xgrpc"))
// backend="deepwater.backends.grpc.XGRPCBackendTrain";
return (BackendTrain) (Class.forName(backend).newInstance());
} catch (Exception ignored) {
//ignored.printStackTrace();
}
return null;
}
@Override
public RowToRawDataConverter makeConverterFactory(Map<String, Integer> modelColumnNameToIndexMap,
Map<Integer, CategoricalEncoder> domainMap,
EasyPredictModelWrapper.ErrorConsumer errorConsumer,
EasyPredictModelWrapper.Config config) {
if (_problem_type.equals("image"))
return new DWImageConverter(this, modelColumnNameToIndexMap, domainMap, errorConsumer, config);
else if (_problem_type.equals("text")) {
return new DWTextConverter(this, modelColumnNameToIndexMap, domainMap, errorConsumer, config);
}
return new RowToRawDataConverter(this, modelColumnNameToIndexMap, domainMap,
errorConsumer, config);
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/DeepwaterMojoReader.java
|
package hex.genmodel.algos.deepwater;
import deepwater.backends.BackendParams;
import deepwater.backends.RuntimeOptions;
import deepwater.datasets.ImageDataSet;
import hex.genmodel.ModelMojoReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.UUID;
/**
 * Reads a Deep Water MOJO: scalar metadata, the serialized network definition
 * and parameters, and the optional mean image.
 */
public class DeepwaterMojoReader extends ModelMojoReader<DeepwaterMojoModel> {
@Override
public String getModelName() {
return "Deep Water";
}
@Override
protected void readModelData() throws IOException {
try {
_model._network = readblob("model_network");
_model._parameters = readblob("model_params");
} catch (IOException e) {
throw new RuntimeException(e);
}
_model._backend = DeepwaterMojoModel.createDeepWaterBackend((String) readkv("backend")); // new ImageTrain(_width, _height, _channels, _deviceID, (int)parameters.getOrMakeRealSeed(), _gpu);
if (_model._backend == null) {
throw new IllegalArgumentException("Couldn't instantiate the Deep Water backend.");
}
_model._problem_type = readkv("problem_type");
_model._mini_batch_size = readkv("mini_batch_size");
_model._height = readkv("height");
_model._width = readkv("width");
_model._channels = readkv("channels");
_model._nums = readkv("nums");
_model._cats = readkv("cats");
_model._catOffsets = readkv("cat_offsets");
_model._normMul = readkv("norm_mul");
_model._normSub = readkv("norm_sub");
_model._normRespMul = readkv("norm_resp_mul");
_model._normRespSub = readkv("norm_resp_sub");
_model._useAllFactorLevels = readkv("use_all_factor_levels");
_model._imageDataSet = new ImageDataSet(_model._width, _model._height, _model._channels, _model._nclasses);
_model._opts = new RuntimeOptions();
_model._opts.setSeed(0); // ignored - not needed during scoring
_model._opts.setUseGPU((boolean)readkv("gpu"));
_model._opts.setDeviceID((int[])readkv("device_id"));
_model._backendParams = new BackendParams();
_model._backendParams.set("mini_batch_size", 1);
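// Materialize the serialized network definition into a temp file so the
// native backend can build the net from it; the file is removed afterwards.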
File file = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString() + ".json");
try {
  try (FileOutputStream os = new FileOutputStream(file)) {
    os.write(_model._network); // close the stream even if the write fails
  }
  _model._model = _model._backend.buildNet(_model._imageDataSet, _model._opts, _model._backendParams, _model._nclasses, file.toString());
} catch (IOException e) {
  e.printStackTrace();
} finally {
  _model._backend.deleteSavedModel(file.toString()); // file is never null here
}
// 1) read the raw bytes of the mean image file from the MOJO
byte[] meanBlob;
try {
meanBlob = readblob("mean_image_file"); //throws exception if not found
// 2) write the mean image file
File meanFile = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString() + ".mean");
try {
  try (FileOutputStream os = new FileOutputStream(meanFile)) {
    os.write(meanBlob);
  }
  // 3) tell the backend to use that mean image file (just in case it needs it)
  _model._imageDataSet.setMeanData(_model._backend.loadMeanImage(_model._model, meanFile.toString()));
  // 4) keep a float[] version of the mean array to be used during image processing
  _model._meanImageData = _model._imageDataSet.getMeanData();
} catch (IOException e) {
  e.printStackTrace();
} finally {
  meanFile.delete();
}
} catch (IOException e) {
  // the mean image is optional - not every MOJO contains one
}
file = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
try {
_model._backend.writeBytes(file, _model._parameters);
_model._backend.loadParam(_model._model, file.toString());
} catch (IOException e) {
e.printStackTrace();
} finally {
if (file!=null)
_model._backend.deleteSavedParam(file.toString());
}
}
@Override
protected DeepwaterMojoModel makeModel(String[] columns, String[][] domains, String responseColumn) {
return new DeepwaterMojoModel(columns, domains, responseColumn);
}
@Override public String mojoVersion() {
return "1.00";
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/caffe/DeepwaterCaffeBackend.java
|
package hex.genmodel.algos.deepwater.caffe;
import deepwater.backends.BackendModel;
import deepwater.backends.BackendParams;
import deepwater.backends.BackendTrain;
import deepwater.backends.RuntimeOptions;
import deepwater.datasets.ImageDataSet;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
/**
 * This backend forwards requests to a Docker image running the Python
 * Caffe interface. See h2o-docker/caffe for more information.
 */
public class DeepwaterCaffeBackend implements BackendTrain {
static public final String CAFFE_DIR = "/opt/caffe/";
static public final String CAFFE_H2O_DIR = "/opt/caffe-h2o/";
@Override
public void delete(BackendModel m) {
((DeepwaterCaffeModel) m).close();
}
@Override
public BackendModel buildNet(ImageDataSet dataset, RuntimeOptions opts, BackendParams bparms, int num_classes, String name) {
if (name.equals("MLP")) {
int[] hidden = (int[]) bparms.get("hidden");
int[] sizes = new int[hidden.length + 2];
sizes[0] = dataset.getWidth();
System.arraycopy(hidden, 0, sizes, 1, hidden.length);
sizes[sizes.length - 1] = num_classes;
System.err.println("Ignoring device_id");
double[] hdr = new double[sizes.length];
if (bparms.get("input_dropout_ratio") != null)
hdr[0] = (double) bparms.get("input_dropout_ratio");
double[] bphdr = (double[]) bparms.get("hidden_dropout_ratios");
if (bphdr != null)
System.arraycopy(bphdr, 0, hdr, 1, bphdr.length);
String[] layers = new String[sizes.length];
System.arraycopy(bparms.get("activations"), 0, layers, 1, hidden.length);
layers[0] = "data";
layers[layers.length - 1] = "loss";
return new DeepwaterCaffeModel(
(Integer) bparms.get("mini_batch_size"),
sizes,
layers,
hdr,
opts.getSeed(),
opts.useGPU()
);
} else {
return new DeepwaterCaffeModel(
name,
new int[] {
(Integer) bparms.get("mini_batch_size"),
dataset.getChannels(),
dataset.getWidth(),
dataset.getHeight()
},
opts.getSeed(),
opts.useGPU()
);
}
}
// graph (model definition) only
@Override
public void saveModel(BackendModel m, String model_path) {
((DeepwaterCaffeModel) m).saveModel(model_path);
}
// full state of everything but the graph to continue training
@Override
public void loadParam(BackendModel m, String param_path) {
((DeepwaterCaffeModel) m).loadParam(param_path);
}
// full state of everything but the graph to continue training
@Override
public void saveParam(BackendModel m, String param_path) {
((DeepwaterCaffeModel) m).saveParam(param_path);
}
@Override
public float[] loadMeanImage(BackendModel m, String path) {
throw new UnsupportedOperationException();
}
@Override
public String toJson(BackendModel m) {
throw new UnsupportedOperationException();
}
@Override
public void setParameter(BackendModel m, String name, float value) {
// if (name.equals("learning_rate"))
// ((DeepwaterCaffeModel) m).learning_rate(value);
// else if (name.equals("momentum"))
// ((DeepwaterCaffeModel) m).momentum(value);
}
// given a mini-batch worth of data and labels, train
@Override
public float[]/*ignored*/ train(BackendModel m, float[/*mini_batch * input_neurons*/] data, float[/*mini_batch*/] label) {
((DeepwaterCaffeModel) m).train(data, label);
return null; //return value is always ignored
}
// return predictions (num_classes logits (softmax outputs) x mini_batch)
@Override
public float[/*mini_batch * num_classes*/] predict(BackendModel m, float[/*mini_batch * input_neurons*/] data) {
// new float[cm.mini_batch_size * cm.num_classes];
return ((DeepwaterCaffeModel) m).predict(data);
}
@Override
public void deleteSavedModel(String model_path) {
}
@Override
public void deleteSavedParam(String param_path) {
}
@Override
public String listAllLayers(BackendModel m) {
return null;
}
@Override
public float[] extractLayer(BackendModel m, String name, float[] data) {
return new float[0];
}
public void writeBytes(File file, byte[] payload) throws IOException {
  try (FileOutputStream os = new FileOutputStream(file)) {
    os.write(payload);
  }
}
public byte[] readBytes(File file) throws IOException {
  byte[] params = new byte[(int) file.length()];
  // A bare InputStream.read() may return fewer bytes than requested;
  // readFully loops until the whole buffer is filled.
  try (DataInputStream is = new DataInputStream(new FileInputStream(file))) {
    is.readFully(params);
  }
  return params;
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/caffe/DeepwaterCaffeModel.java
|
package hex.genmodel.algos.deepwater.caffe;
import com.google.protobuf.nano.CodedInputByteBufferNano;
import com.google.protobuf.nano.CodedOutputByteBufferNano;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import deepwater.backends.BackendModel;
import hex.genmodel.algos.deepwater.caffe.nano.Deepwater;
import hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Cmd;
public class DeepwaterCaffeModel implements BackendModel {
private int[] _input_shape = new int[0];
private int[] _sizes = new int[0]; // neurons per layer
private String[] _types = new String[0]; // layer types
private double[] _dropout_ratios = new double[0];
private long _seed;
private boolean _useGPU;
private String _graph = "";
private Process _process;
private static final ThreadLocal<ByteBuffer> _buffer = new ThreadLocal<>();
public DeepwaterCaffeModel(int batch_size, int[] sizes,
String[] types, double[] dropout_ratios,
long seed, boolean useGPU) {
_input_shape = new int[] {batch_size, 1, 1, sizes[0]};
_sizes = sizes;
_types = types;
_dropout_ratios = dropout_ratios;
_seed = seed;
_useGPU = useGPU;
start();
}
public DeepwaterCaffeModel(String graph, int[] input_shape, long seed, boolean useGPU) {
_graph = graph;
_input_shape = input_shape;
_seed = seed;
_useGPU = useGPU;
start();
}
private void start() {
if (_process == null) {
try {
startRegular();
} catch (IOException e) {
throw new RuntimeException(e);
}
Cmd cmd = new Cmd();
cmd.type = Deepwater.Create;
cmd.graph = _graph;
cmd.inputShape = _input_shape;
cmd.solverType = "Adam";
cmd.sizes = _sizes;
cmd.types = _types;
cmd.dropoutRatios = _dropout_ratios;
cmd.learningRate = .01f;
cmd.momentum = .99f;
cmd.randomSeed = _seed;
cmd.useGpu = _useGPU;
call(cmd);
}
}
public void saveModel(String model_path) {
Cmd cmd = new Cmd();
cmd.type = Deepwater.SaveGraph;
cmd.path = model_path;
call(cmd);
}
public void saveParam(String param_path) {
Cmd cmd = new Cmd();
cmd.type = Deepwater.Save;
cmd.path = param_path;
call(cmd);
}
public void loadParam(String param_path) {
Cmd cmd = new Cmd();
cmd.type = Deepwater.Load;
cmd.path = param_path;
call(cmd);
}
private static void copy(float[] data, byte[] buff) {
if (data.length * 4 != buff.length)
  throw new RuntimeException("float[] and byte[] buffer sizes disagree");
ByteBuffer buffer = _buffer.get();
if (buffer == null || buffer.capacity() < buff.length) {
_buffer.set(buffer = ByteBuffer.allocateDirect(buff.length));
buffer.order(ByteOrder.LITTLE_ENDIAN);
}
buffer.clear();
buffer.asFloatBuffer().put(data);
buffer.get(buff);
}
private static void copy(float[][] buffs, Cmd cmd) {
cmd.data = new byte[buffs.length][];
for (int i = 0; i < buffs.length; i++) {
cmd.data[i] = new byte[buffs[i].length * 4];
copy(buffs[i], cmd.data[i]);
}
}
public void train(float[] data, float[] label) {
Cmd cmd = new Cmd();
cmd.type = Deepwater.Train;
cmd.inputShape = _input_shape;
int len = _input_shape[0] * _input_shape[1] * _input_shape[2] * _input_shape[3];
if (data.length != len)
  throw new RuntimeException("Expected " + len + " input values, got " + data.length);
if (label.length != _input_shape[0])
  throw new RuntimeException("Expected one label per mini-batch row");
float[][] buffs = new float[][] {data, label};
copy(buffs, cmd);
call(cmd);
}
public float[] predict(float[] data) {
Cmd cmd = new Cmd();
cmd.type = Deepwater.Predict;
cmd.inputShape = _input_shape;
// int len = _input_shape[0] * _input_shape[1] * _input_shape[2] * _input_shape[3];
// if (data.length != len)
// throw new RuntimeException(data.length + " vs " + len);
float[][] buffs = new float[][] {data};
copy(buffs, cmd);
cmd = call(cmd);
ByteBuffer buffer = _buffer.get();
if (buffer == null || buffer.capacity() < cmd.data[0].length) {
_buffer.set(buffer = ByteBuffer.allocateDirect(cmd.data[0].length));
buffer.order(ByteOrder.LITTLE_ENDIAN);
}
buffer.clear();
buffer.put(cmd.data[0]);
float[] res = new float[cmd.data[0].length / 4];
buffer.flip();
buffer.asFloatBuffer().get(res);
return res;
}
// Debug, or if we find a way to package Caffe without Docker
private void startRegular() throws IOException {
String pwd = DeepwaterCaffeBackend.CAFFE_H2O_DIR;
ProcessBuilder pb = new ProcessBuilder("python3 backend.py".split(" "));
pb.environment().put("PYTHONPATH", DeepwaterCaffeBackend.CAFFE_DIR + "python");
pb.redirectError(ProcessBuilder.Redirect.INHERIT);
pb.directory(new File(pwd));
_process = pb.start();
}
void close() {
_process.destroy();
try {
_process.waitFor();
} catch (InterruptedException ex) {
// Ignore
}
}
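/*
 * Wire protocol (as implemented below): each request and response is framed
 * as a 4-byte big-endian length followed by a nano-protobuf Cmd payload,
 * exchanged over the child process's stdin/stdout.
 */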
private Cmd call(Cmd cmd) {
try {
OutputStream stdin = _process.getOutputStream();
int len = cmd.getSerializedSize();
ByteBuffer buffer = ByteBuffer.allocate(4 + len);
buffer.putInt(len);
CodedOutputByteBufferNano ou = CodedOutputByteBufferNano.newInstance(
buffer.array(), buffer.position(), buffer.remaining());
cmd.writeTo(ou);
buffer.position(buffer.position() + len);
stdin.write(buffer.array(), 0, buffer.position());
stdin.flush();
InputStream stdout = _process.getInputStream();
int read = stdout.read(buffer.array(), 0, 4);
if (read != 4)
  throw new RuntimeException("Failed to read the 4-byte response length");
buffer.position(0);
buffer.limit(read);
len = buffer.getInt();
if (buffer.capacity() < len)
buffer = ByteBuffer.allocate(len);
buffer.position(0);
buffer.limit(len);
while (buffer.position() < buffer.limit()) {
  read = stdout.read(buffer.array(), buffer.position(),
      buffer.limit() - buffer.position()); // read at most what remains of the frame
  if (read < 0)
    throw new RuntimeException("Unexpected end of stream from the Caffe process");
  buffer.position(buffer.position() + read);
}
Cmd res = new Cmd();
CodedInputByteBufferNano in = CodedInputByteBufferNano.newInstance(
buffer.array(), 0, buffer.position());
res.mergeFrom(in);
return res;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/caffe
|
java-sources/ai/h2o/h2o-genmodel-ext-deepwater/3.46.0.7/hex/genmodel/algos/deepwater/caffe/nano/Deepwater.java
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
package hex.genmodel.algos.deepwater.caffe.nano;
@SuppressWarnings("hiding")
public interface Deepwater {
// enum Type
public static final int Create = 0;
public static final int Train = 1;
public static final int Predict = 2;
public static final int SaveGraph = 3;
public static final int Save = 4;
public static final int Load = 5;
public static final int Success = 10;
public static final int Failure = 11;
public static final class Cmd extends
com.google.protobuf.nano.MessageNano {
private static volatile Cmd[] _emptyArray;
public static Cmd[] emptyArray() {
// Lazily initializes the empty array
if (_emptyArray == null) {
synchronized (
com.google.protobuf.nano.InternalNano.LAZY_INIT_LOCK) {
if (_emptyArray == null) {
_emptyArray = new Cmd[0];
}
}
}
return _emptyArray;
}
// .deepwater.Type type = 1;
public int type;
// string graph = 100;
public java.lang.String graph;
// repeated int32 input_shape = 101;
public int[] inputShape;
// string solver_type = 102;
public java.lang.String solverType;
// float learning_rate = 103;
public float learningRate;
// float momentum = 104;
public float momentum;
// int64 random_seed = 105;
public long randomSeed;
// bool use_gpu = 106;
public boolean useGpu;
// repeated int32 sizes = 201;
public int[] sizes;
// repeated string types = 202;
public java.lang.String[] types;
// repeated double dropout_ratios = 203;
public double[] dropoutRatios;
// repeated bytes data = 300;
public byte[][] data;
// string path = 400;
public java.lang.String path;
public Cmd() {
clear();
}
public Cmd clear() {
type = hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Create;
graph = "";
inputShape = com.google.protobuf.nano.WireFormatNano.EMPTY_INT_ARRAY;
solverType = "";
learningRate = 0F;
momentum = 0F;
randomSeed = 0L;
useGpu = false;
sizes = com.google.protobuf.nano.WireFormatNano.EMPTY_INT_ARRAY;
types = com.google.protobuf.nano.WireFormatNano.EMPTY_STRING_ARRAY;
dropoutRatios = com.google.protobuf.nano.WireFormatNano.EMPTY_DOUBLE_ARRAY;
data = com.google.protobuf.nano.WireFormatNano.EMPTY_BYTES_ARRAY;
path = "";
cachedSize = -1;
return this;
}
@Override
public void writeTo(com.google.protobuf.nano.CodedOutputByteBufferNano output)
throws java.io.IOException {
if (this.type != hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Create) {
output.writeInt32(1, this.type);
}
if (!this.graph.equals("")) {
output.writeString(100, this.graph);
}
if (this.inputShape != null && this.inputShape.length > 0) {
for (int i = 0; i < this.inputShape.length; i++) {
output.writeInt32(101, this.inputShape[i]);
}
}
if (!this.solverType.equals("")) {
output.writeString(102, this.solverType);
}
if (java.lang.Float.floatToIntBits(this.learningRate)
!= java.lang.Float.floatToIntBits(0F)) {
output.writeFloat(103, this.learningRate);
}
if (java.lang.Float.floatToIntBits(this.momentum)
!= java.lang.Float.floatToIntBits(0F)) {
output.writeFloat(104, this.momentum);
}
if (this.randomSeed != 0L) {
output.writeInt64(105, this.randomSeed);
}
if (this.useGpu != false) {
output.writeBool(106, this.useGpu);
}
if (this.sizes != null && this.sizes.length > 0) {
for (int i = 0; i < this.sizes.length; i++) {
output.writeInt32(201, this.sizes[i]);
}
}
if (this.types != null && this.types.length > 0) {
for (int i = 0; i < this.types.length; i++) {
java.lang.String element = this.types[i];
if (element != null) {
output.writeString(202, element);
}
}
}
if (this.dropoutRatios != null && this.dropoutRatios.length > 0) {
for (int i = 0; i < this.dropoutRatios.length; i++) {
output.writeDouble(203, this.dropoutRatios[i]);
}
}
if (this.data != null && this.data.length > 0) {
for (int i = 0; i < this.data.length; i++) {
byte[] element = this.data[i];
if (element != null) {
output.writeBytes(300, element);
}
}
}
if (!this.path.equals("")) {
output.writeString(400, this.path);
}
super.writeTo(output);
}
@Override
protected int computeSerializedSize() {
int size = super.computeSerializedSize();
if (this.type != hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Create) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32Size(1, this.type);
}
if (!this.graph.equals("")) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeStringSize(100, this.graph);
}
if (this.inputShape != null && this.inputShape.length > 0) {
int dataSize = 0;
for (int i = 0; i < this.inputShape.length; i++) {
int element = this.inputShape[i];
dataSize += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32SizeNoTag(element);
}
size += dataSize;
size += 2 * this.inputShape.length;
}
if (!this.solverType.equals("")) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeStringSize(102, this.solverType);
}
if (java.lang.Float.floatToIntBits(this.learningRate)
!= java.lang.Float.floatToIntBits(0F)) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeFloatSize(103, this.learningRate);
}
if (java.lang.Float.floatToIntBits(this.momentum)
!= java.lang.Float.floatToIntBits(0F)) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeFloatSize(104, this.momentum);
}
if (this.randomSeed != 0L) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt64Size(105, this.randomSeed);
}
if (this.useGpu != false) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeBoolSize(106, this.useGpu);
}
if (this.sizes != null && this.sizes.length > 0) {
int dataSize = 0;
for (int i = 0; i < this.sizes.length; i++) {
int element = this.sizes[i];
dataSize += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32SizeNoTag(element);
}
size += dataSize;
size += 2 * this.sizes.length;
}
if (this.types != null && this.types.length > 0) {
int dataCount = 0;
int dataSize = 0;
for (int i = 0; i < this.types.length; i++) {
java.lang.String element = this.types[i];
if (element != null) {
dataCount++;
dataSize += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeStringSizeNoTag(element);
}
}
size += dataSize;
size += 2 * dataCount;
}
if (this.dropoutRatios != null && this.dropoutRatios.length > 0) {
int dataSize = 8 * this.dropoutRatios.length;
size += dataSize;
size += 2 * this.dropoutRatios.length;
}
if (this.data != null && this.data.length > 0) {
int dataCount = 0;
int dataSize = 0;
for (int i = 0; i < this.data.length; i++) {
byte[] element = this.data[i];
if (element != null) {
dataCount++;
dataSize += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeBytesSizeNoTag(element);
}
}
size += dataSize;
size += 2 * dataCount;
}
if (!this.path.equals("")) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeStringSize(400, this.path);
}
return size;
}
@Override
public Cmd mergeFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
return this;
default: {
if (!com.google.protobuf.nano.WireFormatNano.parseUnknownField(input, tag)) {
return this;
}
break;
}
case 8: {
int value = input.readInt32();
switch (value) {
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Create:
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Train:
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Predict:
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.SaveGraph:
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Save:
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Load:
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Success:
case hex.genmodel.algos.deepwater.caffe.nano.Deepwater.Failure:
this.type = value;
break;
}
break;
}
case 802: {
this.graph = input.readString();
break;
}
case 808: {
int arrayLength = com.google.protobuf.nano.WireFormatNano
.getRepeatedFieldArrayLength(input, 808);
int i = this.inputShape == null ? 0 : this.inputShape.length;
int[] newArray = new int[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.inputShape, 0, newArray, 0, i);
}
for (; i < newArray.length - 1; i++) {
newArray[i] = input.readInt32();
input.readTag();
}
// Last one without readTag.
newArray[i] = input.readInt32();
this.inputShape = newArray;
break;
}
case 810: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
// First pass to compute array length.
int arrayLength = 0;
int startPos = input.getPosition();
while (input.getBytesUntilLimit() > 0) {
input.readInt32();
arrayLength++;
}
input.rewindToPosition(startPos);
int i = this.inputShape == null ? 0 : this.inputShape.length;
int[] newArray = new int[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.inputShape, 0, newArray, 0, i);
}
for (; i < newArray.length; i++) {
newArray[i] = input.readInt32();
}
this.inputShape = newArray;
input.popLimit(limit);
break;
}
case 818: {
this.solverType = input.readString();
break;
}
case 829: {
this.learningRate = input.readFloat();
break;
}
case 837: {
this.momentum = input.readFloat();
break;
}
case 840: {
this.randomSeed = input.readInt64();
break;
}
case 848: {
this.useGpu = input.readBool();
break;
}
case 1608: {
int arrayLength = com.google.protobuf.nano.WireFormatNano
.getRepeatedFieldArrayLength(input, 1608);
int i = this.sizes == null ? 0 : this.sizes.length;
int[] newArray = new int[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.sizes, 0, newArray, 0, i);
}
for (; i < newArray.length - 1; i++) {
newArray[i] = input.readInt32();
input.readTag();
}
// Last one without readTag.
newArray[i] = input.readInt32();
this.sizes = newArray;
break;
}
case 1610: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
// First pass to compute array length.
int arrayLength = 0;
int startPos = input.getPosition();
while (input.getBytesUntilLimit() > 0) {
input.readInt32();
arrayLength++;
}
input.rewindToPosition(startPos);
int i = this.sizes == null ? 0 : this.sizes.length;
int[] newArray = new int[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.sizes, 0, newArray, 0, i);
}
for (; i < newArray.length; i++) {
newArray[i] = input.readInt32();
}
this.sizes = newArray;
input.popLimit(limit);
break;
}
case 1618: {
int arrayLength = com.google.protobuf.nano.WireFormatNano
.getRepeatedFieldArrayLength(input, 1618);
int i = this.types == null ? 0 : this.types.length;
java.lang.String[] newArray = new java.lang.String[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.types, 0, newArray, 0, i);
}
for (; i < newArray.length - 1; i++) {
newArray[i] = input.readString();
input.readTag();
}
// Last one without readTag.
newArray[i] = input.readString();
this.types = newArray;
break;
}
case 1625: {
int arrayLength = com.google.protobuf.nano.WireFormatNano
.getRepeatedFieldArrayLength(input, 1625);
int i = this.dropoutRatios == null ? 0 : this.dropoutRatios.length;
double[] newArray = new double[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.dropoutRatios, 0, newArray, 0, i);
}
for (; i < newArray.length - 1; i++) {
newArray[i] = input.readDouble();
input.readTag();
}
// Last one without readTag.
newArray[i] = input.readDouble();
this.dropoutRatios = newArray;
break;
}
case 1626: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
int arrayLength = length / 8;
int i = this.dropoutRatios == null ? 0 : this.dropoutRatios.length;
double[] newArray = new double[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.dropoutRatios, 0, newArray, 0, i);
}
for (; i < newArray.length; i++) {
newArray[i] = input.readDouble();
}
this.dropoutRatios = newArray;
input.popLimit(limit);
break;
}
case 2402: {
int arrayLength = com.google.protobuf.nano.WireFormatNano
.getRepeatedFieldArrayLength(input, 2402);
int i = this.data == null ? 0 : this.data.length;
byte[][] newArray = new byte[i + arrayLength][];
if (i != 0) {
java.lang.System.arraycopy(this.data, 0, newArray, 0, i);
}
for (; i < newArray.length - 1; i++) {
newArray[i] = input.readBytes();
input.readTag();
}
// Last one without readTag.
newArray[i] = input.readBytes();
this.data = newArray;
break;
}
case 3202: {
this.path = input.readString();
break;
}
}
}
}
public static Cmd parseFrom(byte[] data)
throws com.google.protobuf.nano.InvalidProtocolBufferNanoException {
return com.google.protobuf.nano.MessageNano.mergeFrom(new Cmd(), data);
}
public static Cmd parseFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
return new Cmd().mergeFrom(input);
}
}
public static final class Saved extends
com.google.protobuf.nano.MessageNano {
private static volatile Saved[] _emptyArray;
public static Saved[] emptyArray() {
// Lazily initializes the empty array
if (_emptyArray == null) {
synchronized (
com.google.protobuf.nano.InternalNano.LAZY_INIT_LOCK) {
if (_emptyArray == null) {
_emptyArray = new Saved[0];
}
}
}
return _emptyArray;
}
// string solver = 1;
public java.lang.String solver;
// repeated int32 input_shape = 2;
public int[] inputShape;
public Saved() {
clear();
}
public Saved clear() {
solver = "";
inputShape = com.google.protobuf.nano.WireFormatNano.EMPTY_INT_ARRAY;
cachedSize = -1;
return this;
}
@Override
public void writeTo(com.google.protobuf.nano.CodedOutputByteBufferNano output)
throws java.io.IOException {
if (!this.solver.equals("")) {
output.writeString(1, this.solver);
}
if (this.inputShape != null && this.inputShape.length > 0) {
for (int i = 0; i < this.inputShape.length; i++) {
output.writeInt32(2, this.inputShape[i]);
}
}
super.writeTo(output);
}
@Override
protected int computeSerializedSize() {
int size = super.computeSerializedSize();
if (!this.solver.equals("")) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeStringSize(1, this.solver);
}
if (this.inputShape != null && this.inputShape.length > 0) {
int dataSize = 0;
for (int i = 0; i < this.inputShape.length; i++) {
int element = this.inputShape[i];
dataSize += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32SizeNoTag(element);
}
size += dataSize;
size += 1 * this.inputShape.length;
}
return size;
}
@Override
public Saved mergeFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
return this;
default: {
if (!com.google.protobuf.nano.WireFormatNano.parseUnknownField(input, tag)) {
return this;
}
break;
}
case 10: {
this.solver = input.readString();
break;
}
case 16: {
int arrayLength = com.google.protobuf.nano.WireFormatNano
.getRepeatedFieldArrayLength(input, 16);
int i = this.inputShape == null ? 0 : this.inputShape.length;
int[] newArray = new int[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.inputShape, 0, newArray, 0, i);
}
for (; i < newArray.length - 1; i++) {
newArray[i] = input.readInt32();
input.readTag();
}
// Last one without readTag.
newArray[i] = input.readInt32();
this.inputShape = newArray;
break;
}
case 18: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
// First pass to compute array length.
int arrayLength = 0;
int startPos = input.getPosition();
while (input.getBytesUntilLimit() > 0) {
input.readInt32();
arrayLength++;
}
input.rewindToPosition(startPos);
int i = this.inputShape == null ? 0 : this.inputShape.length;
int[] newArray = new int[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.inputShape, 0, newArray, 0, i);
}
for (; i < newArray.length; i++) {
newArray[i] = input.readInt32();
}
this.inputShape = newArray;
input.popLimit(limit);
break;
}
}
}
}
public static Saved parseFrom(byte[] data)
throws com.google.protobuf.nano.InvalidProtocolBufferNanoException {
return com.google.protobuf.nano.MessageNano.mergeFrom(new Saved(), data);
}
public static Saved parseFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
return new Saved().mergeFrom(input);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-jgrapht/3.46.0.7/hex/genmodel
|
java-sources/ai/h2o/h2o-genmodel-ext-jgrapht/3.46.0.7/hex/genmodel/tools/JgraphtPrintMojo.java
|
package hex.genmodel.tools;
import com.mxgraph.layout.mxCompactTreeLayout;
import com.mxgraph.layout.mxEdgeLabelLayout;
import com.mxgraph.layout.mxIGraphLayout;
import com.mxgraph.util.mxCellRenderer;
import hex.genmodel.algos.tree.ConvertTreeOptions;
import hex.genmodel.algos.tree.SharedTreeGraph;
import hex.genmodel.algos.tree.SharedTreeSubgraph;
import hex.genmodel.algos.tree.TreeBackedMojoModel;
import org.jgrapht.ext.JGraphXAdapter;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.DirectedMultigraph;
import org.jgrapht.io.*;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
public class JgraphtPrintMojo extends PrintMojo implements MojoPrinter {
@Override
public void run() throws Exception {
if (!Format.png.equals(format)){
super.run();
} else {
validateArgs();
if (genModel instanceof TreeBackedMojoModel){
TreeBackedMojoModel treeBackedModel = (TreeBackedMojoModel) genModel;
ConvertTreeOptions options = new ConvertTreeOptions().withTreeConsistencyCheckEnabled();
final SharedTreeGraph g = treeBackedModel.convert(treeToPrint, null, options);
printPng(g);
}
else {
System.out.println("ERROR: Unknown MOJO type");
System.exit(1);
}
}
}
@Override
public boolean supportsFormat(Format format){
return true;
}
private void printPng(SharedTreeGraph trees) throws IOException, ImportException {
int numberOfTrees = trees.subgraphArray.size();
if (numberOfTrees > 1) {
if (outputFileName == null) {
outputFileName = Paths.get("").toString();
}
// Resolve the directory only after applying the default above;
// Paths.get(null) would throw a NullPointerException.
Path outputDirectoryPath = Paths.get(outputFileName);
if (Files.exists(outputDirectoryPath) && !Files.isDirectory(outputDirectoryPath)) {
Files.delete(outputDirectoryPath);
}
if (!Files.exists(outputDirectoryPath)) {
Files.createDirectory(outputDirectoryPath);
}
}
for (SharedTreeSubgraph tree : trees.subgraphArray) {
Path dotSourceFilePath = Files.createTempFile("", tmpOutputFileName);
try (FileOutputStream fosTemp = new FileOutputStream(dotSourceFilePath.toFile()); PrintStream osTemp = new PrintStream(fosTemp)) {
tree.printDot(osTemp, maxLevelsToPrintPerEdge, detail, optionalTitle, pTreeOptions, true);
generateOutputPng(dotSourceFilePath, getPngName(numberOfTrees, tree.name));
}
Files.delete(dotSourceFilePath);
}
}
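// Renders one DOT file: import it into a JGraphT multigraph, lay it out with
// mxGraph's compact-tree and edge-label layouts, and write the result as a
// PNG (to the named file, or to stdout when no output file was given).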
private void generateOutputPng(Path dotSourceFilePath, String treeName) throws ImportException, IOException {
LabeledVertexProvider vertexProvider = new LabeledVertexProvider();
LabeledEdgesProvider edgesProvider = new LabeledEdgesProvider();
ComponentUpdater componentUpdater = new ComponentUpdater();
DOTImporter<LabeledVertex, LabeledEdge> importer = new DOTImporter<>(vertexProvider, edgesProvider, componentUpdater);
DirectedMultigraph<LabeledVertex, LabeledEdge> result = new DirectedMultigraph<>(LabeledEdge.class);
try (FileInputStream is = new FileInputStream(dotSourceFilePath.toFile()); Reader reader = new InputStreamReader(is)) {
importer.importGraph(result, reader);
JGraphXAdapter<LabeledVertex, LabeledEdge> graphAdapter = new JGraphXAdapter<LabeledVertex, LabeledEdge>(result);
mxIGraphLayout treeLayout = new mxCompactTreeLayout(graphAdapter, true);
mxIGraphLayout nonOverlappingEdgesLayout = new mxEdgeLabelLayout(graphAdapter);
treeLayout.execute(graphAdapter.getDefaultParent());
nonOverlappingEdgesLayout.execute(graphAdapter.getDefaultParent());
BufferedImage image = mxCellRenderer.createBufferedImage(graphAdapter, null, 2, Color.WHITE, true, null);
if (outputFileName != null) {
ImageIO.write(image, "PNG", new File(treeName));
} else {
ImageIO.write(image, "PNG", System.out);
}
}
}
protected String getPngName(int numberOfTrees, String treeName) {
if (numberOfTrees == 1) {
return outputFileName;
} else {
return outputFileName + "/" + treeName.replaceAll("\\s+", "").replaceAll(",", "_") + ".png";
}
}
private class LabeledVertexProvider implements VertexProvider<LabeledVertex> {
@Override
public LabeledVertex buildVertex(String id, Map<String, Attribute> attributes) {
return new LabeledVertex(id, attributes.get("label").toString());
}
}
private class LabeledEdgesProvider implements EdgeProvider<LabeledVertex, LabeledEdge> {
@Override
public LabeledEdge buildEdge(LabeledVertex f, LabeledVertex t, String l, Map<String, Attribute> attrs) {
return new LabeledEdge(l);
}
}
private class ComponentUpdater implements org.jgrapht.io.ComponentUpdater<LabeledVertex>{
@Override
public void update(LabeledVertex v, Map<String, Attribute> attrs) {
}
}
private class LabeledEdge extends DefaultEdge {
private String label;
/**
* Constructs a relationship edge
*
* @param label the label of the new edge.
*
*/
public LabeledEdge(String label)
{
this.label = label;
}
/**
* Gets the label associated with this edge.
*
* @return edge label
*/
public String getLabel()
{
return label;
}
@Override
public String toString()
{
return label;
}
}
static class LabeledVertex
{
private String id;
private String label;
public LabeledVertex(String id)
{
this(id, null);
}
public LabeledVertex(String id, String label)
{
this.id = id;
this.label = label;
}
@Override
public int hashCode()
{
return (id == null) ? 0 : id.hashCode();
}
@Override
public String toString()
{
return label;
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
LabeledVertex other = (LabeledVertex) obj;
if (id == null) {
return other.id == null;
} else {
return id.equals(other.id);
}
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/parsing/ParameterParser.java
|
package hex.genmodel.mojopipeline.parsing;
public class ParameterParser {
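/**
 * Interprets a String via Boolean.parseBoolean and a Double as true when it
 * is strictly positive; any other type is rejected.
 */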
public static boolean paramValueToBoolean(Object paramValue) {
if (paramValue instanceof String) {
return Boolean.parseBoolean((String)paramValue);
} else if (paramValue instanceof Double) {
return (Double)paramValue > 0.0;
} else {
throw new UnsupportedOperationException(
String.format(
"Unable convert a parameter value %s of type %s to Boolean.",
paramValue,
paramValue.getClass().getName()));
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/MathBinaryTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import java.util.HashMap;
import java.util.Map;
public class MathBinaryTransform extends MojoTransform {
MathBinaryFunction _function;
boolean _isLeftCol;
boolean _isRightCol;
double _constValue;
MathBinaryTransform(
int[] iindices,
int[] oindices,
MathBinaryFunction function,
boolean isLeftCol,
boolean isRightCol,
double constValue) {
super(iindices, oindices);
_function = function;
_isLeftCol = isLeftCol;
_isRightCol = isRightCol;
_constValue = constValue;
}
@Override
public void transform(MojoFrame frame) {
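// Three shapes of the binary op: constant op column, column op constant,
// and column op column, selected by the two is*Col flags.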
if (!_isLeftCol) {
double[] values = (double[]) frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = _function.call(_constValue, values[i]);
}
} else if (!_isRightCol) {
double[] values = (double[]) frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = _function.call(values[i], _constValue);
}
} else {
double[] left = (double[]) frame.getColumnData(iindices[0]);
double[] right = (double[]) frame.getColumnData(iindices[1]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = _function.call(left[i], right[i]);
}
}
}
interface MathBinaryFunction {
double call(double left, double right);
}
public static class Factory implements MojoTransformBuilderFactory {
private static boolean isEqual(double l, double r) {
if (Double.isNaN(l) && Double.isNaN(r)) return true;
double ulpLeft = Math.ulp(l);
double ulpRight = Math.ulp(r);
double smallUlp = Math.min(ulpLeft, ulpRight);
double absDiff = Math.abs(l - r); // subtraction order does not matter, due to IEEE 754 spec
return absDiff <= smallUlp;
}
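// Illustrative (not in the original source): isEqual(0.1 + 0.2, 0.3) is true
// because the difference is exactly one ulp of 0.3, while plain == comparison
// of those doubles is false.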
private static double and(double l, double r) {
return (l == 0 || r == 0) ? 0 : (Double.isNaN(l) || Double.isNaN(r) ? Double.NaN : 1);
}
private static double or(double l, double r) {
return (l == 1 || r == 1) ? 1 : (Double.isNaN(l) || Double.isNaN(r) ? Double.NaN : 0);
}
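// R-style three-valued logic: a definite 0 short-circuits `and` and a
// definite 1 short-circuits `or` even when the other operand is NaN
// (missing); otherwise NaN propagates.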
private static final HashMap<String,MathBinaryFunction> _supportedFunctions = new HashMap<String,MathBinaryFunction>() {{
put("&", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return and(l, r);
}
});
put("&&", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return and(l, r);
}
});
put("|", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return or(l, r);
}
});
put("||", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return or(l, r);
}
});
put("==", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return isEqual(l, r) ? 1 : 0;
}
});
put("!=", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return isEqual(l, r) ? 0 : 1;
}
});
put("<=", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l <= r ? 1 : 0;
}
});
put("<", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l < r ? 1 : 0;
}
});
put(">=", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l >= r ? 1 : 0;
}
});
put(">", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l > r ? 1 : 0;
}
});
put("intDiv", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return (((int) r) == 0) ? Double.NaN : (int) l / (int) r;
}
});
put("%/%", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return (int) (l / r);
}
});
put("%", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l % r;
}
});
put("%%", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l % r;
}
});
put("*", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l * r;
}
});
put("/", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l / r;
}
});
put("+", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l + r;
}
});
put("-", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return l - r;
}
});
put("^", new MathBinaryFunction() {
@Override
public double call(double l, double r) {
return Math.pow(l, r);
}
});
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.MathBinaryTransform";
public static MathBinaryFunction getFunction(String functionName) {
final MathBinaryFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported unary math transformation.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final Boolean isLeftCol = (Boolean) params.get("isLeftCol");
final Boolean isRightCol = (Boolean) params.get("isRightCol");
double constValue = 0.0;
if (!isLeftCol || !isRightCol) {
constValue = (Double) params.get("constValue");
}
final MathBinaryFunction function = Factory.getFunction(functionName);
return new MathBinaryTransform(iindices, oindices, function, isLeftCol, isRightCol, constValue);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/MathUnaryTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import org.apache.commons.math3.special.Gamma;
import org.apache.commons.math3.util.FastMath;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import java.util.HashMap;
import java.util.Map;
public class MathUnaryTransform extends MojoTransform {
MathUnaryFunction _function;
MathUnaryTransform(int[] iindices, int[] oindices, MathUnaryFunction function) {
super(iindices, oindices);
_function = function;
}
@Override
public void transform(MojoFrame frame) {
double[] a = (double[]) frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = _function.call(a[i]);
}
}
interface MathUnaryFunction {
void initialize(Map<String, Object> params);
double call(double value);
}
public static class Factory implements MojoTransformBuilderFactory {
private static final HashMap<String,MathUnaryFunction> _supportedFunctions = new HashMap<String,MathUnaryFunction>() {{
put("abs", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.abs(value); }
});
put("acos", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.acos(value); }
});
put("acosh", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return FastMath.acosh(value); }
});
put("asin", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.asin(value); }
});
put("asinh", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return FastMath.asinh(value); }
});
put("atan", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.atan(value); }
});
put("atanh", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return FastMath.atanh(value); }
});
put("ceiling", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.ceil(value); }
});
put("cos", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.cos(value); }
});
put("cosh", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.cosh(value); }
});
put("cospi", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.cos(Math.PI * value); }
});
put("digamma", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Double.isNaN(value) ? Double.NaN : Gamma.digamma(value); }
});
put("exp", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.exp(value); }
});
put("expm1", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.expm1(value); }
});
put("floor", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.floor(value); }
});
put("gamma", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Gamma.gamma(value); }
});
put("lgamma", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Gamma.logGamma(value); }
});
put("log", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.log(value); }
});
put("log1p", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.log1p(value); }
});
put("log2", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.log(value) / Math.log(2); }
});
put("log10", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.log10(value); }
});
put("none", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return value; }
});
put("not", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Double.isNaN(value) ? Double.NaN : value == 0 ? 1 : 0; }
});
put("sign", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.signum(value); }
});
put("sin", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.sin(value); }
});
put("sinh", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.sinh(value); }
});
put("sinpi", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.sin(Math.PI * value); }
});
put("sqrt", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.sqrt(value); }
});
put("tan", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.tan(value); }
});
put("tanh", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.tanh(value); }
});
put("tanpi", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Math.tan(Math.PI * value); }
});
put("trigamma", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return Double.isNaN(value) ? Double.NaN : Gamma.trigamma(value); }
});
put("trunc", new MathUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(double value) { return value >= 0 ? Math.floor(value) : Math.ceil(value); }
});
put("round", new MathUnaryFunction() {
double _digits = Double.NaN;
double _powerOf10 = Double.NaN;
@Override
public void initialize(Map<String, Object> params) {
Object digitsObj = params.get("rightArg");
if (digitsObj == null) {
throw new IllegalArgumentException("The 'digits' param is not passed to 'round' function!");
}
_digits = (Double)digitsObj;
if ((int) _digits != _digits) _digits = Math.round(_digits);
// keep the power as a double so negative or very large 'digits' values are not truncated
_powerOf10 = Math.pow(10, (int) _digits);
}
@Override
public double call(double x) {
if (Double.isNaN(x)) return x;
double sgn = x < 0 ? -1 : 1;
x = Math.abs(x);
return sgn * (_digits == 0
// go to the even digit
? (x % 1 > 0.5 || (x % 1 == 0.5 && !(Math.floor(x) % 2 == 0)))
? Math.ceil(x)
: Math.floor(x)
: Math.floor(x * _powerOf10 + 0.5) / _powerOf10);
}
});
put("signif", new MathUnaryFunction() {
double _digits = Double.NaN;
@Override
public void initialize(Map<String, Object> params) {
Object digitsObj = params.get("rightArg");
if (digitsObj == null) {
throw new IllegalArgumentException("The 'digits' param is not passed to 'signif' function!");
}
_digits = (Double)digitsObj;
if (_digits < 1) _digits = 1; // mimic R's base::signif
if ((int) _digits != _digits) _digits = Math.round(_digits);
}
@Override
public double call(double x) {
if (Double.isNaN(x)) return x;
java.math.BigDecimal bd = new java.math.BigDecimal(x);
bd = bd.round(new java.math.MathContext((int) _digits, java.math.RoundingMode.HALF_EVEN));
return bd.doubleValue();
}
});
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.MathUnaryTransform";
public static MathUnaryFunction getFunction(String functionName) {
final MathUnaryFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported unary math transformation.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final MathUnaryFunction function = Factory.getFunction(functionName);
function.initialize(params);
return new MathUnaryTransform(iindices, oindices, function);
}
}
}
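// Hedged usage sketch (not part of the original sources; the class name is illustrative):
// reproduces the R-style rounding arithmetic of the 'round' and 'signif' entries above
// with plain JDK calls so the expected outputs are easy to verify.
class MathUnaryRoundingSketch {
public static void main(String[] args) {
// 'round' with digits == 0 breaks ties toward the even neighbour (R's base::round):
System.out.println(roundHalfEven(0.5)); // 0.0
System.out.println(roundHalfEven(1.5)); // 2.0
System.out.println(roundHalfEven(-2.5)); // -2.0
// 'signif' delegates to BigDecimal with HALF_EVEN rounding:
java.math.BigDecimal bd = new java.math.BigDecimal(123456.0)
.round(new java.math.MathContext(2, java.math.RoundingMode.HALF_EVEN));
System.out.println(bd.doubleValue()); // 120000.0
}
static double roundHalfEven(double x) {
double sgn = x < 0 ? -1 : 1;
x = Math.abs(x);
boolean up = x % 1 > 0.5 || (x % 1 == 0.5 && !(Math.floor(x) % 2 == 0));
return sgn * (up ? Math.ceil(x) : Math.floor(x));
}
}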
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/StringGrepTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import hex.genmodel.mojopipeline.parsing.ParameterParser;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class StringGrepTransform extends MojoTransform {
Pattern _pattern = null;
Boolean _invert = null;
StringGrepTransform(int[] iindices, int[] oindices, Pattern pattern, Boolean invert) {
super(iindices, oindices);
_pattern = pattern;
_invert = invert;
}
@Override
public void transform(MojoFrame frame) {
String[] a = (String[]) frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
Matcher matcher = _pattern.matcher("");
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
if (a[i] == null) {
// a null row never matches the pattern, so it scores 1 only when invert is set
o[i] = _invert ? 1 : 0;
} else {
matcher.reset(a[i]);
o[i] = matcher.find() != _invert ? 1 : 0;
}
}
}
public static class Factory implements MojoTransformBuilderFactory {
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.StringGrepTransform";
public static boolean functionExists(String functionName) {
return functionName.equals("grep");
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
Object ignoreCaseObj = params.get("ignore_case");
if (ignoreCaseObj == null) {
throw new IllegalArgumentException("The 'ignore_case' param is not passed to 'grep' function!");
}
boolean ignoreCase = ParameterParser.paramValueToBoolean(ignoreCaseObj);
int flags = ignoreCase ? Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE : 0;
Object invertObj = params.get("invert");
if (invertObj == null) {
throw new IllegalArgumentException("The 'invert' param is not passed to 'grep' function!");
}
boolean invert = ParameterParser.paramValueToBoolean(invertObj);
Object outputLogicalObj = params.get("output_logical");
if (outputLogicalObj == null) {
throw new IllegalArgumentException("The 'output_logical' param is not passed to 'grep' function!");
}
boolean outputLogical = ParameterParser.paramValueToBoolean(outputLogicalObj);
if (!outputLogical) {
throw new IllegalArgumentException("The 'grep' operation in MOJO supports just logical output!");
}
Object patternObj = params.get("regex");
if (patternObj == null) {
throw new IllegalArgumentException("The 'pattern' param is not passed to 'grep' function!");
}
String stringPattern = (String)patternObj;
Pattern pattern = Pattern.compile(stringPattern, flags);
return new StringGrepTransform(
iindices,
oindices,
pattern,
invert);
}
}
}
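// Hedged usage sketch (not part of the original sources; the class name is illustrative):
// shows how the 'grep' factory's flags and the row logic "matcher.find() != invert"
// behave, using only java.util.regex.
class StringGrepUsageSketch {
public static void main(String[] args) {
java.util.regex.Pattern p = java.util.regex.Pattern.compile("h2o",
java.util.regex.Pattern.CASE_INSENSITIVE | java.util.regex.Pattern.UNICODE_CASE);
boolean invert = false;
for (String row : new String[]{"Built on H2O", "plain text"}) {
System.out.println(p.matcher(row).find() != invert ? 1 : 0); // prints 1, then 0
}
// a null row bypasses the matcher in the transform above and scores invert ? 1 : 0
}
}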
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/StringPropertiesBinaryTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import water.util.comparison.string.StringComparatorFactory;
import water.util.comparison.string.StringComparator;
import java.util.HashMap;
import java.util.Map;
public class StringPropertiesBinaryTransform extends MojoTransform {
StringPropertiesBinaryFunction _function;
boolean _isLeftCol;
boolean _isRightCol;
String _constValue;
StringPropertiesBinaryTransform(
int[] iindices,
int[] oindices,
StringPropertiesBinaryFunction function,
boolean isLeftCol,
boolean isRightCol,
String constValue) {
super(iindices, oindices);
_function = function;
_isLeftCol = isLeftCol;
_isRightCol = isRightCol;
_constValue = constValue;
}
@Override
public void transform(MojoFrame frame) {
if (!_isLeftCol) {
String[] values = (String[]) frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = _function.call(_constValue, values[i]);
}
} else if (!_isRightCol) {
String[] values = (String[]) frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = _function.call(values[i], _constValue);
}
} else {
String[] left = (String[]) frame.getColumnData(iindices[0]);
String[] right = (String[]) frame.getColumnData(iindices[1]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = _function.call(left[i], right[i]);
}
}
}
interface StringPropertiesBinaryFunction {
void initialize(Map<String, Object> params);
double call(String left, String right);
}
public static class Factory implements MojoTransformBuilderFactory {
private static final HashMap<String,StringPropertiesBinaryFunction> _supportedFunctions = new HashMap<String,StringPropertiesBinaryFunction>() {{
put("strDistance", new StringPropertiesBinaryFunction() {
StringComparator _comparator = null;
boolean _compareEmpty = false;
@Override
public void initialize(Map<String, Object> params) {
Object measureObj = params.get("measure");
if (measureObj == null) {
throw new IllegalArgumentException("The 'measure' param is not passed to 'strDistance' function!");
}
String measure = (String) measureObj;
_comparator = StringComparatorFactory.makeComparator(measure);
Object compareEmptyObj = params.get("compare_empty");
if (compareEmptyObj == null) {
throw new IllegalArgumentException("The 'compare_empty' param is not passed to 'strDistance' function!");
}
_compareEmpty = Boolean.parseBoolean((String) compareEmptyObj);
}
@Override
public double call(String left, String right) {
if (!_compareEmpty && (left.isEmpty() || right.isEmpty())) {
return Double.NaN;
} else {
return _comparator.compare(left, right);
}
}
});
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.StringPropertiesBinaryTransform";
public static StringPropertiesBinaryFunction getFunction(String functionName) {
final StringPropertiesBinaryFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported binary string properties transformation.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final Boolean isLeftCol = (Boolean) params.get("isLeftCol");
final Boolean isRightCol = (Boolean) params.get("isRightCol");
String constValue = null;
if (!isLeftCol || !isRightCol) {
constValue = (String) params.get("constValue");
}
final StringPropertiesBinaryFunction function = Factory.getFunction(functionName);
function.initialize(params);
return new StringPropertiesBinaryTransform(iindices, oindices, function, isLeftCol, isRightCol, constValue);
}
}
}
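// Hedged sketch (not part of the original sources): mirrors the compare_empty gate of
// 'strDistance' with a stand-in comparator (plain equality), since the real
// StringComparator implementations come from water.util and are not shown here.
class StringDistanceGateSketch {
public static void main(String[] args) {
boolean compareEmpty = false;
String left = "", right = "abc";
double d = (!compareEmpty && (left.isEmpty() || right.isEmpty()))
? Double.NaN // empty inputs short-circuit to NaN
: (left.equals(right) ? 0.0 : 1.0); // stand-in for comparator.compare
System.out.println(d); // NaN
}
}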
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/StringPropertiesUnaryTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
public class StringPropertiesUnaryTransform extends MojoTransform {
StringPropertiesUnaryFunction _function;
StringPropertiesUnaryTransform(int[] iindices, int[] oindices, StringPropertiesUnaryFunction function) {
super(iindices, oindices);
_function = function;
}
@Override
public void transform(MojoFrame frame) {
String[] a = (String[]) frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
// map missing strings to NaN (a primitive double cell cannot hold null)
o[i] = a[i] == null ? Double.NaN : _function.call(a[i]);
}
}
interface StringPropertiesUnaryFunction {
void initialize(Map<String, Object> params);
double call(String value);
}
public static class Factory implements MojoTransformBuilderFactory {
private static final HashMap<String, StringPropertiesUnaryFunction> _supportedFunctions = new HashMap<String,StringPropertiesUnaryFunction>() {{
put("countmatches", new StringPropertiesUnaryFunction() {
String[] _pattern = null;
@Override
public void initialize(Map<String, Object> params) {
Object patternObj = params.get("pattern");
if (patternObj == null) {
throw new IllegalArgumentException("The 'pattern' param is not passed to 'countmatches' function!");
}
if (patternObj instanceof String) {
// multiple patterns arrive joined into one string by a "`````" sentinel delimiter
_pattern = ((String)patternObj).split("`````");
} else {
throw new IllegalArgumentException(
String.format(
"The type '%s' of 'pattern' param is not supported.",
patternObj.getClass().getName()));
}
}
@Override
public double call(String value) {
int count = 0;
for (String word : _pattern) {
count += StringUtils.countMatches(value, word);
}
return count;
}
});
put("num_valid_substrings", new StringPropertiesUnaryFunction() {
HashSet<String> _words = null;
@Override
public void initialize(Map<String, Object> params) {
Object wordsObj = params.get("words");
if (wordsObj == null) {
throw new IllegalArgumentException("The 'words' param is not passed to 'num_valid_substrings' function!");
}
String wordsPath = (String) wordsObj;
try {
_words = new HashSet<>(FileUtils.readLines(new File(wordsPath)));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public double call(String value) {
int count = 0;
int N = value.length();
// count every substring of length >= 2 that appears in the dictionary
for (int i = 0; i < N - 1; i++)
for (int j = i + 2; j < N + 1; j++) {
if (_words.contains(value.substring(i, j)))
count += 1;
}
return count;
}
});
put("entropy", new StringPropertiesUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(String value) {
HashMap<Character, Integer> freq = new HashMap<>();
for (int i = 0; i < value.length(); i++) {
char c = value.charAt(i);
Integer count = freq.get(c);
if (count == null) freq.put(c, 1);
else freq.put(c, count + 1);
}
double sume = 0;
int N = value.length();
double n;
for (char c : freq.keySet()) {
n = freq.get(c);
sume += -n / N * Math.log(n / N) / Math.log(2);
}
return sume;
}
});
put("strlen", new StringPropertiesUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {
}
@Override
public double call(String value) {
return value.length();
}
});
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.StringPropertiesUnaryTransform";
public static StringPropertiesUnaryFunction getFunction(String functionName) {
final StringPropertiesUnaryFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported unary string transformation.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final StringPropertiesUnaryFunction function = Factory.getFunction(functionName);
function.initialize(params);
return new StringPropertiesUnaryTransform(iindices, oindices, function);
}
}
}
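// Hedged sketch (not part of the original sources): reproduces the 'entropy' arithmetic
// above so the expected output can be verified by hand; entropy("aabb") is exactly 1 bit.
class StringEntropySketch {
public static void main(String[] args) {
String value = "aabb";
java.util.HashMap<Character, Integer> freq = new java.util.HashMap<>();
for (int i = 0; i < value.length(); i++)
freq.merge(value.charAt(i), 1, Integer::sum);
double sum = 0;
int N = value.length();
for (int count : freq.values()) {
double n = count;
sum += -n / N * Math.log(n / N) / Math.log(2); // Shannon entropy in bits
}
System.out.println(sum); // 1.0
}
}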
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/StringSplitTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import java.util.Map;
public class StringSplitTransform extends MojoTransform {
String _regex = null;
int _numberOfOutputCols;
StringSplitTransform(int[] iindices, int[] oindices, String regex) {
super(iindices, oindices);
_regex = regex;
_numberOfOutputCols = oindices.length;
}
@Override
public void transform(MojoFrame frame) {
String[] a = (String[]) frame.getColumnData(iindices[0]);
String[][] outputs = new String[_numberOfOutputCols][];
for (int j = 0; j < _numberOfOutputCols; j++) {
outputs[j] = (String[]) frame.getColumnData(oindices[j]);
}
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
if (a[i] != null) {
String[] split = a[i].split(_regex);
int nCol = Math.min(_numberOfOutputCols, split.length);
for (int j = 0; j < nCol; j++) {
outputs[j][i] = split[j];
}
}
}
}
public static class Factory implements MojoTransformBuilderFactory {
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.StringSplitTransform";
public static boolean functionExists(String functionName) {
return functionName.equals("strsplit");
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
Object regexObj = params.get("split");
if (regexObj == null) {
throw new IllegalArgumentException("The 'split' param is not passed to 'strsplit' function!");
}
String regex = (String)regexObj;
return new StringSplitTransform(iindices, oindices, regex);
}
}
}
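// Hedged sketch (not part of the original sources): shows the column-padding behaviour
// of the transform above; surplus split tokens are dropped and missing ones leave the
// output cell untouched (null for a fresh string column).
class StringSplitSketch {
public static void main(String[] args) {
int numberOfOutputCols = 2;
String[] split = "a,b,c".split(",");
String[] out = new String[numberOfOutputCols];
int nCol = Math.min(numberOfOutputCols, split.length);
for (int j = 0; j < nCol; j++) out[j] = split[j];
System.out.println(java.util.Arrays.toString(out)); // [a, b]
}
}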
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/StringUnaryTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import hex.genmodel.mojopipeline.parsing.ParameterParser;
import org.apache.commons.lang.StringUtils;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Pattern;
public class StringUnaryTransform extends MojoTransform {
StringUnaryFunction _function;
StringUnaryTransform(int[] iindices, int[] oindices, StringUnaryFunction function) {
super(iindices, oindices);
_function = function;
}
@Override
public void transform(MojoFrame frame) {
String[] a = (String[]) frame.getColumnData(iindices[0]);
String[] o = (String[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = a[i] == null ? null : _function.call(a[i]);
}
}
interface StringUnaryFunction {
void initialize(Map<String, Object> params);
String call(String value);
}
public static class Factory implements MojoTransformBuilderFactory {
private static final HashMap<String,StringUnaryFunction> _supportedFunctions = new HashMap<String,StringUnaryFunction>() {{
put("lstrip", new StringUnaryFunction() {
private String _set = null;
@Override
public void initialize(Map<String, Object> params) {
Object setObj = params.get("set");
if (setObj == null) {
throw new IllegalArgumentException("The 'set' param is not passed to 'lstrip' function!");
}
_set = (String)setObj;
}
@Override
public String call(String value) {
return StringUtils.stripStart(value, _set);
}
});
put("rstrip", new StringUnaryFunction() {
private String _set = null;
@Override
public void initialize(Map<String, Object> params) {
Object setObj = params.get("set");
if (setObj == null) {
throw new IllegalArgumentException("The 'set' param is not passed to 'rstrip' function!");
}
_set = (String)setObj;
}
@Override
public String call(String value) {
return StringUtils.stripEnd(value, _set);
}
});
put("replaceall", new StringUnaryFunction() {
Pattern _pattern = null;
String _replacement = null;
Boolean _ignoreCase = null;
@Override
public void initialize(Map<String, Object> params) {
Object patternObj = params.get("pattern");
if (patternObj == null) {
throw new IllegalArgumentException("The 'pattern' param is not passed to 'replaceall' function!");
}
String stringPattern = (String)patternObj;
_pattern = Pattern.compile(stringPattern);
Object replacementObj = params.get("replacement");
if (replacementObj == null) {
throw new IllegalArgumentException("The 'replacement' param is not passed to 'replaceall' function!");
}
_replacement = (String)replacementObj;
Object ignoreCaseObj = params.get("ignore_case");
if (ignoreCaseObj == null) {
throw new IllegalArgumentException("The 'ignore_case' param is not passed to 'replaceall' function!");
}
_ignoreCase = ParameterParser.paramValueToBoolean(ignoreCaseObj);
}
@Override
public String call(String value) {
if (_ignoreCase)
return _pattern.matcher(value.toLowerCase(Locale.ENGLISH)).replaceAll(_replacement);
else
return _pattern.matcher(value).replaceAll(_replacement);
}
});
put("replacefirst", new StringUnaryFunction() {
Pattern _pattern = null;
String _replacement = null;
Boolean _ignoreCase = null;
@Override
public void initialize(Map<String, Object> params) {
Object patternObj = params.get("pattern");
if (patternObj == null) {
throw new IllegalArgumentException("The 'pattern' param is not passed to 'replacefirst' function!");
}
String stringPattern = (String)patternObj;
_pattern = Pattern.compile(stringPattern);
Object replacementObj = params.get("replacement");
if (replacementObj == null) {
throw new IllegalArgumentException("The 'replacement' param is not passed to 'replacefirst' function!");
}
_replacement = (String)replacementObj;
Object ignoreCaseObj = params.get("ignore_case");
if (ignoreCaseObj == null) {
throw new IllegalArgumentException("The 'ignore_case' param is not passed to 'replacefirst' function!");
}
_ignoreCase = ParameterParser.paramValueToBoolean(ignoreCaseObj);
}
@Override
public String call(String value) {
if (_ignoreCase)
return _pattern.matcher(value.toLowerCase(Locale.ENGLISH)).replaceFirst(_replacement);
else
return _pattern.matcher(value).replaceFirst(_replacement);
}
});
put("substring", new StringUnaryFunction() {
private int _startIndex = 0;
private int _endIndex = Integer.MAX_VALUE;
@Override
public void initialize(Map<String, Object> params) {
Object startIndexObj = params.get("startIndex");
if (startIndexObj != null) {
_startIndex = ((Double) startIndexObj).intValue();
if (_startIndex < 0) _startIndex = 0;
}
Object endIndexObj = params.get("endIndex");
if (endIndexObj != null) {
_endIndex = ((Double) endIndexObj).intValue();
}
}
@Override
public String call(String value) {
return value.substring(
Math.min(_startIndex, value.length()),
Math.min(_endIndex, value.length()));
}
});
put("tolower", new StringUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public String call(String value) { return value.toLowerCase(Locale.ENGLISH); }
});
put("toupper", new StringUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public String call(String value) { return value.toUpperCase(Locale.ENGLISH); }
});
put("trim", new StringUnaryFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public String call(String value) { return value.trim(); }
});
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.StringUnaryFunction";
public static StringUnaryFunction getFunction(String functionName) {
final StringUnaryFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported unary string transformation.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final StringUnaryFunction function = Factory.getFunction(functionName);
function.initialize(params);
return new StringUnaryTransform(iindices, oindices, function);
}
}
}
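// Hedged sketch (not part of the original sources): highlights a subtlety of the
// 'replaceall'/'replacefirst' entries above - with ignore_case the input itself is
// lowercased before matching, so the output is lowercased too, not case-preserving.
class StringReplaceCaseSketch {
public static void main(String[] args) {
java.util.regex.Pattern p = java.util.regex.Pattern.compile("h2o");
String value = "H2O Flow";
System.out.println(p.matcher(value.toLowerCase(java.util.Locale.ENGLISH))
.replaceAll("water")); // "water flow" - note "Flow" became "flow"
}
}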
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/TimeUnaryTransform.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import java.util.HashMap;
import java.util.Map;
public class TimeUnaryTransform extends MojoTransform {
TimeUnaryFunction _function;
DateTimeZone _timeZone;
TimeUnaryTransform(int[] iindices, int[] oindices, TimeUnaryFunction function, DateTimeZone timeZone) {
super(iindices, oindices);
_function = function;
_timeZone = timeZone;
}
@Override
public void transform(MojoFrame frame) {
double[] a = (double[]) frame.getColumnData(iindices[0]);
String[] factors = _function.factors();
MutableDateTime dateTime = new MutableDateTime(0, _timeZone);
if (factors == null) {
double[] o = (double[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
if (Double.isNaN(a[i])) {
o[i] = Double.NaN;
} else {
dateTime.setMillis((long) a[i]);
o[i] = _function.call(dateTime);
}
}
} else {
String[] o = (String[]) frame.getColumnData(oindices[0]);
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
if (Double.isNaN(a[i])) {
o[i] = null;
} else {
dateTime.setMillis((long) a[i]);
o[i] = factors[(int)_function.call(dateTime)];
}
}
}
}
interface TimeUnaryFunction {
double call(MutableDateTime value);
String[] factors();
}
public static class Factory implements MojoTransformBuilderFactory {
private static final HashMap<String,TimeUnaryFunction> _supportedFunctions = new HashMap<String,TimeUnaryFunction>() {{
put("day", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getDayOfMonth(); }
@Override
public String[] factors() { return null; }
});
put("dayOfWeek", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getDayOfWeek() - 1; }
@Override
public String[] factors() { return new String[]{"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}; }
});
put("hour", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getHourOfDay(); }
@Override
public String[] factors() { return null; }
});
put("millis", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getMillisOfSecond(); }
@Override
public String[] factors() { return null; }
});
put("minute", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getMinuteOfHour(); }
@Override
public String[] factors() { return null; }
});
put("month", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getMonthOfYear(); }
@Override
public String[] factors() { return null; }
});
put("second", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getSecondOfMinute(); }
@Override
public String[] factors() { return null; }
});
put("week", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getWeekOfWeekyear(); }
@Override
public String[] factors() { return null; }
});
put("year", new TimeUnaryFunction() {
@Override
public double call(MutableDateTime value) { return value.getYear(); }
@Override
public String[] factors() { return null; }
});
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.TimeUnaryTransform";
public static TimeUnaryFunction getFunction(String functionName) {
final TimeUnaryFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported unary time transformation.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final TimeUnaryFunction function = Factory.getFunction(functionName);
final String timeZoneId = (String) params.get("timezone");
final DateTimeZone timeZone = DateTimeZone.forID(timeZoneId);
return new TimeUnaryTransform(iindices, oindices, function, timeZone);
}
}
}
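// Hedged sketch (not part of the original sources; assumes joda-time on the classpath,
// as the transform itself does): shows how 'dayOfWeek' maps Joda's 1..7 (Monday..Sunday)
// onto the 0-based factor array above.
class TimeDayOfWeekSketch {
public static void main(String[] args) {
String[] factors = {"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"};
org.joda.time.MutableDateTime dt =
new org.joda.time.MutableDateTime(0, org.joda.time.DateTimeZone.UTC);
dt.setDate(2024, 1, 1); // 2024-01-01 was a Monday
System.out.println(factors[dt.getDayOfWeek() - 1]); // Mon
}
}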
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/ToNumericConversion.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import org.apache.commons.lang.math.NumberUtils;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import water.util.ParseTime;
import java.util.HashMap;
import java.util.Map;
public class ToNumericConversion extends MojoTransform {
ToNumericConversionFunction _function;
ToNumericConversion(int[] iindices, int[] oindices, ToNumericConversionFunction function) {
super(iindices, oindices);
_function = function;
}
@Override
public void transform(MojoFrame frame) {
Object input = frame.getColumnData(iindices[0]);
double[] o = (double[]) frame.getColumnData(oindices[0]);
if (input instanceof double[]){
double[] a = (double[]) input;
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = a[i];
}
} else {
String[] a = (String[]) input;
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
if (a[i] == null) {
o[i] = Double.NaN;
} else {
o[i] = _function.call(a[i]);
}
}
}
}
interface ToNumericConversionFunction {
void initialize(Map<String, Object> params);
double call(String value);
}
public static class Factory implements MojoTransformBuilderFactory {
private static final HashMap<String,ToNumericConversionFunction> _supportedFunctions =
new HashMap<String,ToNumericConversionFunction>() {{
put("as.numeric", new ToNumericConversionFunction() {
@Override
public void initialize(Map<String, Object> params) {}
@Override
public double call(String value) {
return NumberUtils.toDouble(value, Double.NaN);
}
});
put("as.Date", new ToNumericConversionFunction() {
DateTimeFormatter _formatter = null;
@Override
public void initialize(Map<String, Object> params) {
Object formatObj = params.get("format");
if (formatObj == null) {
throw new IllegalArgumentException("The 'format' param is not passed to 'as.Date' function!");
}
String format = (String)formatObj;
Object timezoneObj = params.get("timezone");
if (timezoneObj == null) {
throw new IllegalArgumentException("The 'timezone' param is not passed to 'as.Date' function!");
}
DateTimeZone timeZone = DateTimeZone.forID((String)timezoneObj);
_formatter = ParseTime.forStrptimePattern(format).withZone(timeZone);
}
@Override
public double call(String value) {
try {
return DateTime.parse(value, _formatter).getMillis();
} catch (IllegalArgumentException e) {
return Double.NaN;
}
}
});
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.ToNumericConversion";
public static ToNumericConversionFunction getFunction(String functionName) {
final ToNumericConversionFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported conversion to numeric.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final ToNumericConversionFunction function = Factory.getFunction(functionName);
function.initialize(params);
return new ToNumericConversion(iindices, oindices, function);
}
}
}
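// Hedged sketch (not part of the original sources; assumes commons-lang on the classpath,
// as the conversion itself does): 'as.numeric' falls back to NaN for unparseable strings
// instead of throwing.
class ToNumericSketch {
public static void main(String[] args) {
System.out.println(org.apache.commons.lang.math.NumberUtils.toDouble("3.14", Double.NaN)); // 3.14
System.out.println(org.apache.commons.lang.math.NumberUtils.toDouble("oops", Double.NaN)); // NaN
}
}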
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline
|
java-sources/ai/h2o/h2o-genmodel-ext-mojo-pipeline/3.46.0.7/hex/genmodel/mojopipeline/transformers/ToStringConversion.java
|
package hex.genmodel.mojopipeline.transformers;
import ai.h2o.mojos.runtime.api.backend.ReaderBackend;
import ai.h2o.mojos.runtime.frame.MojoFrame;
import ai.h2o.mojos.runtime.frame.MojoFrameMeta;
import ai.h2o.mojos.runtime.transforms.MojoTransform;
import ai.h2o.mojos.runtime.transforms.MojoTransformBuilderFactory;
import java.util.HashMap;
import java.util.Map;
public class ToStringConversion extends MojoTransform {
ToStringConversionFunction _function;
ToStringConversion(int[] iindices, int[] oindices, ToStringConversionFunction function) {
super(iindices, oindices);
_function = function;
}
@Override
public void transform(MojoFrame frame) {
Object input = frame.getColumnData(iindices[0]);
String[] o = (String[]) frame.getColumnData(oindices[0]);
if (input instanceof String[]){
String[] a = (String[]) input;
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
o[i] = a[i];
}
} else {
double[] a = (double[]) input;
for (int i = 0, nrows = frame.getNrows(); i < nrows; i++) {
if (Double.isNaN(a[i])) {
o[i] = null;
} else {
o[i] = _function.call(a[i]);
}
}
}
}
interface ToStringConversionFunction {
String call(double value);
}
public static class Factory implements MojoTransformBuilderFactory {
private static final ToStringConversionFunction _defaultConversionFunction = new ToStringConversionFunction() {
@Override
public String call(double value) { return ((Double)value).toString();}
};
private static final HashMap<String,ToStringConversionFunction> _supportedFunctions =
new HashMap<String,ToStringConversionFunction>() {{
put("as.factor", _defaultConversionFunction);
put("as.character", _defaultConversionFunction);
}};
public static final String TRANSFORMER_ID = "hex.genmodel.mojopipeline.transformers.ToStringConversion";
public static ToStringConversionFunction getFunction(String functionName) {
final ToStringConversionFunction function = _supportedFunctions.get(functionName);
if (function == null) {
throw new UnsupportedOperationException(
String.format("The function '%s' is not supported conversion to string.", functionName));
}
return function;
}
public static boolean functionExists(String functionName) {
return _supportedFunctions.containsKey(functionName);
}
@Override
public String transformerName() {
return TRANSFORMER_ID;
}
@Override
public MojoTransform createBuilder(MojoFrameMeta meta,
int[] iindices, int[] oindices,
Map<String, Object> params,
ReaderBackend backend) {
final String functionName = (String) params.get("function");
final ToStringConversionFunction function = Factory.getFunction(functionName);
return new ToStringConversion(iindices, oindices, function);
}
}
}
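// Hedged sketch (not part of the original sources): 'as.factor' and 'as.character' share
// the default Double.toString conversion above, so whole numbers keep a ".0" suffix.
class ToStringSketch {
public static void main(String[] args) {
double value = 3.0;
System.out.println(((Double) value).toString()); // "3.0", not "3"
}
}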
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/biz/k11i/xgboost
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/biz/k11i/xgboost/tree/TreeSHAPHelper.java
|
package biz.k11i.xgboost.tree;
import biz.k11i.xgboost.util.FVec;
import hex.genmodel.algos.tree.TreeSHAP;
import hex.genmodel.algos.tree.TreeSHAPPredictor;
public class TreeSHAPHelper {
public static TreeSHAPPredictor<FVec> makePredictor(RegTree tree) {
RegTreeImpl treeImpl = (RegTreeImpl) tree;
return new TreeSHAP<>(treeImpl.getNodes(), treeImpl.getStats(), 0);
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos/xgboost/AuxNodeWeightsHelper.java
|
package hex.genmodel.algos.xgboost;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNodeStat;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class AuxNodeWeightsHelper {
private static final int DOUBLE_BYTES = Double.SIZE / Byte.SIZE;
private static final int INTEGER_BYTES = Integer.SIZE / Byte.SIZE;
public static byte[] toBytes(double[][] auxNodeWeights) {
int elements = 0;
for (double[] weights : auxNodeWeights)
elements += weights.length;
int len = (1 + auxNodeWeights.length) * INTEGER_BYTES + elements * DOUBLE_BYTES;
ByteBuffer bb = ByteBuffer.wrap(new byte[len]).order(ByteOrder.nativeOrder());
bb.putInt(auxNodeWeights.length);
for (double[] weights : auxNodeWeights) {
bb.putInt(weights.length);
for (double w : weights)
bb.putDouble(w);
}
return bb.array();
}
static double[][] fromBytes(byte[] auxNodeWeightBytes) {
ByteBuffer bb = ByteBuffer.wrap(auxNodeWeightBytes).order(ByteOrder.nativeOrder());
double[][] auxNodeWeights = new double[bb.getInt()][];
for (int i = 0; i < auxNodeWeights.length; i++) {
double[] weights = new double[bb.getInt()];
for (int j = 0; j < weights.length; j++)
weights[j] = bb.getDouble();
auxNodeWeights[i] = weights;
}
return auxNodeWeights;
}
// FIXME: ugly & hacky - good for a POC only
static void updateNodeWeights(RegTree[] trees, double[][] nodeWeights) {
final Field field;
try {
field = RegTreeNodeStat.class.getDeclaredField("sum_hess");
field.setAccessible(true);
} catch (NoSuchFieldException e) {
throw new IllegalStateException("Unable to access field 'sum_hess'.");
}
try {
for (int i = 0; i < nodeWeights.length; i++) {
RegTreeNodeStat[] stats = trees[i].getStats();
assert stats.length == nodeWeights[i].length;
for (int j = 0; j < nodeWeights[i].length; j++)
field.setFloat(stats[j], (float) nodeWeights[i][j]);
}
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
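// Hedged round-trip sketch (not part of the original sources): toBytes/fromBytes use the
// platform's native byte order, so the pair is only guaranteed to round-trip on the same
// machine that produced the bytes.
class AuxNodeWeightsRoundTripSketch {
public static void main(String[] args) {
double[][] weights = {{1.5, 2.5}, {3.25}};
byte[] bytes = AuxNodeWeightsHelper.toBytes(weights);
double[][] back = AuxNodeWeightsHelper.fromBytes(bytes);
System.out.println(java.util.Arrays.deepToString(back)); // [[1.5, 2.5], [3.25]]
}
}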
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos/xgboost/OneHotEncoderFactory.java
|
package hex.genmodel.algos.xgboost;
import biz.k11i.xgboost.util.FVec;
import hex.genmodel.GenModel;
import java.io.Serializable;
class OneHotEncoderFactory implements Serializable {
private boolean _compatible10;
private final boolean _sparse;
private final int[] _catOffsets;
private final int _cats;
private final int _nums;
private final boolean _useAllFactorLevels;
private final int[] _catMap;
private final float _notHot;
OneHotEncoderFactory(boolean compatible10, boolean sparse, int[] catOffsets, int cats, int nums, boolean useAllFactorLevels) {
_compatible10 = compatible10;
_sparse = sparse;
_catOffsets = catOffsets;
_cats = cats;
_nums = nums;
_useAllFactorLevels = useAllFactorLevels;
_notHot = _sparse ? Float.NaN : 0;
if (_catOffsets == null) {
_catMap = new int[0];
} else {
_catMap = new int[_catOffsets[_cats]];
for (int c = 0; c < _cats; c++) {
for (int j = _catOffsets[c]; j < _catOffsets[c+1]; j++)
_catMap[j] = c;
}
}
}
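// Illustrative note (assumption, not in the original sources): with _catOffsets = {0, 3, 5}
// and _cats = 2, _catMap becomes {0, 0, 0, 1, 1} - a global one-hot column index maps back
// to its source categorical column, and numeric features start at _catOffsets[_cats] = 5.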
FVec fromArray(double[] input) {
float[] numValues = new float[_nums];
int[] catValues = new int[_cats];
GenModel.setCats(input, catValues, _cats, _catOffsets, _useAllFactorLevels);
for (int i = 0; i < numValues.length; i++) {
float val = (float) input[_cats + i];
numValues[i] = _sparse && (val == 0) ? Float.NaN : val;
}
if (_compatible10) {
return new OneHotEncoderFVecCompatible10(catValues, numValues);
} else {
return new DefaultOneHotEncoderFVec(catValues, numValues);
}
}
private abstract class AbstractOneHotEncoderFVec implements FVec {
protected final int[] _catValues;
protected final float[] _numValues;
private AbstractOneHotEncoderFVec(int[] catValues, float[] numValues) {
_catValues = catValues;
_numValues = numValues;
}
@Override
public final float fvalue(int index) {
if (index >= _catMap.length)
return _numValues[index - _catMap.length];
final boolean isHot = getCategoricalValue(index);
return isHot ? 1 : _notHot;
}
protected abstract boolean getCategoricalValue(int index);
}
private class DefaultOneHotEncoderFVec extends AbstractOneHotEncoderFVec {
public DefaultOneHotEncoderFVec(int[] catValues, float[] numValues) {
super(catValues, numValues);
}
@Override
protected boolean getCategoricalValue(int index) {
return _catValues[_catMap[index]] == index;
}
}
private class OneHotEncoderFVecCompatible10 extends AbstractOneHotEncoderFVec {
public OneHotEncoderFVecCompatible10(int[] catValues, float[] numValues) {
super(catValues, numValues);
}
@Override
protected boolean getCategoricalValue(int index) {
boolean hot = _catValues[_catMap[index]] == index;
if (hot) return true;
// check other columns for match
for (int catValue : _catValues) {
if (catValue == index) {
return true;
}
}
return false;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos/xgboost/XGBoostJavaMojoModel.java
|
package hex.genmodel.algos.xgboost;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.gbm.GradBooster;
import biz.k11i.xgboost.learner.ObjFunction;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.TreeSHAPHelper;
import biz.k11i.xgboost.util.FVec;
import hex.genmodel.PredictContributionsFactory;
import hex.genmodel.algos.tree.*;
import hex.genmodel.PredictContributions;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* Implementation of XGBoostMojoModel that uses Pure Java Predict
* see https://github.com/h2oai/xgboost-predictor
*/
public final class XGBoostJavaMojoModel extends XGBoostMojoModel implements PredictContributionsFactory {
private Predictor _predictor;
private TreeSHAPPredictor<FVec> _treeSHAPPredictor;
private OneHotEncoderFactory _1hotFactory;
@Deprecated
public XGBoostJavaMojoModel(byte[] boosterBytes, String[] columns, String[][] domains, String responseColumn) {
this(boosterBytes, null, columns, domains, responseColumn, false);
}
@Deprecated
public XGBoostJavaMojoModel(byte[] boosterBytes,
String[] columns, String[][] domains, String responseColumn,
boolean enableTreeSHAP) {
this(boosterBytes, null, columns, domains, responseColumn, enableTreeSHAP);
}
public XGBoostJavaMojoModel(byte[] boosterBytes, byte[] auxNodeWeightBytes,
String[] columns, String[][] domains, String responseColumn,
boolean enableTreeSHAP) {
super(columns, domains, responseColumn);
_predictor = makePredictor(boosterBytes, auxNodeWeightBytes);
_treeSHAPPredictor = enableTreeSHAP ? makeTreeSHAPPredictor(_predictor) : null;
}
@Override
public void postReadInit() {
_1hotFactory = new OneHotEncoderFactory(
backwardsCompatibility10(), _sparse, _catOffsets, _cats, _nums, _useAllFactorLevels
);
}
private boolean backwardsCompatibility10() {
return _mojo_version == 1.0 && !"gbtree".equals(_boosterType);
}
public static Predictor makePredictor(byte[] boosterBytes, byte[] auxNodeWeightBytes) {
try (InputStream is = new ByteArrayInputStream(boosterBytes)) {
Predictor p = new Predictor(is);
updateNodeWeights(p, auxNodeWeightBytes);
return p;
} catch (IOException e) {
throw new IllegalStateException("Failed to load predictor.", e);
}
}
public static void updateNodeWeights(Predictor predictor, byte[] auxNodeWeightBytes) {
if (auxNodeWeightBytes == null)
return;
assert predictor.getNumClass() <= 2;
GBTree gbTree = (GBTree) predictor.getBooster();
RegTree[] trees = gbTree.getGroupedTrees()[0];
double[][] weights = AuxNodeWeightsHelper.fromBytes(auxNodeWeightBytes);
assert trees.length == weights.length;
AuxNodeWeightsHelper.updateNodeWeights(trees, weights);
}
private static TreeSHAPPredictor<FVec> makeTreeSHAPPredictor(Predictor predictor) {
if (predictor.getNumClass() > 2) {
throw new UnsupportedOperationException("Calculating contributions is currently not supported for multinomial models.");
}
GBTree gbTree = (GBTree) predictor.getBooster();
RegTree[] trees = gbTree.getGroupedTrees()[0];
List<TreeSHAPPredictor<FVec>> predictors = new ArrayList<>(trees.length);
for (RegTree tree : trees) {
predictors.add(TreeSHAPHelper.makePredictor(tree));
}
float initPred = predictor.getBaseScore();
return new TreeSHAPEnsemble<>(predictors, initPred);
}
public final double[] score0(double[] doubles, double offset, double[] preds) {
if (backwardsCompatibility10()) {
// throw an exception for unexpectedly long input vector
if (doubles.length > _cats + _nums) {
throw new ArrayIndexOutOfBoundsException("Too many input values.");
}
// for unexpectedly short input vector handle the situation gracefully
if (doubles.length < _cats + _nums) {
double[] tmp = new double[_cats + _nums];
System.arraycopy(doubles, 0, tmp, 0, doubles.length);
doubles = tmp;
}
}
FVec row = _1hotFactory.fromArray(doubles);
float[] out;
if (_hasOffset) {
out = _predictor.predict(row, (float) offset);
} else if (offset != 0) {
throw new UnsupportedOperationException("Unsupported: offset != 0");
} else {
out = _predictor.predict(row);
}
return toPreds(doubles, out, preds, _nclasses, _priorClassDistrib, _defaultThreshold);
}
public final TreeSHAPPredictor.Workspace makeContributionsWorkspace() {
return _treeSHAPPredictor.makeWorkspace();
}
public final float[] calculateContributions(FVec row, float[] out_contribs, TreeSHAPPredictor.Workspace workspace) {
_treeSHAPPredictor.calculateContributions(row, out_contribs, 0, -1, workspace);
return out_contribs;
}
public final double[] calculateInterventionalContributions(FVec row, FVec background, double[] out_contribs, int[] catOffsets, boolean expand) {
_treeSHAPPredictor.calculateInterventionalContributions(row, background, out_contribs, catOffsets, expand);
return out_contribs;
}
@Override
public final PredictContributions makeContributionsPredictor() {
TreeSHAPPredictor<FVec> treeSHAPPredictor = _treeSHAPPredictor != null ?
_treeSHAPPredictor : makeTreeSHAPPredictor(_predictor);
return new XGBoostContributionsPredictor(this, treeSHAPPredictor);
}
static ObjFunction getObjFunction(String name) {
return ObjFunction.fromName(name);
}
@Override
public void close() {
_predictor = null;
_treeSHAPPredictor = null;
_1hotFactory = null;
}
@Override
public SharedTreeGraph convert(final int treeNumber, final String treeClass) {
GradBooster booster = _predictor.getBooster();
return computeGraph(booster, treeNumber);
}
@Override
public SharedTreeGraph convert(final int treeNumber, final String treeClass, final ConvertTreeOptions options) {
return convert(treeNumber, treeClass); // Options currently do not apply to XGBoost trees conversion
}
@Override
public double getInitF() {
return _predictor.getBaseScore();
}
@Override
public SharedTreeMojoModel.LeafNodeAssignments getLeafNodeAssignments(double[] doubles) {
FVec row = _1hotFactory.fromArray(doubles);
final SharedTreeMojoModel.LeafNodeAssignments result = new SharedTreeMojoModel.LeafNodeAssignments();
result._paths = _predictor.predictLeafPath(row);
result._nodeIds = _predictor.predictLeaf(row);
return result;
}
@Override
public String[] getDecisionPath(double[] doubles) {
FVec row = _1hotFactory.fromArray(doubles);
return _predictor.predictLeafPath(row);
}
private final class XGBoostContributionsPredictor extends ContributionsPredictor<FVec> {
private XGBoostContributionsPredictor(XGBoostMojoModel model, TreeSHAPPredictor<FVec> treeSHAPPredictor) {
super(_nums + _catOffsets[_cats] + 1, makeFeatureContributionNames(model), treeSHAPPredictor);
}
@Override
protected FVec toInputRow(double[] input) {
return _1hotFactory.fromArray(input);
}
}
private static String[] makeFeatureContributionNames(XGBoostMojoModel m) {
final String[] names = new String[m._nums + m._catOffsets[m._cats]];
final String[] features = m.features();
int i = 0;
for (int c = 0; c < features.length; c++) {
if (m._domains[c] == null) {
names[i++] = features[c];
} else {
for (String d : m._domains[c])
names[i++] = features[c] + "." + d;
names[i++] = features[c] + ".missing(NA)";
}
}
assert names.length == i;
return names;
}
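// Worked example (assumption, not in the original sources): features = {"Sex", "AGE"} with
// domain {"F", "M"} for "Sex" yields contribution names
// {"Sex.F", "Sex.M", "Sex.missing(NA)", "AGE"}.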
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos/xgboost/XGBoostModelAttributes.java
|
package hex.genmodel.algos.xgboost;
import com.google.gson.JsonObject;
import hex.genmodel.MojoModel;
import hex.genmodel.attributes.*;
public class XGBoostModelAttributes extends SharedTreeModelAttributes {
public XGBoostModelAttributes(JsonObject modelJson, MojoModel model) {
super(modelJson, model);
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos/xgboost/XGBoostMojoModel.java
|
package hex.genmodel.algos.xgboost;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.gbm.GradBooster;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNode;
import biz.k11i.xgboost.tree.RegTreeNodeStat;
import hex.genmodel.GenModel;
import hex.genmodel.MojoModel;
import hex.genmodel.algos.isotonic.IsotonicCalibrator;
import hex.genmodel.algos.tree.*;
import java.io.Closeable;
import java.util.Arrays;
import static hex.genmodel.algos.tree.SharedTreeMojoModel.treeName;
/**
* "Gradient Boosting Machine" MojoModel
*/
public abstract class XGBoostMojoModel extends MojoModel implements TreeBackedMojoModel, SharedTreeGraphConverter, CalibrationMojoHelper.MojoModelWithCalibration, Closeable {
private static final String SPACE = " ";
public enum ObjectiveType {
BINARY_LOGISTIC("binary:logistic"),
REG_GAMMA("reg:gamma"),
REG_TWEEDIE("reg:tweedie"),
COUNT_POISSON("count:poisson"),
REG_SQUAREDERROR("reg:squarederror"),
@Deprecated REG_LINEAR("reg:linear"), // deprecated in favour of REG_SQUAREDERROR
MULTI_SOFTPROB("multi:softprob"),
RANK_PAIRWISE("rank:pairwise");
private String _id;
ObjectiveType(String id) {
_id = id;
}
public String getId() {
return _id;
}
public static ObjectiveType fromXGBoost(String type) {
for (ObjectiveType t : ObjectiveType.values())
if (t.getId().equals(type))
return t;
return null;
}
}
public String _boosterType;
public int _ntrees;
public int _nums;
public int _cats;
public int[] _catOffsets;
public boolean _useAllFactorLevels;
public boolean _sparse;
public String _featureMap;
public boolean _hasOffset;
/**
* GLM's beta used for calibrating output probabilities using Platt Scaling.
*/
protected double[] _calib_glm_beta;
/**
* For calibrating using Isotonic Regression
*/
protected IsotonicCalibrator _isotonic_calibrator;
public XGBoostMojoModel(String[] columns, String[][] domains, String responseColumn) {
super(columns, domains, responseColumn);
}
// finalize MOJO initialization after all the fields are read
public void postReadInit() {}
@Override
public boolean requiresOffset() {
return _hasOffset;
}
@Override
public final double[] score0(double[] row, double[] preds) {
if (_hasOffset) {
throw new IllegalStateException("Model was trained with offset, use score0 with offset");
}
return score0(row, 0.0, preds);
}
// for float output
public static double[] toPreds(double in[], float[] out, double[] preds,
int nclasses, double[] priorClassDistrib, double defaultThreshold) {
if (nclasses > 2) {
for (int i = 0; i < out.length; ++i)
preds[1 + i] = out[i];
preds[0] = GenModel.getPrediction(preds, priorClassDistrib, in, defaultThreshold);
} else if (nclasses==2){
preds[1] = 1f - out[0];
preds[2] = out[0];
preds[0] = GenModel.getPrediction(preds, priorClassDistrib, in, defaultThreshold);
} else {
preds[0] = out[0];
}
return preds;
}
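// Worked example (assumption, not in the original sources): for a binomial model with
// out = {0.3f}, the result is preds = {label, 0.7, 0.3}, where label is picked by
// GenModel.getPrediction using the default threshold.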
@Override
public int getNTreeGroups() {
return _ntrees;
}
@Override
public int getNTreesPerGroup() {
return _nclasses > 2 ? _nclasses : 1;
}
@Override
public double[] getCalibGlmBeta() {
return _calib_glm_beta;
}
@Override
public IsotonicCalibrator getIsotonicCalibrator() {
return _isotonic_calibrator;
}
@Override
public boolean calibrateClassProbabilities(double[] preds) {
return CalibrationMojoHelper.calibrateClassProbabilities(this, preds);
}
protected void constructSubgraph(final RegTreeNode[] xgBoostNodes, final RegTreeNodeStat[] nodeStats,
final SharedTreeNode sharedTreeNode,
final int nodeIndex, final SharedTreeSubgraph sharedTreeSubgraph,
final boolean[] oneHotEncodedMap, final boolean inclusiveNA, final String[] features) {
final RegTreeNode xgBoostNode = xgBoostNodes[nodeIndex];
// Not testing for NaNs, as SharedTreeNode uses NaNs as default values.
//No domain set, as the structure mimics XGBoost's tree, which is numeric-only
if (oneHotEncodedMap[xgBoostNode.getSplitIndex()]) {
// Shared tree model uses < to the left and >= to the right. Transforming one-hot encoded categoricals
// from 0 to 1 makes it fit the current split description logic
sharedTreeNode.setSplitValue(1.0F);
} else {
sharedTreeNode.setSplitValue(xgBoostNode.getSplitCondition());
}
sharedTreeNode.setPredValue(xgBoostNode.getLeafValue());
sharedTreeNode.setCol(xgBoostNode.getSplitIndex(), features[xgBoostNode.getSplitIndex()].split(SPACE)[1]);
sharedTreeNode.setInclusiveNa(inclusiveNA);
sharedTreeNode.setNodeNumber(nodeIndex);
sharedTreeNode.setWeight(nodeStats[nodeIndex].getWeight());
if (xgBoostNode.getLeftChildIndex() != -1) {
constructSubgraph(xgBoostNodes, nodeStats, sharedTreeSubgraph.makeLeftChildNode(sharedTreeNode),
xgBoostNode.getLeftChildIndex(), sharedTreeSubgraph, oneHotEncodedMap, xgBoostNode.default_left(),
features);
}
if (xgBoostNode.getRightChildIndex() != -1) {
constructSubgraph(xgBoostNodes, nodeStats, sharedTreeSubgraph.makeRightChildNode(sharedTreeNode),
xgBoostNode.getRightChildIndex(), sharedTreeSubgraph, oneHotEncodedMap, !xgBoostNode.default_left(),
features);
}
}
private String[] constructFeatureMap() {
final String[] featureMapTokens = _featureMap.split("\n");
// There might be an empty line after the final "\n"; this avoids parsing empty token(s) at the end
int nonEmptyTokenRange = featureMapTokens.length;
for (int i = 0; i < featureMapTokens.length; i++) {
if (featureMapTokens[i].trim().isEmpty()) {
nonEmptyTokenRange = i;
break;
}
}
return Arrays.copyOfRange(featureMapTokens, 0, nonEmptyTokenRange);
}
protected boolean[] markOneHotEncodedCategoricals(final String[] featureMap) {
final int numColumns = featureMap.length;
int numCatCols = -1;
for (int i = 0; i < featureMap.length;i++) {
final String[] s = featureMap[i].split(SPACE);
assert s.length >= 3; // at least three tokens: index, name, and feature type ("i" marks a one-hot indicator column)
if(!s[2].equals("i")){
numCatCols = i;
break;
}
}
if (numCatCols == -1) {
numCatCols = featureMap.length;
}
boolean[] categorical = new boolean[numColumns];
for (int i = 0; i < numColumns; ++i) {
if (i < numCatCols) {
categorical[i] = true;
}
}
return categorical;
}
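// Illustrative feature-map sketch (assumption about the concrete lines; the format is
// "<index> <name> <type>", with one-hot indicator ("i") columns listed first):
// 0 Sex.F i
// 1 Sex.M i
// 2 AGE int
// would yield categorical = {true, true, false}.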
/**
* Converts a given XGBoost tree (or a collection of trees) to a common tree representation
*
* @param booster tree based booster
* @param treeToPrint index of tree to convert or -1 if all trees should be converted
* @return instance of SharedTreeGraph
*/
SharedTreeGraph computeGraph(final GradBooster booster, final int treeToPrint) {
if (!(booster instanceof GBTree)) {
throw new IllegalArgumentException(String.format("Given XGBoost model is not backed by a tree-based booster. Booster class is %s",
booster.getClass().getCanonicalName()));
}
final int ntreeGroups = getNTreeGroups();
final int ntreePerGroup = getNTreesPerGroup();
if (treeToPrint >= ntreeGroups) {
throw new IllegalArgumentException("Tree " + treeToPrint + " does not exist (max " + ntreeGroups + ")");
}
final String[] features = constructFeatureMap();
final boolean[] oneHotEncodedMap = markOneHotEncodedCategoricals(features); // XGBoost's usage of one-hot encoding assumed
final RegTree[][] treesAndClasses = ((GBTree) booster).getGroupedTrees();
final SharedTreeGraph g = new SharedTreeGraph();
for (int j = Math.max(treeToPrint, 0); j < ntreeGroups; j++) {
for (int i = 0; i < ntreePerGroup; i++) {
if (j >= treesAndClasses[i].length || treesAndClasses[i][j] == null)
continue; // tree doesn't exist for the given class (in multiclass some can be missing)
RegTreeNode[] treeNodes = treesAndClasses[i][j].getNodes();
RegTreeNodeStat[] nodeStats = treesAndClasses[i][j].getStats();
assert treeNodes.length >= 1;
String[] domainValues = isSupervised() ? getDomainValues(getResponseIdx()) : null;
String treeName = treeName(j, i, domainValues);
SharedTreeSubgraph sg = g.makeSubgraph(treeName);
constructSubgraph(treeNodes, nodeStats, sg.makeRootNode(), 0, sg, oneHotEncodedMap,
true, features); // Root node is at index 0
}
if (treeToPrint >= 0)
break;
}
return g;
}
@Override
public SharedTreeGraph convert(int treeNumber, String treeClass, ConvertTreeOptions options) {
return convert(treeNumber, treeClass); // no use for options as of now
}
}
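// A minimal, standalone usage sketch of the converter above, assuming a MOJO file named
// "xgb.zip" on disk; the path and the tree index are illustrative. MojoModel.load and the
// SharedTreeGraphConverter interface come from h2o-genmodel; convert(0, null) requests the
// first tree with the default tree class, mirroring the convert(...) overload above.
class ConvertTreeExample {
  public static void main(String[] args) throws Exception {
    hex.genmodel.MojoModel mojo = hex.genmodel.MojoModel.load("xgb.zip"); // path is an assumption
    hex.genmodel.algos.tree.SharedTreeGraph graph =
        ((hex.genmodel.algos.tree.SharedTreeGraphConverter) mojo).convert(0, null);
    // subgraphArray is assumed to be the public list of per-tree subgraphs on SharedTreeGraph
    System.out.println("subgraphs: " + graph.subgraphArray.size());
  }
}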
|
0
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos
|
java-sources/ai/h2o/h2o-genmodel-ext-xgboost/3.46.0.7/hex/genmodel/algos/xgboost/XGBoostMojoReader.java
|
package hex.genmodel.algos.xgboost;
import com.google.gson.JsonObject;
import hex.genmodel.ModelMojoReader;
import hex.genmodel.attributes.ModelJsonReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
public class XGBoostMojoReader extends ModelMojoReader<XGBoostMojoModel> {
@Override
public String getModelName() {
return "XGBoost";
}
@Override
protected void readModelData() throws IOException {
_model._boosterType = readkv("booster");
_model._ntrees = readkv("ntrees", 0);
_model._nums = readkv("nums");
_model._cats = readkv("cats");
_model._catOffsets = readkv("cat_offsets");
_model._useAllFactorLevels = readkv("use_all_factor_levels");
_model._sparse = readkv("sparse");
if (exists("feature_map")) {
_model._featureMap = new String(readblob("feature_map"), StandardCharsets.UTF_8);
}
// Calibration
String calibMethod = readkv("calib_method");
if (calibMethod != null) {
switch (calibMethod) {
case "platt":
_model._calib_glm_beta = readkv("calib_glm_beta", new double[0]);
break;
case "isotonic":
_model._isotonic_calibrator = readIsotonicCalibrator();
break;
default:
throw new IllegalStateException("Unknown calibration method: " + calibMethod);
}
}
_model._hasOffset = readkv("has_offset", false);
_model.postReadInit();
}
@Override
protected XGBoostMojoModel makeModel(String[] columns, String[][] domains, String responseColumn) {
byte[] boosterBytes;
byte[] auxNodeWeights = null;
try {
boosterBytes = readblob("boosterBytes");
if (exists("auxNodeWeights"))
auxNodeWeights = readblob("auxNodeWeights");
} catch (IOException e) {
throw new IllegalStateException("MOJO is corrupted: cannot read the serialized Booster", e);
}
return new XGBoostJavaMojoModel(boosterBytes, auxNodeWeights, columns, domains, responseColumn, false);
}
@Override public String mojoVersion() {
return "1.10";
}
@Override
protected XGBoostModelAttributes readModelSpecificAttributes() {
final JsonObject modelJson = ModelJsonReader.parseModelJson(_reader);
if(modelJson != null) {
return new XGBoostModelAttributes(modelJson, _model);
} else {
return null;
}
}
}
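// A hedged, standalone scoring sketch for a MOJO read by this reader, using the standard
// h2o-genmodel easy-predict API; the file name "xgb_model.zip" and the column name
// "feature1" are illustrative assumptions, not taken from the original source.
import hex.genmodel.MojoModel;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowData;
import hex.genmodel.easy.prediction.BinomialModelPrediction;

class XGBoostMojoScoringExample {
  public static void main(String[] args) throws Exception {
    // MojoModel.load dispatches to XGBoostMojoReader based on the algorithm recorded in the MOJO metadata
    EasyPredictModelWrapper wrapper = new EasyPredictModelWrapper(MojoModel.load("xgb_model.zip"));
    RowData row = new RowData();
    row.put("feature1", "1.5"); // RowData values are passed as Strings or Doubles
    BinomialModelPrediction p = wrapper.predictBinomial(row);
    System.out.println(p.label + " " + java.util.Arrays.toString(p.classProbabilities));
  }
}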
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/DelegationTokenPrinter.java
|
package water.hive;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Date;
import java.util.Objects;
public class DelegationTokenPrinter {
private static class PrintingDelegationTokenIdentifier extends AbstractDelegationTokenIdentifier {
public Text getKind() {
return new Text("PRINT");
}
}
public static void printToken(final String tokenString) {
Objects.requireNonNull(tokenString);
try {
final Token<DelegationTokenIdentifier> token = new Token<>();
token.decodeFromUrlString(tokenString);
final AbstractDelegationTokenIdentifier identifier = new PrintingDelegationTokenIdentifier();
identifier.readFields(new DataInputStream(new ByteArrayInputStream(token.getIdentifier())));
System.out.println(
"token.kind: " + token.getKind() + ", " +
"token.service: " + token.getService() + ", " +
"id.owner: " + identifier.getOwner() + ", " +
"id.renewer: " + identifier.getRenewer() + ", " +
"id.realUser: " + identifier.getRealUser() + ", " +
"id.issueDate: " + identifier.getIssueDate() + " (" + new Date(identifier.getIssueDate()) + "), " +
"id.maxDate: " + identifier.getMaxDate() + " (" + new Date(identifier.getMaxDate()) + "), " +
"id.validity: " + (identifier.getMaxDate() - System.currentTimeMillis()) / 3600_000 + " hours");
} catch (IOException e) {
System.out.println("Failed to decode token, no debug information will be displayed, cause:" + e.getMessage());
}
}
}
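// A minimal sketch of feeding printToken, assuming the URL-encoded token string was
// previously written to a file named "hive.token" (e.g. by the GenerateHiveToken tool
// later in this package); the file name is an assumption.
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

class PrintTokenExample {
  public static void main(String[] args) throws Exception {
    String token = new String(Files.readAllBytes(Paths.get("hive.token")), StandardCharsets.UTF_8).trim();
    DelegationTokenPrinter.printToken(token); // decodes the token and prints its identifier fields
  }
}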
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/DelegationTokenRefresher.java
|
package water.hive;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import water.H2O;
import water.MRTask;
import water.Paxos;
import water.persist.security.HdfsDelegationTokenRefresher;
import water.util.BinaryFileTransfer;
import water.util.FileUtils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class DelegationTokenRefresher implements Runnable {
public static final String H2O_AUTH_USER = "h2o.auth.user";
public static final String H2O_AUTH_PRINCIPAL = "h2o.auth.principal";
public static final String H2O_AUTH_KEYTAB = "h2o.auth.keytab";
public static final String H2O_HIVE_USE_KEYTAB = "h2o.hive.useKeytab";
public static final String H2O_HIVE_JDBC_URL_PATTERN = "h2o.hive.jdbc.urlPattern";
public static final String H2O_HIVE_HOST = "h2o.hive.jdbc.host";
public static final String H2O_HIVE_PRINCIPAL = "h2o.hive.principal";
public static final String H2O_HIVE_TOKEN = "h2o.hive.token";
public static void setup(Configuration conf, String tmpDir) throws IOException {
if (!HiveTokenGenerator.isHiveDriverPresent()) {
return;
}
String token = conf.get(H2O_HIVE_TOKEN);
if (token != null) {
log("Adding credentials from property", null);
Credentials creds = HiveTokenGenerator.tokenToCredentials(token);
UserGroupInformation.getCurrentUser().addCredentials(creds);
}
String authUser = conf.get(H2O_AUTH_USER);
String authPrincipal = conf.get(H2O_AUTH_PRINCIPAL);
boolean useKeytab = conf.getBoolean(H2O_HIVE_USE_KEYTAB, true);
String authKeytab = useKeytab ? conf.get(H2O_AUTH_KEYTAB) : null;
String hivePrincipal = conf.get(H2O_HIVE_PRINCIPAL);
String hiveJdbcUrl = makeHiveJdbcUrl(conf, hivePrincipal, authKeytab != null);
if (hiveJdbcUrl != null) {
String authKeytabPath;
if (authKeytab != null) {
authKeytabPath = writeKeytabToFile(authKeytab, tmpDir);
} else {
authKeytabPath = null;
}
new DelegationTokenRefresher(authPrincipal, authKeytabPath, authUser, hiveJdbcUrl, hivePrincipal).start();
} else {
log("Delegation token refresh not active.", null);
}
}
private static String makeHiveJdbcUrl(Configuration conf, String hivePrincipal, boolean useKeytab) {
String hiveJdbcUrlPattern = conf.get(H2O_HIVE_JDBC_URL_PATTERN);
String hiveHost = conf.get(H2O_HIVE_HOST);
final String hiveJdbcUrl;
if (useKeytab) {
hiveJdbcUrl = HiveTokenGenerator.makeHivePrincipalJdbcUrl(hiveJdbcUrlPattern, hiveHost, hivePrincipal);
} else {
hiveJdbcUrl = HiveTokenGenerator.makeHiveDelegationTokenJdbcUrl(hiveJdbcUrlPattern, hiveHost);
}
return hiveJdbcUrl;
}
public static boolean startRefresher(Configuration conf, H2O.OptArgs args) {
final String authKeytabPath = args.keytab_path;
final String authPrincipal = args.principal;
final String hivePrincipal = conf.get(H2O_HIVE_PRINCIPAL);
final String hiveJdbcUrl = makeHiveJdbcUrl(conf, hivePrincipal, authKeytabPath != null);
if (hiveJdbcUrl == null)
return false;
new DelegationTokenRefresher(authPrincipal, authKeytabPath, null, hiveJdbcUrl, hivePrincipal)
.start();
return true;
}
private static String writeKeytabToFile(String authKeytab, String tmpDir) throws IOException {
FileUtils.makeSureDirExists(tmpDir);
String fileName = tmpDir + File.separator + "auth_keytab";
byte[] byteArr = BinaryFileTransfer.convertStringToByteArr(authKeytab);
BinaryFileTransfer.writeBinaryFile(fileName, byteArr);
return fileName;
}
private final ScheduledExecutorService _executor = Executors.newSingleThreadScheduledExecutor(
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("delegation-token-refresher-%d").build()
);
private final String _authPrincipal;
private final String _authKeytabPath;
private final String _authUser;
private final String _hiveJdbcUrl;
private final String _hivePrincipal;
private final HiveTokenGenerator _hiveTokenGenerator = new HiveTokenGenerator();
public DelegationTokenRefresher(
String authPrincipal,
String authKeytabPath,
String authUser,
String hiveJdbcUrl,
String hivePrincipal
) {
this._authPrincipal = authPrincipal;
this._authKeytabPath = authKeytabPath;
this._authUser = authUser;
this._hiveJdbcUrl = hiveJdbcUrl;
this._hivePrincipal = hivePrincipal;
}
public void start() {
_executor.scheduleAtFixedRate(this, 0, 1, TimeUnit.MINUTES);
}
private static void log(String s, Exception e) {
System.out.println("TOKEN REFRESH: " + s);
if (e != null) {
e.printStackTrace(System.out);
}
}
@Override
public void run() {
    if (Paxos._cloudLocked && H2O.CLOUD.leader() != H2O.SELF) {
      // once the cloud is formed, the leader takes care of subsequent refreshes
_executor.shutdown();
return;
}
try {
refreshTokens();
} catch (IOException | InterruptedException e) {
log("Failed to refresh token.", e);
}
}
private static class DistributeCreds extends MRTask<DistributeCreds> {
private final byte[] _credsSerialized;
private DistributeCreds(byte[] credsSerialized) {
this._credsSerialized = credsSerialized;
}
@Override
protected void setupLocal() {
try {
Credentials creds = deserialize();
log("Updating credentials", null);
UserGroupInformation.getCurrentUser().addCredentials(creds);
} catch (IOException e) {
log("Failed to update credentials", e);
}
}
private Credentials deserialize() throws IOException {
ByteArrayInputStream tokensBuf = new ByteArrayInputStream(_credsSerialized);
Credentials creds = new Credentials();
creds.readTokenStorageStream(new DataInputStream(tokensBuf));
return creds;
}
}
private void distribute(Credentials creds) throws IOException {
if (!Paxos._cloudLocked) {
      // before the cloud is formed, skip token distribution and only use the credentials locally
log("Updating credentials", null);
UserGroupInformation.getCurrentUser().addCredentials(creds);
} else {
byte[] credsSerialized = serializeCreds(creds);
new DistributeCreds(credsSerialized).doAllNodes();
}
}
private void refreshTokens() throws IOException, InterruptedException {
String token;
if (_authKeytabPath != null) {
log("Log in from keytab as " + _authPrincipal, null);
UserGroupInformation realUser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(_authPrincipal, _authKeytabPath);
UserGroupInformation tokenUser = realUser;
if (_authUser != null) {
log("Impersonate " + _authUser, null);
        // attempt to impersonate the token user; this verifies that the real user is allowed to impersonate tokenUser
tokenUser = UserGroupInformation.createProxyUser(_authUser, tokenUser);
}
token = _hiveTokenGenerator.getHiveDelegationTokenAsUser(realUser, tokenUser, _hiveJdbcUrl, _hivePrincipal);
} else {
UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
token = _hiveTokenGenerator.getHiveDelegationTokenIfPossible(currentUser, _hiveJdbcUrl, _hivePrincipal);
}
if (token != null) {
DelegationTokenPrinter.printToken(token);
Credentials creds = HiveTokenGenerator.tokenToCredentials(token);
distribute(creds);
} else {
log("Failed to refresh delegation token.", null);
}
}
private byte[] serializeCreds(Credentials creds) throws IOException {
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
DataOutputStream dataStream = new DataOutputStream(byteStream);
creds.writeTokenStorageToStream(dataStream);
return byteStream.toByteArray();
}
}
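// A self-contained sketch of the Credentials round trip performed by serializeCreds and
// DistributeCreds.deserialize above; the alias and the empty Token are illustrative, and
// no H2O cluster is needed to run it.
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class CredentialsRoundTripExample {
  public static void main(String[] args) throws IOException {
    Credentials creds = new Credentials();
    creds.addToken(new Text("example-alias"), new Token<>()); // empty token, for illustration only
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    creds.writeTokenStorageToStream(new DataOutputStream(bytes));
    Credentials copy = new Credentials();
    copy.readTokenStorageStream(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println("tokens after round trip: " + copy.numberOfTokens());
  }
}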
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/DirectHiveMetadata.java
|
package water.hive;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.thrift.TException;
import java.util.List;
import java.util.Map;
import static java.util.stream.Collectors.toList;
import static water.api.ImportHiveTableHandler.HiveTableImporter.DEFAULT_DATABASE;
public class DirectHiveMetadata implements HiveMetaData {
private final String database;
public DirectHiveMetadata(String database) {
if (database == null || database.isEmpty()) {
this.database = DEFAULT_DATABASE;
} else {
this.database = database;
}
}
static class HivePartition implements Partition {
private final org.apache.hadoop.hive.metastore.api.Partition partition;
HivePartition(org.apache.hadoop.hive.metastore.api.Partition partition) {
this.partition = partition;
}
@Override
public List<String> getValues() {
return partition.getValues();
}
@Override
public Map<String, String> getSerDeParams() {
return partition.getSd().getSerdeInfo().getParameters();
}
@Override
public String getLocation() {
return partition.getSd().getLocation();
}
@Override
public String getSerializationLib() {
return partition.getSd().getSerdeInfo().getSerializationLib();
}
@Override
public String getInputFormat() {
return partition.getSd().getInputFormat();
}
}
static class HiveColumn implements Column {
private final FieldSchema column;
HiveColumn(FieldSchema column) {
this.column = column;
}
@Override
public String getName() {
return column.getName();
}
@Override
public String getType() {
return column.getType();
}
}
static class HiveTable implements Table {
private final org.apache.hadoop.hive.metastore.api.Table table;
private final List<Partition> partitions;
private final List<Column> columns;
private final List<Column> partitionKeys;
HiveTable(org.apache.hadoop.hive.metastore.api.Table table, List<org.apache.hadoop.hive.metastore.api.Partition> parts) {
this.table = table;
this.partitions = parts.stream().map(HivePartition::new).collect(toList());
this.columns = table.getSd().getCols().stream().map(HiveColumn::new).collect(toList());
this.partitionKeys = table.getPartitionKeys().stream().map(HiveColumn::new).collect(toList());
}
@Override
public String getName() {
return table.getTableName();
}
@Override
public boolean hasPartitions() {
return !partitions.isEmpty();
}
@Override
public List<Partition> getPartitions() {
return partitions;
}
@Override
public List<Column> getColumns() {
return columns;
}
@Override
public Map<String, String> getSerDeParams() {
return table.getSd().getSerdeInfo().getParameters();
}
@Override
public String getLocation() {
return table.getSd().getLocation();
}
@Override
public String getSerializationLib() {
return table.getSd().getSerdeInfo().getSerializationLib();
}
@Override
public String getInputFormat() {
return table.getSd().getInputFormat();
}
@Override
public List<Column> getPartitionKeys() {
return partitionKeys;
}
}
@Override
  public Table getTable(String tableName) throws TException {
    Configuration conf = new Configuration();
    HiveConf hiveConf = new HiveConf(conf, HiveTableImporterImpl.class);
    HiveMetaStoreClient client = new HiveMetaStoreClient(hiveConf);
    try {
      org.apache.hadoop.hive.metastore.api.Table table = client.getTable(database, tableName);
      List<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.listPartitions(database, tableName, Short.MAX_VALUE);
      return new HiveTable(table, partitions);
    } finally {
      client.close(); // release the metastore connection
    }
  }
}
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/FrameParquetWriter.java
|
package water.hive;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetWriter;
import water.H2O;
import water.Key;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.persist.PersistHdfs;
import water.util.PrettyPrint;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class FrameParquetWriter {
public void write(Frame frame, String fileName) throws IOException {
Schema schema = makeSchema(frame);
try (ParquetWriter<GenericRecord> writer = openWriter(fileName, schema)) {
Chunk[] chunks = new Chunk[frame.numCols()];
BufferedString tmpStr = new BufferedString();
for (int cidx = 0; cidx < frame.anyVec().nChunks(); cidx++) {
for (int col = 0; col < frame.numCols(); col++) {
chunks[col] = frame.vec(col).chunkForChunkIdx(cidx);
}
for (int crow = 0; crow < chunks[0].len(); crow++) {
GenericRecordBuilder builder = new GenericRecordBuilder(schema);
for (int col = 0; col < frame.numCols(); col++) {
builder.set(frame.name(col), getValue(chunks[col], crow, tmpStr));
}
writer.write(builder.build());
}
for (int col = 0; col < frame.numCols(); col++) {
Key chunkKey = chunks[col].vec().chunkKey(cidx);
if (!chunkKey.home()) {
H2O.raw_remove(chunkKey);
}
}
}
}
}
private Object getValue(Chunk chunk, int crow, BufferedString tmpStr) {
Vec v = chunk.vec();
if (!chunk.isNA(crow)) {
if (v.isCategorical()) {
return chunk.vec().domain()[(int) chunk.at8(crow)];
} else if (v.isUUID()) {
return PrettyPrint.UUID(chunk.at16l(crow), chunk.at16h(crow));
} else if (v.isInt()) {
return chunk.at8(crow);
} else if (v.isString()) {
return chunk.atStr(tmpStr, crow).toString();
} else {
return chunk.atd(crow);
}
} else {
return null;
}
}
private ParquetWriter<GenericRecord> openWriter(String fileName, Schema schema) throws IOException {
return AvroParquetWriter.<GenericRecord>builder(new Path(fileName))
.withSchema(schema)
.withConf(PersistHdfs.CONF)
.build();
}
private Schema makeSchema(Frame frame) {
List<Schema.Field> fields = new ArrayList<>();
for (int cidx = 0; cidx < frame.numCols(); cidx++) {
fields.add(new Schema.Field(
frame.name(cidx),
getColumnType(frame.vec(cidx)),
null,
null
));
}
Schema schema = Schema.createRecord("h2o_frame", null, null, false);
schema.setFields(fields);
return schema;
}
private Schema getColumnType(Vec v) {
Schema type;
if (v.isCategorical() || v.isUUID() || v.isString()) {
type = Schema.create(Schema.Type.STRING);
} else if (v.isInt()) {
type = Schema.create(Schema.Type.LONG);
} else {
type = Schema.create(Schema.Type.DOUBLE);
}
return Schema.createUnion(Arrays.asList(type, Schema.create(Schema.Type.NULL)));
}
}
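// A standalone sketch of the schema pattern makeSchema uses above: every column becomes a
// union of its base type with NULL so missing values (NAs) can be written. The record and
// column names below are made up; running it only builds and prints one record.
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class NullableAvroSchemaExample {
  public static void main(String[] args) {
    List<Schema.Field> fields = new ArrayList<>();
    fields.add(nullableField("age", Schema.Type.LONG));
    fields.add(nullableField("name", Schema.Type.STRING));
    Schema schema = Schema.createRecord("example_frame", null, null, false);
    schema.setFields(fields);
    GenericRecord rec = new GenericRecordBuilder(schema)
        .set("age", 42L)
        .set("name", null) // the NULL branch of the union stands in for an NA
        .build();
    System.out.println(rec);
  }

  private static Schema.Field nullableField(String name, Schema.Type type) {
    Schema union = Schema.createUnion(Arrays.asList(Schema.create(type), Schema.create(Schema.Type.NULL)));
    return new Schema.Field(name, union, null, (Object) null);
  }
}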
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/GenerateHiveToken.java
|
package water.hive;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.IOException;
import java.io.PrintWriter;
public class GenerateHiveToken extends Configured implements Tool {
private String runAsUser = null;
private String principal = null;
private String keytabPath = null;
private String hiveJdbcUrlPattern = null;
private String hiveHost = null;
private String hivePrincipal = null;
private String tokenFile = null;
public static void main(String[] args) throws Exception {
int exitCode = ToolRunner.run(new GenerateHiveToken(), args);
System.exit(exitCode);
}
  private void usage() {
    System.out.println("Usage: GenerateHiveToken [-run_as_user <user>] [-principal <kerberos principal> -keytab <path to keytab>]");
    System.out.println("                         [-hiveJdbcUrlPattern <JDBC URL pattern>] [-hiveHost <host:port>]");
    System.out.println("                         -hivePrincipal <hive principal> -tokenFile <output file>");
    System.exit(1);
  }
private void parseArgs(String[] args) {
int i = 0;
while (i < args.length) {
String s = args[i];
if (s.equals("-run_as_user")) {
i++;
if (i >= args.length) {
usage();
}
runAsUser = args[i];
} else if (s.equals("-principal")) {
i++;
if (i >= args.length) {
usage();
}
principal = args[i];
} else if (s.equals("-keytab")) {
i++;
if (i >= args.length) {
usage();
}
keytabPath = args[i];
} else if (s.equals("-hiveJdbcUrlPattern")) {
i++;
if (i >= args.length) {
usage();
}
hiveJdbcUrlPattern = args[i];
} else if (s.equals("-hiveHost")) {
i++;
if (i >= args.length) {
usage();
}
hiveHost = args[i];
} else if (s.equals("-hivePrincipal")) {
i++;
if (i >= args.length) {
usage();
}
hivePrincipal = args[i];
} else if (s.equals("-tokenFile")) {
i++;
if (i >= args.length) {
usage();
}
tokenFile = args[i];
} else {
System.err.println("Unrecognized option " + s);
System.exit(1);
}
i++;
}
}
private void validateArgs() {
ImpersonationUtils.validateImpersonationArgs(principal, keytabPath, runAsUser, this::error, this::warning);
if (hivePrincipal == null) {
error("hive principal name is required (use the '-hivePrincipal' option)");
}
if (hiveHost == null && hiveJdbcUrlPattern == null) {
error("delegation token generator requires Hive host or JDBC URL to be set (use the '-hiveHost' or '-hiveJdbcUrlPattern' option)");
}
if (tokenFile == null) {
error("token file path required (use the '-tokenFile' option)");
}
if (!HiveTokenGenerator.isHiveDriverPresent()) {
error("Hive JDBC driver not available on class-path");
}
}
@Override
public int run(String[] args) throws IOException, InterruptedException {
parseArgs(args);
validateArgs();
ImpersonationUtils.impersonate(getConf(), principal, keytabPath, runAsUser);
String token = HiveTokenGenerator.getHiveDelegationTokenIfHivePresent(hiveJdbcUrlPattern, hiveHost, hivePrincipal);
if (token != null) {
DelegationTokenPrinter.printToken(token);
System.out.println("Token generated, writing into file " + tokenFile);
try (PrintWriter pw = new PrintWriter(tokenFile)) {
pw.print(token);
}
return 0;
} else {
System.out.println("No token generated.");
return 1;
}
}
private void error(String s) {
System.err.printf("\nERROR: " + "%s\n\n", s);
usage();
}
private void warning(String s) {
System.err.printf("\nWARNING: " + "%s\n\n", s);
}
}
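// A hedged sketch of running the tool above programmatically via ToolRunner instead of
// the command line; every argument value below is a placeholder.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

class GenerateHiveTokenExample {
  public static void main(String[] args) throws Exception {
    int rc = ToolRunner.run(new Configuration(), new GenerateHiveToken(), new String[]{
        "-principal", "h2o/host@EXAMPLE.COM",
        "-keytab", "/etc/security/keytabs/h2o.keytab",
        "-hivePrincipal", "hive/_HOST@EXAMPLE.COM",
        "-hiveHost", "hive-server.example.com:10000",
        "-tokenFile", "hive.token"
    });
    System.exit(rc);
  }
}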
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/HiveComponent.java
|
package water.hive;
import org.apache.hadoop.conf.Configuration;
import water.H2O;
import water.init.StandaloneKerberosComponent;
public class HiveComponent implements StandaloneKerberosComponent {
@Override
public String name() {
return "SecuredHive";
}
@Override
public int priority() {
return 1000;
}
@Override
public boolean initComponent(Object conf, H2O.OptArgs args) {
return DelegationTokenRefresher.startRefresher((Configuration) conf, args);
}
}
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/HiveFrameSaverImpl.java
|
package water.hive;
import hex.genmodel.utils.IOUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import water.AbstractH2OExtension;
import water.H2O;
import water.Key;
import water.api.SaveToHiveTableHandler;
import water.fvec.Frame;
import water.fvec.Vec;
import water.jdbc.SQLManager;
import water.persist.Persist;
import water.persist.PersistHdfs;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.UUID;
public class HiveFrameSaverImpl extends AbstractH2OExtension implements SaveToHiveTableHandler.HiveFrameSaver {
  private static final Logger LOG = Logger.getLogger(HiveFrameSaverImpl.class);
private static final String SQL_DESCRIBE_TABLE = "DESCRIBE %s";
@Override
public String getExtensionName() {
return NAME;
}
@Override
public void saveFrameToHive(
Key<Frame> frameKey,
String jdbcUrl,
String tableName,
Format format,
String configuredTablePath,
String configuredTmpPath
) {
String filePath = null;
try {
String tmpPath = determineTmpPath(configuredTmpPath);
String storagePath = addHdfsPrefixToPath(configuredTablePath);
filePath = new Path(tmpPath, getRandomFileName(format)).toString();
LOG.info("Save frame " + frameKey + " to table " + tableName + " in " + jdbcUrl);
Frame frame = frameKey.get();
if (frame == null) {
throw new IllegalArgumentException("Frame with key " + frameKey + " not found.");
}
writeFrameToHdfs(frame, filePath, format);
loadDataIntoTable(jdbcUrl, tableName, storagePath, frame, filePath, format);
} catch (IOException e) {
throw new RuntimeException("Writing to Hive failed: " + e.getMessage(), e);
} finally {
if (filePath != null) safelyRemoveDataFile(filePath);
}
}
private String determineTmpPath(String configuredTmpPath) throws IOException {
if (configuredTmpPath == null) {
FileSystem fs = FileSystem.get(PersistHdfs.CONF);
String res = fs.getUri().toString() + "/tmp";
LOG.info("Using default temporary directory " + res);
return res;
} else {
return addHdfsPrefixToPath(configuredTmpPath);
}
}
private String addHdfsPrefixToPath(String path) throws IOException {
if (path == null) {
return null;
} else if (!path.startsWith("hdfs://")) {
FileSystem fs = FileSystem.get(PersistHdfs.CONF);
String res = fs.getUri().toString() + "/" + path;
LOG.info("Adding file system prefix to relative tmp_path " + res);
return res;
} else {
return path;
}
}
private String getRandomFileName(Format format) {
return "h2o_save_to_hive_" + UUID.randomUUID().toString() + "." + format.toString().toLowerCase();
}
private void safelyRemoveDataFile(String filePath) {
try {
Persist p = H2O.getPM().getPersistForURI(URI.create(filePath));
if (p.exists(filePath)) {
p.delete(filePath);
} else {
LOG.debug("Data file moved by Hive, doing nothing.");
}
} catch (Exception e) {
LOG.error("Failed cleaning up data file.", e);
}
}
private void writeFrameToHdfs(Frame frame, String filePath, Format format) throws IOException {
switch (format) {
case CSV:
writeFrameAsCsv(frame, filePath);
break;
case PARQUET:
writeFrameAsParquet(frame, filePath);
break;
default:
throw new IllegalArgumentException("Unsupported table format " + format);
}
}
private void writeFrameAsParquet(Frame frame, String filePath) throws IOException {
new FrameParquetWriter().write(frame, filePath);
}
private void writeFrameAsCsv(Frame f, String filePath) throws IOException {
Persist p = H2O.getPM().getPersistForURI(URI.create(filePath));
try (OutputStream os = p.create(filePath, false)) {
Frame.CSVStreamParams parms = new Frame.CSVStreamParams()
.setHeaders(false)
.setEscapeQuotes(true)
.setEscapeChar('\\');
InputStream is = f.toCSV(parms);
IOUtils.copyStream(is, os);
}
}
private void loadDataIntoTable(
String url,
String table,
String tablePath,
Frame frame,
String filePath,
Format format
) throws IOException {
try (Connection conn = SQLManager.getConnectionSafe(url, null, null)) {
if (!doesTableExist(conn, table)) {
createTable(conn, table, tablePath, frame, format);
} else {
throw new IllegalArgumentException("Table " + table + " already exists.");
}
executeDataLoad(conn, table, filePath);
} catch (SQLException e) {
throw new IOException("Failed to load data into Hive table: " + e.getMessage(), e);
}
}
private boolean doesTableExist(Connection conn, String table) {
try (Statement stmt = conn.createStatement()) {
try (ResultSet rs = stmt.executeQuery(String.format(SQL_DESCRIBE_TABLE, table))) {
return rs.next();
}
} catch (SQLException e) {
return false;
}
}
private void createTable(Connection conn, String table, String tablePath, Frame frame, Format format) throws SQLException {
try (Statement stmt = conn.createStatement()) {
String createQuery = makeCreateTableStatement(table, tablePath, frame, format);
LOG.info("Creating Hive table " + table + " with SQL: " + createQuery);
stmt.execute(createQuery);
}
}
private String makeCreateTableStatement(String table, String tablePath, Frame frame, Format format) {
StringBuilder sb = new StringBuilder();
sb.append("CREATE ");
if (tablePath != null) {
sb.append("EXTERNAL ");
}
sb.append("TABLE ").append(table).append(" (");
switch (format) {
case CSV:
makeCreateCSVTableStatement(sb, frame);
break;
case PARQUET:
makeCreateParquetTableStatement(sb, frame);
break;
default:
throw new IllegalArgumentException("Unsupported table format " + format);
}
if (tablePath != null) {
sb.append("\nLOCATION '").append(tablePath).append("'");
}
return sb.toString();
}
private void makeCreateCSVTableStatement(StringBuilder sb, Frame frame) {
for (int i = 0; i < frame.numCols(); i++) {
if (i > 0) sb.append(",\n");
sb.append(frame.name(i)).append(" string");
}
sb.append(") ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n")
.append("WITH SERDEPROPERTIES (\n")
.append(" \"separatorChar\" = \",\",\n")
.append(" \"quoteChar\" = \"\\\"\",\n")
.append(" \"escapeChar\" = \"\\\\\") STORED AS TEXTFILE");
}
private void makeCreateParquetTableStatement(StringBuilder sb, Frame frame) {
for (int i = 0; i < frame.numCols(); i++) {
if (i > 0) sb.append(",\n");
sb.append(frame.name(i)).append(" ").append(sqlDataType(frame.vec(i)));
}
sb.append(") STORED AS parquet");
}
private String sqlDataType(Vec v) {
if (v.isCategorical() || v.isUUID() || v.isString()) {
return "STRING";
} else if (v.isInt()) {
return "BIGINT";
} else {
return "DOUBLE";
}
}
private void executeDataLoad(Connection conn, String table, String filePath) throws SQLException {
try (Statement stmt = conn.createStatement()) {
LOG.info("Loading data file " + filePath + " into table " + table);
stmt.execute("LOAD DATA INPATH '" + filePath + "' OVERWRITE INTO TABLE " + table);
}
}
}
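// A standalone illustration of the DDL that makeCreateTableStatement above emits for the
// CSV format; the table and column names are invented, and running it only prints the
// statement (no Hive connection involved).
class CsvDdlExample {
  public static void main(String[] args) {
    String[] cols = {"id", "label"};
    StringBuilder sb = new StringBuilder("CREATE TABLE scores (");
    for (int i = 0; i < cols.length; i++) {
      if (i > 0) sb.append(",\n");
      sb.append(cols[i]).append(" string");
    }
    sb.append(") ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n")
      .append("WITH SERDEPROPERTIES (\n")
      .append("  \"separatorChar\" = \",\",\n")
      .append("  \"quoteChar\" = \"\\\"\",\n")
      .append("  \"escapeChar\" = \"\\\\\") STORED AS TEXTFILE");
    System.out.println(sb);
  }
}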
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/HiveMetaData.java
|
package water.hive;
import java.util.List;
import java.util.Map;
public interface HiveMetaData {
Table getTable(String name) throws Exception;
interface Storable {
Map<String, String> getSerDeParams();
String getLocation();
String getSerializationLib();
String getInputFormat();
}
interface Table extends Storable {
String getName();
boolean hasPartitions();
List<Partition> getPartitions();
List<Column> getColumns();
List<Column> getPartitionKeys();
}
interface Column {
String getName();
String getType();
}
interface Partition extends Storable {
List<String> getValues();
}
}
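// A sketch of consuming this interface through the DirectHiveMetadata implementation
// above; the database and table names are placeholders and a reachable Hive metastore
// is assumed.
class HiveMetaDataExample {
  public static void main(String[] args) throws Exception {
    HiveMetaData meta = new DirectHiveMetadata("default");
    HiveMetaData.Table t = meta.getTable("airlines");
    for (HiveMetaData.Column c : t.getColumns())
      System.out.println(c.getName() + " : " + c.getType());
    System.out.println("partitioned: " + t.hasPartitions());
  }
}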
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/HiveTableImporterImpl.java
|
package water.hive;
import org.apache.log4j.Logger;
import water.AbstractH2OExtension;
import water.H2O;
import water.Job;
import water.Key;
import water.api.ImportHiveTableHandler;
import water.fvec.Frame;
import water.fvec.Vec;
import water.parser.CsvParser;
import water.parser.ParseDataset;
import water.parser.ParseSetup;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static water.fvec.Vec.*;
import static water.parser.DefaultParserProviders.GUESS_INFO;
import static water.parser.ParseSetup.NO_HEADER;
@SuppressWarnings("unused") // called via reflection
public class HiveTableImporterImpl extends AbstractH2OExtension implements ImportHiveTableHandler.HiveTableImporter {
private static final Logger LOG = Logger.getLogger(HiveTableImporterImpl.class);
@Override
public String getExtensionName() {
return NAME;
}
private HiveMetaData getMetaDataClient(String database) {
if (database != null && database.startsWith("jdbc:")) {
return new JdbcHiveMetadata(database);
} else {
return new DirectHiveMetadata(database);
}
}
public Job<Frame> loadHiveTable(
String database,
String tableName,
String[][] partitionFilter,
boolean allowDifferentFormats
) throws Exception {
HiveMetaData.Table table = getMetaDataClient(database).getTable(tableName);
String targetFrame = "hive_table_" + tableName + Key.rand().substring(0, 10);
if (!table.hasPartitions()) {
return loadTable(table, targetFrame);
} else {
List<HiveMetaData.Partition> filteredPartitions = filterPartitions(table, partitionFilter);
if (arePartitionsSameFormat(table, filteredPartitions)) {
return loadPartitionsSameFormat(table, filteredPartitions, targetFrame);
} else if (allowDifferentFormats) {
return loadPartitions(table, filteredPartitions, targetFrame);
} else {
throw new IllegalArgumentException("Hive table contains partitions with differing formats. Use allow_multi_format if needed.");
}
}
}
private boolean arePartitionsSameFormat(HiveMetaData.Table table, List<HiveMetaData.Partition> partitions) {
String tableLib = table.getSerializationLib();
String tableInput = table.getInputFormat();
Map<String, String> tableParams = table.getSerDeParams();
for (HiveMetaData.Partition part : partitions) {
if (!tableLib.equals(part.getSerializationLib()) ||
!tableParams.equals(part.getSerDeParams()) ||
!tableInput.equals(part.getInputFormat())
) {
return false;
}
}
return true;
}
private List<HiveMetaData.Partition> filterPartitions(HiveMetaData.Table table, String[][] partitionFilter) {
if (partitionFilter == null || partitionFilter.length == 0) {
return table.getPartitions();
}
List<List<String>> filtersAsLists = new ArrayList<>(partitionFilter.length);
for (String[] f : partitionFilter) {
filtersAsLists.add(Arrays.asList(f));
}
List<HiveMetaData.Partition> matchedPartitions = new ArrayList<>(table.getPartitions().size());
for (HiveMetaData.Partition p : table.getPartitions()) {
for (List<String> filter : filtersAsLists) {
if (p.getValues().equals(filter)) {
matchedPartitions.add(p);
break;
}
}
}
if (matchedPartitions.isEmpty()) {
throw new IllegalArgumentException("Partition filter did not match any partitions.");
}
return matchedPartitions;
}
private byte getSeparator(HiveMetaData.Storable table) {
Map<String, String> serDeParams = table.getSerDeParams();
String explicitSeparator = serDeParams.get("field.delim"); // for basic TextFormat
if (explicitSeparator != null && !explicitSeparator.isEmpty()) {
return (byte) explicitSeparator.charAt(0);
}
explicitSeparator = serDeParams.get("separatorChar"); // for OpenCSV
if (explicitSeparator != null && !explicitSeparator.isEmpty()) {
return (byte) explicitSeparator.charAt(0);
} else {
return CsvParser.HIVE_SEP;
}
}
private ParseSetup guessTableSetup(Key[] filesKeys, HiveMetaData.Table table) {
ParseSetup setup = guessSetup(filesKeys, table);
List<HiveMetaData.Column> tableColumns = table.getColumns();
String[] columnNames = new String[tableColumns.size()];
byte[] columnTypes = new byte[tableColumns.size()];
fillColumnNamesAndTypes(tableColumns, columnNames, columnTypes);
setup.setColumnNames(columnNames);
setup.setColumnTypes(columnTypes);
setup.setNumberColumns(columnNames.length);
return setup;
}
private Job<Frame> parseTable(String targetFrame, Key[] filesKeys, ParseSetup setup) {
Key<Frame> destinationKey = Key.make(targetFrame);
ParseDataset parse = ParseDataset.parse(destinationKey, filesKeys, true, setup, false);
return parse._job;
}
private void checkTableNotEmpty(HiveMetaData.Table table, Key[] filesKeys) {
if (filesKeys.length == 0) {
throwTableEmpty(table);
}
}
private void throwTableEmpty(HiveMetaData.Table table) {
throw new IllegalArgumentException("Table " + table.getName() + " is empty. Nothing to import.");
}
private Job<Frame> loadTable(HiveMetaData.Table table, String targetFrame) {
Key[] filesKeys = importFiles(table.getLocation());
checkTableNotEmpty(table, filesKeys);
ParseSetup setup = guessTableSetup(filesKeys, table);
return parseTable(targetFrame, filesKeys, setup);
}
private Job<Frame> loadPartitionsSameFormat(HiveMetaData.Table table, List<HiveMetaData.Partition> partitions, String targetFrame) {
    List<Key> fileKeysList = new ArrayList<>();
    List<String[]> partitionValuesPerFile = new ArrayList<>(); // one entry of partition-key values per imported file
    for (HiveMetaData.Partition p : partitions) {
      Key[] partFileKeys = importFiles(p.getLocation());
      fileKeysList.addAll(Arrays.asList(partFileKeys));
      String[] keyValues = p.getValues().toArray(new String[0]);
      for (int i = 0; i < partFileKeys.length; i++) {
        partitionValuesPerFile.add(keyValues);
      }
    }
Key[] filesKeys = fileKeysList.toArray(new Key[0]);
checkTableNotEmpty(table, filesKeys);
ParseSetup setup = guessTableSetup(filesKeys, table);
String[] partitionKeys = new String[table.getPartitionKeys().size()];
for (int i = 0; i < table.getPartitionKeys().size(); i++) {
partitionKeys[i] = table.getPartitionKeys().get(i).getName();
}
    setup.setSyntheticColumns(partitionKeys, partitionValuesPerFile.toArray(new String[0][]), T_STR);
return parseTable(targetFrame, filesKeys, setup);
}
private Job<Frame> loadPartitions(HiveMetaData.Table table, List<HiveMetaData.Partition> partitions, String targetFrame) {
List<HiveMetaData.Column> partitionColumns = table.getPartitionKeys();
List<HiveMetaData.Column> tableColumns = table.getColumns();
String[] columnNames = new String[tableColumns.size()];
byte[] columnTypes = new byte[columnNames.length];
fillColumnNamesAndTypes(tableColumns, columnNames, columnTypes);
List<Job<Frame>> parseJobs = new ArrayList<>(partitions.size());
for (int i = 0; i < partitions.size(); i++) {
String partitionKey = "_" + targetFrame + "_part_" + i;
Job<Frame> job = parsePartition(partitionColumns, partitions.get(i), partitionKey, columnNames, columnTypes);
if (job != null) {
parseJobs.add(job);
}
}
if (parseJobs.isEmpty()) {
throwTableEmpty(table);
}
Job<Frame> job = new Job<>(Key.<Frame>make(targetFrame), Frame.class.getName(),"ImportHiveTable");
PartitionFrameJoiner joiner = new PartitionFrameJoiner(job, table, partitions, targetFrame, parseJobs);
return job.start(joiner, partitions.size()+1);
}
private Job<Frame> parsePartition(List<HiveMetaData.Column> partitionColumns, HiveMetaData.Partition part, String targetFrame, String[] columnNames, byte[] columnTypes) {
Key[] files = importFiles(part.getLocation());
if (files.length == 0) {
return null;
}
ParseSetup setup = guessSetup(files, part);
setup.setColumnNames(columnNames);
setup.setColumnTypes(columnTypes);
setup.setNumberColumns(columnNames.length);
ParseDataset parse = ParseDataset.parse(Key.make(targetFrame), files, true, setup, false);
return parse._job;
}
private void fillColumnNamesAndTypes(List<HiveMetaData.Column> columns, String[] columnNames, byte[] columnTypes) {
for (int i = 0; i < columns.size(); i++) {
HiveMetaData.Column col = columns.get(i);
columnNames[i] = col.getName();
columnTypes[i] = convertHiveType(col.getType());
}
}
private ParseSetup guessSetup(Key[] keys, HiveMetaData.Storable sd) {
ParseSetup parseGuess = new ParseSetup();
parseGuess.setParseType(GUESS_INFO);
parseGuess.setSeparator(getSeparator(sd));
parseGuess.setCheckHeader(NO_HEADER); // TBLPROPERTIES "skip.header.line.count"="1" not supported in metastore API
return ParseSetup.guessSetup(keys, parseGuess);
}
private Key[] stringsToKeys(List<String> strings) {
Key[] keys = new Key[strings.size()];
for (int i = 0; i < keys.length; i++) {
keys[i] = Key.make(strings.get(i));
}
return keys;
}
private Key[] importFiles(String path) {
ArrayList<String> files = new ArrayList<>();
ArrayList<String> keys = new ArrayList<>();
ArrayList<String> fails = new ArrayList<>();
ArrayList<String> dels = new ArrayList<>();
H2O.getPM().importFiles(path, null, files, keys, fails, dels);
if (!fails.isEmpty()) {
throw new RuntimeException("Failed to import some files: " + fails.toString());
}
return stringsToKeys(keys);
}
private Set<String> parseColumnFilter(String filter) {
Set<String> columnNames = new HashSet<>();
for (String colName : filter.split(",")) {
columnNames.add(colName.trim());
}
return columnNames;
}
static byte convertHiveType(String hiveType) {
return convertHiveType(hiveType, false);
}
static byte convertHiveType(final String hiveType, final boolean strict) {
final String sanitized = sanitizeHiveType(hiveType);
switch (sanitized) {
case "tinyint":
case "smallint":
case "int":
case "bigint":
case "integer":
case "float":
case "double":
case "double precision":
case "decimal":
case "numeric":
return T_NUM;
case "timestamp":
case "data":
return T_TIME;
case "interval":
case "string":
case "varchar":
case "char":
case "binary": // binary could be a UTF8-encoded String (similar to what Parquet does)
return T_STR;
case "boolean":
return T_CAT;
default:
if (strict)
throw new IllegalArgumentException("Unsupported column type: " + hiveType);
else {
LOG.warn("Unrecognized Hive type '" + hiveType + "'. Using String type instead.");
return T_STR;
}
}
}
static String sanitizeHiveType(String type) {
int paramIdx = type.indexOf('(');
if (paramIdx >= 0) {
type = type.substring(0, paramIdx);
}
return type.trim().toLowerCase();
}
}
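// A quick demonstration of the type-mapping helpers above; since they are package-private,
// this sketch assumes it lives in the water.hive package. The expected results follow the
// switch in convertHiveType.
import water.fvec.Vec;

class HiveTypeMappingExample {
  public static void main(String[] args) {
    System.out.println(HiveTableImporterImpl.sanitizeHiveType("DECIMAL(10,2)"));             // decimal
    System.out.println(HiveTableImporterImpl.convertHiveType("decimal(10,2)") == Vec.T_NUM); // true
    System.out.println(HiveTableImporterImpl.convertHiveType("varchar(255)") == Vec.T_STR);  // true
    System.out.println(HiveTableImporterImpl.convertHiveType("boolean") == Vec.T_CAT);       // true
  }
}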
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/HiveTokenGenerator.java
|
package water.hive;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hive.jdbc.HiveConnection;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
public class HiveTokenGenerator {
private static final String HIVE_DRIVER_CLASS = "org.apache.hive.jdbc.HiveDriver";
private static boolean isPresent(String value) {
return value != null && !value.isEmpty();
}
public static String makeHivePrincipalJdbcUrl(String hiveJdbcUrlPattern, String hiveHost, String hivePrincipal) {
if (isPresent(hiveJdbcUrlPattern) && isPresent(hivePrincipal)) {
String result = hiveJdbcUrlPattern;
if (hiveHost != null) result = result.replace("{{host}}", hiveHost);
result = result.replace("{{auth}}", "principal=" + hivePrincipal);
return result;
} else if (isPresent(hiveHost) && isPresent(hivePrincipal)) {
return "jdbc:hive2://" + hiveHost + "/" + ";principal=" + hivePrincipal;
} else {
return null;
}
}
public static String makeHiveDelegationTokenJdbcUrl(String hiveJdbcUrlPattern, String hiveHost) {
if (isPresent(hiveJdbcUrlPattern)) {
String result = hiveJdbcUrlPattern;
if (hiveHost != null) result = result.replace("{{host}}", hiveHost);
result = result.replace("{{auth}}", "auth=delegationToken");
return result;
} else if (isPresent(hiveHost)) {
return "jdbc:hive2://" + hiveHost + "/" + ";auth=delegationToken";
} else
return null;
}
public static String getHiveDelegationTokenIfHivePresent(
String hiveJdbcUrlPattern, String hiveHost, String hivePrincipal
) throws IOException, InterruptedException {
if (isHiveDriverPresent()) {
final String hiveJdbcUrl = makeHivePrincipalJdbcUrl(hiveJdbcUrlPattern, hiveHost, hivePrincipal);
return new HiveTokenGenerator().getHiveDelegationToken(hiveJdbcUrl, hivePrincipal);
} else {
log("Hive driver not present, not generating token.", null);
return null;
}
}
public static boolean addHiveDelegationTokenIfHivePresent(
Job job, String hiveJdbcUrlPattern, String hiveHost, String hivePrincipal
) throws IOException, InterruptedException {
if (isHiveDriverPresent()) {
final String hiveJdbcUrl = makeHivePrincipalJdbcUrl(hiveJdbcUrlPattern, hiveHost, hivePrincipal);
return new HiveTokenGenerator().addHiveDelegationToken(job, hiveJdbcUrl, hivePrincipal);
} else {
log("Hive driver not present, not generating token.", null);
return false;
}
}
public boolean addHiveDelegationToken(
Job job,
String hiveJdbcUrl,
String hivePrincipal
) throws IOException, InterruptedException {
if (!isPresent(hiveJdbcUrl) || !isPresent(hivePrincipal)) {
log("Hive JDBC URL or principal not set, no token generated.", null);
return false;
}
String token = getHiveDelegationToken(hiveJdbcUrl, hivePrincipal);
if (token != null) {
DelegationTokenPrinter.printToken(token);
addHiveDelegationToken(job, token);
return true;
} else {
log("Failed to get delegation token.", null);
return false;
}
}
public static void addHiveDelegationToken(Job job, String token) throws IOException {
Credentials creds = tokenToCredentials(token);
job.getCredentials().addAll(creds);
}
private String getHiveDelegationToken(String hiveJdbcUrl, String hivePrincipal) throws IOException, InterruptedException {
UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
UserGroupInformation realUser = currentUser;
if (realUser.getRealUser() != null) {
realUser = realUser.getRealUser();
}
return getHiveDelegationTokenAsUser(realUser, currentUser, hiveJdbcUrl, hivePrincipal);
}
public String getHiveDelegationTokenAsUser(
UserGroupInformation realUser, final UserGroupInformation user, final String hiveJdbcUrl, final String hivePrincipal
) throws IOException, InterruptedException {
return realUser.doAs(new PrivilegedExceptionAction<String>() {
@Override
public String run() {
return getHiveDelegationTokenIfPossible(user, hiveJdbcUrl, hivePrincipal);
}
});
}
private static void log(String s, Exception e) {
System.out.println(s);
if (e != null) {
e.printStackTrace(System.out);
}
}
private String getDelegationTokenFromConnection(String hiveJdbcUrl, String hivePrincipal, String userName) {
if (!isHiveDriverPresent()) {
throw new IllegalStateException("Hive Driver not found");
}
try (Connection connection = DriverManager.getConnection(hiveJdbcUrl)) {
return ((HiveConnection) connection).getDelegationToken(userName, hivePrincipal);
} catch (SQLException e) {
log("Failed to get connection.", e);
return null;
}
}
public String getHiveDelegationTokenIfPossible(UserGroupInformation tokenUser, String hiveJdbcUrl, String hivePrincipal) {
if (!isHiveDriverPresent()) {
return null;
}
String tokenUserName = tokenUser.getShortUserName();
log("Getting delegation token from " + hiveJdbcUrl + ", " + tokenUserName, null);
return getDelegationTokenFromConnection(hiveJdbcUrl, hivePrincipal, tokenUserName);
}
public static Credentials tokenToCredentials(String tokenStr) throws IOException {
if (tokenStr != null) {
Token<DelegationTokenIdentifier> hive2Token = new Token<>();
hive2Token.decodeFromUrlString(tokenStr);
hive2Token.setService(new Text("hiveserver2ClientToken"));
Credentials creds = new Credentials();
creds.addToken(new Text("hive.server2.delegation.token"), hive2Token);
creds.addToken(new Text("hiveserver2ClientToken"), hive2Token); //HiveAuthConstants.HS2_CLIENT_TOKEN
return creds;
} else {
return null;
}
}
public static boolean isHiveDriverPresent() {
try {
Class.forName(HIVE_DRIVER_CLASS);
return true;
} catch (ClassNotFoundException e) {
return false;
}
}
}
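// A runnable illustration of the URL helpers above; the pattern, host, and principal
// are placeholders.
class HiveJdbcUrlExample {
  public static void main(String[] args) {
    String pattern = "jdbc:hive2://{{host}}/default;ssl=true;{{auth}}";
    System.out.println(HiveTokenGenerator.makeHivePrincipalJdbcUrl(
        pattern, "hive.example.com:10000", "hive/_HOST@EXAMPLE.COM"));
    // -> jdbc:hive2://hive.example.com:10000/default;ssl=true;principal=hive/_HOST@EXAMPLE.COM
    System.out.println(HiveTokenGenerator.makeHiveDelegationTokenJdbcUrl(
        pattern, "hive.example.com:10000"));
    // -> jdbc:hive2://hive.example.com:10000/default;ssl=true;auth=delegationToken
  }
}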
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/ImpersonationUtils.java
|
package water.hive;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
public class ImpersonationUtils {
public interface Callback { void call(String msg); }
public static void validateImpersonationArgs(
String principal, String keytabPath, String runAsUser,
Callback error, Callback warn
) {
if (principal != null || keytabPath != null) {
if (principal == null) {
error.call("keytab requires a valid principal (use the '-principal' option)");
}
if (keytabPath == null) {
error.call("principal requires a valid keytab path (use the '-keytab' option)");
}
if (runAsUser != null) {
warn.call("will attempt secure impersonation with user from '-run_as_user', " + runAsUser);
}
}
}
public static void impersonate(Configuration conf, String principal, String keytabPath, String runAsUser) throws IOException {
if (principal != null && keytabPath != null) {
UserGroupInformation.setConfiguration(conf);
UserGroupInformation.loginUserFromKeytab(principal, keytabPath);
      // performs user impersonation (works only if core-site.xml has hadoop.proxyuser.*.* props set on the name node)
if (runAsUser != null) {
System.out.println("Attempting to securely impersonate user, " + runAsUser);
UserGroupInformation currentEffUser = UserGroupInformation.getLoginUser();
UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(runAsUser, currentEffUser);
UserGroupInformation.setLoginUser(proxyUser);
}
} else if (runAsUser != null) {
UserGroupInformation.setConfiguration(conf);
UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(runAsUser));
}
}
}
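// A small sketch of the validation hook above, passing lambdas for the Callback interface;
// the argument values are placeholders (the null keytab deliberately trips the error callback).
class ImpersonationValidationExample {
  public static void main(String[] args) {
    ImpersonationUtils.validateImpersonationArgs(
        "h2o/host@EXAMPLE.COM",                               // principal
        null,                                                 // keytabPath -> triggers the error callback
        "joe",                                                // runAsUser
        msg -> { throw new IllegalArgumentException(msg); },  // error
        msg -> System.err.println("WARN: " + msg));           // warn
  }
}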
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/JdbcHiveMetadata.java
|
package water.hive;
import org.apache.log4j.Logger;
import water.jdbc.SQLManager;
import water.util.JSONUtils;
import java.sql.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyList;
@SuppressWarnings({"rawtypes", "unchecked"})
public class JdbcHiveMetadata implements HiveMetaData {
private static final Logger LOG = Logger.getLogger(JdbcHiveMetadata.class);
private static final String SQL_SET_JSON_OUTPUT = "set hive.ddl.output.format=json";
private static final String SQL_GET_VERSION = "select version()";
private static final String SQL_DESCRIBE_TABLE = "DESCRIBE EXTENDED %s";
private static final String SQL_DESCRIBE_PARTITION = "DESCRIBE EXTENDED %s PARTITION ";
private static final String SQL_SHOW_PARTS = "SHOW PARTITIONS %s";
private final String url;
public JdbcHiveMetadata(String url) {
this.url = url;
}
static class StorableMetadata {
String location;
String serializationLib;
String inputFormat;
Map<String, String> serDeParams = Collections.emptyMap();
}
static class JdbcStorable implements Storable {
private final String location;
private final String serializationLib;
private final String inputFormat;
private final Map<String, String> serDeParams;
JdbcStorable(StorableMetadata data) {
this.location = data.location;
this.serializationLib = data.serializationLib;
this.inputFormat = data.inputFormat;
this.serDeParams = data.serDeParams;
}
@Override
public Map<String, String> getSerDeParams() {
return serDeParams;
}
@Override
public String getLocation() {
return location;
}
@Override
public String getSerializationLib() {
return serializationLib;
}
@Override
public String getInputFormat() {
return inputFormat;
}
}
static class JdbcPartition extends JdbcStorable implements Partition {
private final List<String> values;
JdbcPartition(StorableMetadata meta, List<String> values) {
super(meta);
this.values = values;
}
@Override
public List<String> getValues() {
return values;
}
}
static class JdbcColumn implements Column {
private final String name;
private final String type;
JdbcColumn(String name, String type) {
this.name = name;
this.type = type;
}
@Override
public String getName() {
return name;
}
@Override
public String getType() {
return type;
}
}
static class JdbcTable extends JdbcStorable implements Table {
private final String name;
private final List<Partition> partitions;
private final List<Column> columns;
private final List<Column> partitionKeys;
public JdbcTable(
String name,
StorableMetadata meta,
List<Column> columns,
List<Partition> partitions,
List<Column> partitionKeys
) {
super(meta);
this.name = name;
this.partitions = partitions;
this.columns = columns;
this.partitionKeys = partitionKeys;
}
@Override
public String getName() {
return name;
}
@Override
public boolean hasPartitions() {
return !partitionKeys.isEmpty();
}
@Override
public List<Partition> getPartitions() {
return partitions;
}
@Override
public List<Column> getColumns() {
return columns;
}
@Override
public List<Column> getPartitionKeys() {
return partitionKeys;
}
}
private String executeQuery(Connection conn, String query) throws SQLException {
try (Statement stmt = conn.createStatement()) {
try (ResultSet rs = stmt.executeQuery(query)) {
boolean hasData = rs.next();
assert hasData : "Query has no result rows.";
return rs.getString(1);
}
}
}
private Map<String, Object> executeAndParseJsonResultSet(
Connection conn, String queryPattern, String tableName
) throws SQLException {
String query = String.format(queryPattern, tableName);
LOG.info("Executing Hive metadata query " + query);
String json = executeQuery(conn, query);
return JSONUtils.parse(json);
}
@Override
public Table getTable(String tableName) throws SQLException {
try (Connection conn = SQLManager.getConnectionSafe(url, null, null)) {
try (Statement stmt = conn.createStatement()) {
stmt.execute(SQL_SET_JSON_OUTPUT);
}
return getTable(conn, tableName);
}
}
private Table getTable(Connection conn, String name) throws SQLException {
Map<String, Object> tableData = executeAndParseJsonResultSet(conn, SQL_DESCRIBE_TABLE, name);
List<Column> columns = readColumns((List<Map<String, Object>>) tableData.get("columns"));
Map<String, Object> tableInfo = (Map<String, Object>) tableData.get("tableInfo");
List<Column> partitionKeys = readPartitionKeys(tableInfo);
columns = columns.subList(0, columns.size() - partitionKeys.size()); // remove partition keys from the end
List<Partition> partitions = readPartitions(conn, name, partitionKeys);
StorableMetadata storableData = readStorableMetadata(tableInfo);
return new JdbcTable(name, storableData, columns, partitions, partitionKeys);
}
private String getHiveVersionMajor(Connection conn) {
try {
String versionStr = executeQuery(conn, SQL_GET_VERSION);
return versionStr.substring(0, 1);
} catch (SQLException e) {
return "1"; // older hive versions do not support version() function
}
}
private StorableMetadata readStorableMetadata(Map<String, Object> tableInfo) {
StorableMetadata res = new StorableMetadata();
Map<String, Object> sd = (Map<String, Object>) tableInfo.get("sd");
res.location = (String) sd.get("location");
res.inputFormat = (String) sd.get("inputFormat");
Map serDeInfo = (Map) sd.get("serdeInfo");
res.serializationLib = (String) serDeInfo.get("serializationLib");
res.serDeParams = (Map<String, String>) serDeInfo.get("parameters");
return res;
}
private List<Partition> readPartitions(
Connection conn,
String tableName,
List<Column> partitionKeys
) throws SQLException {
if (partitionKeys.isEmpty()) {
return emptyList();
}
Map<String, Object> partitionsResult = executeAndParseJsonResultSet(conn, SQL_SHOW_PARTS, tableName);
String hiveVersion = getHiveVersionMajor(conn);
List<Partition> partitions = new ArrayList<>();
List<Map<String, Object>> partitionsData = (List<Map<String, Object>>) partitionsResult.get("partitions");
for (Map<String, Object> partition : partitionsData) {
List<String> values = parsePartitionValues(partition, hiveVersion);
StorableMetadata data = readPartitionMetadata(conn, tableName, partitionKeys, values);
partitions.add(new JdbcPartition(data, values));
}
return partitions;
}
private StorableMetadata readPartitionMetadata(
Connection conn,
String tableName,
List<Column> partitionKeys,
List<String> values
) throws SQLException {
String query = getDescribePartitionQuery(partitionKeys, values);
Map<String, Object> data = executeAndParseJsonResultSet(conn, query, tableName);
Map<String, Object> info = (Map<String, Object>) data.get("partitionInfo");
return readStorableMetadata(info);
}
private String getDescribePartitionQuery(List<Column> partitionKeys, List<String> values) {
StringBuilder sb = new StringBuilder();
sb.append(SQL_DESCRIBE_PARTITION).append("(");
for (int i = 0; i < partitionKeys.size(); i++) {
if (i > 0) sb.append(", ");
String escapedValue = values.get(i).replace("\"", "\\\"");
sb.append(partitionKeys.get(i).getName()).append("=\"").append(escapedValue).append("\"");
}
sb.append(")");
return sb.toString();
}
private String unescapePartitionValue(String value, String hiveVersion) {
if (!"1".equals(hiveVersion)) {
// hive 2+ does the un-escaping automatically
return value;
} else {
return value.replace("\\\"", "\"");
}
}
private List<String> parsePartitionValues(Map<String, Object> partition, String hiveVersion) {
List<String> values = new ArrayList<>();
List<Map<String, Object>> valuesData = (List<Map<String, Object>>) partition.get("values");
for (Map<String, Object> valueRecord : valuesData) {
String value = unescapePartitionValue((String) valueRecord.get("columnValue"), hiveVersion);
values.add(value);
}
return values;
}
private List<Column> readPartitionKeys(Map<String, Object> tableInfo) {
if (!tableInfo.containsKey("partitionKeys")) {
return emptyList();
} else {
List<Map<String, Object>> partitionColumns = (List<Map<String, Object>>) tableInfo.get("partitionKeys");
return readColumns(partitionColumns);
}
}
private List<Column> readColumns(List<Map<String, Object>> columnDataList) {
List<Column> columns = new ArrayList<>();
for (Map column : columnDataList) {
columns.add(new JdbcColumn((String) column.get("name"), (String) column.get("type")));
}
return columns;
}
}
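// A standalone illustration of the DESCRIBE statement that getDescribePartitionQuery above
// assembles; the table name, key names, and values are invented.
class DescribePartitionQueryExample {
  public static void main(String[] args) {
    String[] keys = {"year", "month"};
    String[] values = {"2024", "07"};
    StringBuilder sb = new StringBuilder("DESCRIBE EXTENDED %s PARTITION (");
    for (int i = 0; i < keys.length; i++) {
      if (i > 0) sb.append(", ");
      sb.append(keys[i]).append("=\"").append(values[i].replace("\"", "\\\"")).append("\"");
    }
    sb.append(")");
    // -> DESCRIBE EXTENDED flights PARTITION (year="2024", month="07")
    System.out.println(String.format(sb.toString(), "flights"));
  }
}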
|
0
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water
|
java-sources/ai/h2o/h2o-hive/3.46.0.7/water/hive/PartitionFrameJoiner.java
|
package water.hive;
import water.*;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Rapids;
import water.rapids.Val;
import java.util.List;
import static water.fvec.Vec.T_STR;
import static water.fvec.Vec.makeZero;
public class PartitionFrameJoiner extends H2O.H2OCountedCompleter<PartitionFrameJoiner> {
private final Job<Frame> _job;
private final HiveMetaData.Table _table;
private final List<HiveMetaData.Partition> _partitions;
private final String _targetFrame;
private final List<Job<Frame>> _parseJobs;
public PartitionFrameJoiner(
Job<Frame> job, HiveMetaData.Table table, List<HiveMetaData.Partition> partitions, String targetFrame, List<Job<Frame>> parseJobs
) {
_job = job;
_table = table;
_partitions = partitions;
_targetFrame = targetFrame;
_parseJobs = parseJobs;
}
@Override
public void compute2() {
int keyCount = _table.getPartitionKeys().size();
StringBuilder partKeys = new StringBuilder();
for (Job<Frame> job : _parseJobs) {
Frame partitionFrame = job.get();
String partKey = partitionFrame._key.toString();
// frame keys are suffixed with "_<partitionIndex>"; recover the index
String[] keySplit = partKey.split("_");
int partIndex = Integer.parseInt(keySplit[keySplit.length - 1]);
HiveMetaData.Partition part = _partitions.get(partIndex);
partKeys.append(" ").append(partKey);
long rows = partitionFrame.numRows();
// materialize each partition-key value as a constant column on the frame
for (int keyIndex = 0; keyIndex < keyCount; keyIndex++) {
String partitionValue = part.getValues().get(keyIndex);
Vec vec = makeVecWithValue(rows, partitionValue);
partitionFrame.add(_table.getPartitionKeys().get(keyIndex).getName(), vec);
}
_job.update(1);
}
// row-bind all partition frames and publish the result under the target key
String tree = "(rbind" + partKeys + ")";
Val val = Rapids.exec(tree);
Frame merged = val.getFrame();
merged._key = Key.make(_targetFrame);
DKV.put(merged);
// the per-partition frames are no longer needed once merged
for (Job<Frame> parseJob : _parseJobs) {
DKV.remove(parseJob._result);
}
_job.update(1);
tryComplete();
}
private Vec makeVecWithValue(long rows, final String value) {
// allocate a string Vec, then fill every row with the constant value in a
// distributed MRTask pass
Vec zeroes = makeZero(rows, T_STR);
return new MRTask() {
@Override
public void map(Chunk[] cs) {
for (Chunk c : cs)
for (int r = 0; r < c._len; r++)
c.set(r, value);
}
}.doAll(zeroes)._fr.vecs()[0];
}
}
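A hedged usage sketch for the joiner: once the per-partition parse jobs are running, it is submitted as an H2O task. The target key name "hive_table.hex" is illustrative, and each parsed frame's key is assumed to end in "_<partitionIndex>" so compute2() can map it back to its partition:
static Frame importHiveTable(Job<Frame> job, HiveMetaData.Table table,
List<HiveMetaData.Partition> partitions, List<Job<Frame>> parseJobs) {
// block until the joiner has published the merged frame under the target key
H2O.submitTask(new PartitionFrameJoiner(job, table, partitions, "hive_table.hex", parseJobs)).join();
return DKV.get(Key.make("hive_table.hex")).get();
}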
|
0
|
java-sources/ai/h2o/h2o-jaas-pam/3.46.0.7/de/codedo
|
java-sources/ai/h2o/h2o-jaas-pam/3.46.0.7/de/codedo/jaas/PamLoginModule.java
|
package de.codedo.jaas;
import java.io.IOException;
import java.security.Principal;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
import org.jvnet.libpam.PAM;
import org.jvnet.libpam.PAMException;
import org.jvnet.libpam.UnixUser;
public class PamLoginModule implements LoginModule
{
public static final String SERVICE_KEY = "service";
private PAM _pam;
private Subject _subject;
private CallbackHandler _callbackHandler;
private Map<String, ?> _options;
private String _username;
private String _password;
private boolean _authSucceeded;
private PamPrincipal _principal;
public PamLoginModule()
{
_authSucceeded = false;
}
@Override
public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState, Map<String, ?> options)
{
_subject = subject;
_callbackHandler = callbackHandler;
_options = new HashMap<>(options);
}
@Override
public boolean login() throws LoginException
{
initializePam();
obtainUserAndPassword();
return performLogin();
}
private void initializePam() throws LoginException
{
String service = (String)_options.get(SERVICE_KEY);
if (service == null)
{
throw new LoginException("Error: PAM service was not defined");
}
createPam(service);
}
private void createPam(String service) throws LoginException
{
try
{
_pam = new PAM(service);
}
catch (PAMException ex)
{
LoginException le = new LoginException("Error initializing PAM");
le.initCause(ex);
throw le;
}
}
private void obtainUserAndPassword() throws LoginException
{
if (_callbackHandler == null)
{
throw new LoginException("Error: no CallbackHandler available to gather authentication information from the user");
}
try
{
NameCallback nameCallback = new NameCallback("username");
PasswordCallback passwordCallback = new PasswordCallback("password", false);
invokeCallbackHandler(nameCallback, passwordCallback);
initUserName(nameCallback);
initPassword(passwordCallback);
}
catch (IOException | UnsupportedCallbackException ex)
{
LoginException le = new LoginException("Error in callbacks");
le.initCause(ex);
throw le;
}
}
private void invokeCallbackHandler(NameCallback nameCallback, PasswordCallback passwordCallback) throws IOException, UnsupportedCallbackException
{
Callback[] callbacks = new Callback[2];
callbacks[0] = nameCallback;
callbacks[1] = passwordCallback;
_callbackHandler.handle(callbacks);
}
private void initUserName(NameCallback nameCallback)
{
_username = nameCallback.getName();
}
private void initPassword(PasswordCallback passwordCallback)
{
char[] password = passwordCallback.getPassword();
_password = new String(password);
passwordCallback.clearPassword();
}
private boolean performLogin() throws LoginException
{
try
{
UnixUser user = _pam.authenticate(_username, _password);
_principal = new PamPrincipal(user);
_authSucceeded = true;
return true;
}
catch (PAMException ex)
{
LoginException le = new FailedLoginException("Invalid username or password");
le.initCause(ex);
throw le;
}
}
@Override
public boolean commit() throws LoginException
{
if (!_authSucceeded)
{
return false;
}
if (_subject.isReadOnly())
{
cleanup();
throw new LoginException("Subject is read-only");
}
Set<Principal> principals = _subject.getPrincipals();
if (!principals.contains(_principal))
{
principals.add(_principal);
}
return true;
}
@Override
public boolean abort() throws LoginException
{
if (!_authSucceeded)
{
return false;
}
cleanup();
return true;
}
@Override
public boolean logout() throws LoginException
{
if (_subject.isReadOnly())
{
cleanup();
throw new LoginException("Subject is read-only");
}
_subject.getPrincipals().remove(_principal);
cleanup();
return true;
}
private void cleanup()
{
_authSucceeded = false;
_username = null;
_password = null;
_principal = null;
if (_pam != null) // logout() can be called without a prior successful login
{
_pam.dispose();
_pam = null;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-jaas-pam/3.46.0.7/de/codedo
|
java-sources/ai/h2o/h2o-jaas-pam/3.46.0.7/de/codedo/jaas/PamPrincipal.java
|
package de.codedo.jaas;
import java.security.Principal;
import java.util.Collections;
import java.util.Set;
import org.jvnet.libpam.UnixUser;
public class PamPrincipal implements Principal
{
private final String _userName;
private final String _gecos;
private final String _homeDir;
private final String _shell;
private final int _uid;
private final int _gid;
private final Set<String> _groups;
public PamPrincipal(UnixUser user)
{
_userName = user.getUserName();
_gecos = user.getGecos();
_homeDir = user.getDir();
_shell = user.getShell();
_uid = user.getUID();
_gid = user.getGID();
_groups = Collections.unmodifiableSet(user.getGroups());
}
@Override
public String getName()
{
return _userName;
}
public String getGecos()
{
return _gecos;
}
public String getHomeDir()
{
return _homeDir;
}
public String getShell()
{
return _shell;
}
public int getUid()
{
return _uid;
}
public int getGid()
{
return _gid;
}
public Set<String> getGroups()
{
return _groups;
}
}
|
0
|
java-sources/ai/h2o/h2o-jaas-pam/3.46.0.7/de/codedo
|
java-sources/ai/h2o/h2o-jaas-pam/3.46.0.7/de/codedo/jaas/UsernamePasswordCallbackHandler.java
|
package de.codedo.jaas;
import java.io.IOException;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
public class UsernamePasswordCallbackHandler implements CallbackHandler
{
private final String _user;
private final String _password;
public UsernamePasswordCallbackHandler(String user, String password)
{
_user = user;
_password = password;
}
@Override
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException
{
for (Callback callback : callbacks)
{
if (callback instanceof NameCallback)
{
handleName((NameCallback)callback);
}
else if (callback instanceof PasswordCallback)
{
handlePassword((PasswordCallback)callback);
}
else
{
throw new UnsupportedCallbackException(callback);
}
}
}
private void handleName(NameCallback callback)
{
callback.setName(_user);
}
private void handlePassword(PasswordCallback callback)
{
char[] passwordChars = _password.toCharArray();
callback.setPassword(passwordChars);
}
}
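With all three classes in place, a standard JAAS login can be sketched end to end. The "pam-login" configuration name and "sshd" service are illustrative and must match an entry in the JAAS login configuration:
// assumed JAAS configuration entry (e.g. in a file passed via
// -Djava.security.auth.login.config):
//
// pam-login {
//     de.codedo.jaas.PamLoginModule required service="sshd";
// };
import javax.security.auth.Subject;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
public class PamLoginExample {
public static void main(String[] args) throws LoginException {
CallbackHandler handler = new UsernamePasswordCallbackHandler(args[0], args[1]);
LoginContext context = new LoginContext("pam-login", handler);
context.login(); // throws FailedLoginException on bad credentials
Subject subject = context.getSubject();
// commit() attached a PamPrincipal for the authenticated Unix user
for (PamPrincipal p : subject.getPrincipals(PamPrincipal.class)) {
System.out.println(p.getName() + " uid=" + p.getUid() + " groups=" + p.getGroups());
}
context.logout();
}
}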
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/AboutEntryV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class AboutEntryV3 extends Schema {
/** Property name */
public String name;
/** Property value */
public String value;
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/AboutV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class AboutV3 extends RequestSchema {
/** List of properties about this running H2O instance */
public AboutEntryV3[] entries;
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
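These schema POJOs are plain Gson carriers, so a /3/About response body binds directly onto them. A round-trip sketch with an illustrative JSON literal:
import com.google.gson.Gson;
public class AboutRoundTrip {
public static void main(String[] args) {
String json = "{\"entries\":[{\"name\":\"Build git branch\",\"value\":\"master\"}]}";
AboutV3 about = new Gson().fromJson(json, AboutV3.class);
System.out.println(about.entries[0].name); // Build git branch
System.out.println(about); // toString() re-serializes via Gson
}
}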
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/Activation.java
|
package water.bindings.pojos;
public enum Activation {
Tanh,
TanhWithDropout,
Rectifier,
RectifierWithDropout,
Maxout,
MaxoutWithDropout,
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/AssemblyKeyV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class AssemblyKeyV3 extends KeyV3 {
/* INHERITED: Name (string representation) for this Key.
* public String name;
*/
/* INHERITED: Name (string representation) for the type of Keyed this Key points to.
* public String type;
*/
/* INHERITED: URL for the resource that this Key points to, if one exists.
* public String URL;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/AssemblyV99.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class AssemblyV99 extends RequestSchema {
/** A list of steps describing the assembly line. */
public String[] steps;
/** Input Frame for the assembly. */
public FrameKeyV3 frame;
/** The name of the file and generated class */
public String pojo_name;
/** The key of the Assembly object to retrieve from the DKV. */
public String assembly_id;
/** Output of the assembly line. */
public FrameKeyV3 result;
/** A Key to the fit Assembly data structure */
public AssemblyKeyV3 assembly;
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/BuilderVisibility.java
|
package water.bindings.pojos;
public enum BuilderVisibility {
Experimental,
Beta,
AlwaysVisible,
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CartesianSearchCriteriaV99.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class CartesianSearchCriteriaV99 extends HyperSpaceSearchCriteriaV99 {
/* INHERITED: Hyperparameter space search strategy.
* public Strategy strategy;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ClassSamplingMethod.java
|
package water.bindings.pojos;
public enum ClassSamplingMethod {
Uniform,
Stratified,
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CloudV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class CloudV3 extends RequestSchema {
/** skip_ticks */
public boolean skip_ticks;
/** version */
public String version;
/** branch_name */
public String branch_name;
/** build_number */
public String build_number;
/** Node index number cloud status is collected from (zero-based) */
public int node_idx;
/** cloud_name */
public String cloud_name;
/** cloud_size */
public int cloud_size;
/** cloud_uptime_millis */
public long cloud_uptime_millis;
/** cloud_healthy */
public boolean cloud_healthy;
/** Nodes reporting unhealthy */
public int bad_nodes;
/** Cloud voting is stable */
public boolean consensus;
/** Cloud is accepting new members or not */
public boolean locked;
/** Cloud is in client mode. */
public boolean is_client;
/** nodes */
public NodeV3[] nodes;
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ClusteringModelBuilderSchema.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class ClusteringModelBuilderSchema extends ModelBuilderSchema {
/* INHERITED: Model builder parameters.
* public ModelParametersSchema parameters;
*/
/* INHERITED: The algo name for this ModelBuilder.
* public String algo;
*/
/* INHERITED: The pretty algo name for this ModelBuilder (e.g., Generalized Linear Model, rather than GLM).
* public String algo_full_name;
*/
/* INHERITED: Model categories this ModelBuilder can build.
* public ModelCategory[] can_build;
*/
/* INHERITED: Should the builder always be visible, be marked as beta, or only visible if the user starts up with the experimental flag?
* public BuilderVisibility visibility;
*/
/* INHERITED: Job Key
* public JobV3 job;
*/
/* INHERITED: Parameter validation messages
* public ValidationMessageBase[] messages;
*/
/* INHERITED: Count of parameter validation errors
* public int error_count;
*/
/* INHERITED: HTTP status to return for this build.
* public int __http_status;
*/
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ClusteringModelParametersSchema.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class ClusteringModelParametersSchema extends ModelParametersSchema {
/** Number of clusters */
public int k;
/* INHERITED: Destination id for this model; auto-generated if not specified
* public ModelKeyV3 model_id;
*/
/* INHERITED: Training frame
* public FrameKeyV3 training_frame;
*/
/* INHERITED: Validation frame
* public FrameKeyV3 validation_frame;
*/
/* INHERITED: Number of folds for N-fold cross-validation
* public int nfolds;
*/
/* INHERITED: Keep cross-validation model predictions
* public boolean keep_cross_validation_predictions;
*/
/* INHERITED: Keep cross-validation fold assignment
* public boolean keep_cross_validation_fold_assignment;
*/
/* INHERITED: Allow parallel training of cross-validation models
* public boolean parallelize_cross_validation;
*/
/* INHERITED: Response column
* public ColSpecifierV3 response_column;
*/
/* INHERITED: Column with observation weights
* public ColSpecifierV3 weights_column;
*/
/* INHERITED: Offset column
* public ColSpecifierV3 offset_column;
*/
/* INHERITED: Column with cross-validation fold index assignment per observation
* public ColSpecifierV3 fold_column;
*/
/* INHERITED: Cross-validation fold assignment scheme, if fold_column is not specified
* public FoldAssignmentScheme fold_assignment;
*/
/* INHERITED: Ignored columns
* public String[] ignored_columns;
*/
/* INHERITED: Ignore constant columns
* public boolean ignore_const_cols;
*/
/* INHERITED: Whether to score during each iteration of model training
* public boolean score_each_iteration;
*/
/* INHERITED: Model checkpoint to resume training with
* public ModelKeyV3 checkpoint;
*/
/* INHERITED: Early stopping based on convergence of stopping_metric. Stop if simple moving average of length k of the stopping_metric does not improve for k:=stopping_rounds scoring events (0 to disable)
* public int stopping_rounds;
*/
/* INHERITED: Maximum allowed runtime in seconds for model training. Use 0 to disable.
* public double max_runtime_secs;
*/
/* INHERITED: Metric to use for early stopping (AUTO: logloss for classification, deviance for regression)
* public StoppingMetric stopping_metric;
*/
/* INHERITED: Relative tolerance for metric-based stopping criterion (stop if relative improvement is not at least this much)
* public double stopping_tolerance;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ColSpecifierV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class ColSpecifierV3 extends Schema {
/** Name of the column */
public String column_name;
/** List of frames that must contain this column */
public String[] is_member_of_frames;
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ColV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class ColV3 extends Schema {
/** label */
public String label;
/** missing */
public long missing_count;
/** zeros */
public long zero_count;
/** positive infinities */
public long positive_infinity_count;
/** negative infinities */
public long negative_infinity_count;
/** mins */
public double[] mins;
/** maxs */
public double[] maxs;
/** mean */
public double mean;
/** sigma */
public double sigma;
/** datatype: {enum, string, int, real, time, uuid} */
public String type;
/** domain; not-null for categorical columns only */
public String[] domain;
/** cardinality of this column's domain; not-null for categorical columns only */
public int domain_cardinality;
/** data */
public double[] data;
/** string data */
public String[] string_data;
/** decimal precision, -1 for all digits */
public byte precision;
/** Histogram bins; null if not computed */
public long[] histogram_bins;
/** Start of histogram bin zero */
public double histogram_base;
/** Stride per bin */
public double histogram_stride;
/** Percentile values, matching the default percentiles */
public double[] percentiles;
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
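The histogram fields above describe fixed-width bins. A client-side sketch, assuming bin i spans [histogram_base + i*histogram_stride, histogram_base + (i+1)*histogram_stride):
public class HistogramSketch {
static void printHistogram(ColV3 col) {
if (col.histogram_bins == null) return; // null if not computed
for (int i = 0; i < col.histogram_bins.length; i++) {
double lo = col.histogram_base + i * col.histogram_stride;
double hi = lo + col.histogram_stride;
System.out.printf("[%s, %s): %d%n", lo, hi, col.histogram_bins[i]);
}
}
}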
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ColumnSpecsBase.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class ColumnSpecsBase extends Schema {
/** Column Name */
public String name;
/** Column Type */
public String type;
/** Column Format (printf) */
public String format;
/** Column Description */
public String description;
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CombineMethod.java
|
package water.bindings.pojos;
public enum CombineMethod {
INTERPOLATE,
AVG,
LO,
HI,
}
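A sketch of what the four variants conventionally mean when a requested quantile falls between two sorted values lo and hi, with frac the fractional position between them; this mirrors common quantile semantics rather than quoting H2O's implementation:
static double combine(CombineMethod m, double lo, double hi, double frac) {
switch (m) {
case INTERPOLATE: return lo + frac * (hi - lo); // linear interpolation
case AVG: return (lo + hi) / 2.0; // midpoint of the neighbors
case LO: return lo; // take the lower neighbor
case HI: return hi; // take the upper neighbor
default: throw new AssertionError(m);
}
}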
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ConfusionMatrixBase.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class ConfusionMatrixBase extends Schema {
/** Annotated confusion matrix */
public TwoDimTableV3 table;
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/ConfusionMatrixV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class ConfusionMatrixV3 extends ConfusionMatrixBase {
/* INHERITED: Annotated confusion matrix
* public TwoDimTableV3 table;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CoxPHModelOutputV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
import java.util.Map;
public class CoxPHModelOutputV3 extends ModelOutputSchema {
/* INHERITED: Column names
* public String[] names;
*/
/* INHERITED: Domains for categorical columns
* public String[][] domains;
*/
/* INHERITED: Cross-validation models (model ids)
* public ModelKeyV3[] cross_validation_models;
*/
/* INHERITED: Cross-validation predictions, one per cv model (deprecated, use cross_validation_holdout_predictions_frame_id instead)
* public FrameKeyV3[] cross_validation_predictions;
*/
/* INHERITED: Cross-validation holdout predictions (full out-of-sample predictions on training data)
* public FrameKeyV3 cross_validation_holdout_predictions_frame_id;
*/
/* INHERITED: Cross-validation fold assignment (each row is assigned to one holdout fold)
* public FrameKeyV3 cross_validation_fold_assignment_frame_id;
*/
/* INHERITED: Category of the model (e.g., Binomial)
* public ModelCategory model_category;
*/
/* INHERITED: Model summary
* public TwoDimTableV3 model_summary;
*/
/* INHERITED: Scoring history
* public TwoDimTableV3 scoring_history;
*/
/* INHERITED: Training data model metrics
* public ModelMetricsBase training_metrics;
*/
/* INHERITED: Validation data model metrics
* public ModelMetricsBase validation_metrics;
*/
/* INHERITED: Cross-validation model metrics
* public ModelMetricsBase cross_validation_metrics;
*/
/* INHERITED: Cross-validation model metrics summary
* public TwoDimTableV3 cross_validation_metrics_summary;
*/
/* INHERITED: Job status
* public String status;
*/
/* INHERITED: Start time in milliseconds
* public long start_time;
*/
/* INHERITED: End time in milliseconds
* public long end_time;
*/
/* INHERITED: Runtime in milliseconds
* public long run_time;
*/
/* INHERITED: Help information for output fields
* public Map<String,String> help;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CoxPHModelV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class CoxPHModelV3 extends ModelSchema {
/* INHERITED: The build parameters for the model (e.g. K for KMeans).
* public CoxPHParametersV3 parameters;
*/
/* INHERITED: The build output for the model (e.g. the cluster centers for KMeans).
* public CoxPHModelOutputV3 output;
*/
/* INHERITED: Compatible frames, if requested
* public String[] compatible_frames;
*/
/* INHERITED: Checksum for all the things that go into building the Model.
* public long checksum;
*/
/* INHERITED: Model key
* public ModelKeyV3 model_id;
*/
/* INHERITED: The algo name for this Model.
* public String algo;
*/
/* INHERITED: The pretty algo name for this Model (e.g., Generalized Linear Model, rather than GLM).
* public String algo_full_name;
*/
/* INHERITED: The response column name for this Model (if applicable). Is null otherwise.
* public String response_column_name;
*/
/* INHERITED: The Model's training frame key
* public FrameKeyV3 data_frame;
*/
/* INHERITED: Timestamp for when this model was completed
* public long timestamp;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CoxPHParametersV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class CoxPHParametersV3 extends ModelParametersSchema {
/* INHERITED: Destination id for this model; auto-generated if not specified
* public ModelKeyV3 model_id;
*/
/* INHERITED: Training frame
* public FrameKeyV3 training_frame;
*/
/* INHERITED: Validation frame
* public FrameKeyV3 validation_frame;
*/
/* INHERITED: Number of folds for N-fold cross-validation
* public int nfolds;
*/
/* INHERITED: Keep cross-validation model predictions
* public boolean keep_cross_validation_predictions;
*/
/* INHERITED: Keep cross-validation fold assignment
* public boolean keep_cross_validation_fold_assignment;
*/
/* INHERITED: Allow parallel training of cross-validation models
* public boolean parallelize_cross_validation;
*/
/* INHERITED: Response column
* public ColSpecifierV3 response_column;
*/
/* INHERITED: Column with observation weights
* public ColSpecifierV3 weights_column;
*/
/* INHERITED: Offset column
* public ColSpecifierV3 offset_column;
*/
/* INHERITED: Column with cross-validation fold index assignment per observation
* public ColSpecifierV3 fold_column;
*/
/* INHERITED: Cross-validation fold assignment scheme, if fold_column is not specified
* public FoldAssignmentScheme fold_assignment;
*/
/* INHERITED: Ignored columns
* public String[] ignored_columns;
*/
/* INHERITED: Ignore constant columns
* public boolean ignore_const_cols;
*/
/* INHERITED: Whether to score during each iteration of model training
* public boolean score_each_iteration;
*/
/* INHERITED: Model checkpoint to resume training with
* public ModelKeyV3 checkpoint;
*/
/* INHERITED: Early stopping based on convergence of stopping_metric. Stop if simple moving average of length k of the stopping_metric does not improve for k:=stopping_rounds scoring events (0 to disable)
* public int stopping_rounds;
*/
/* INHERITED: Maximum allowed runtime in seconds for model training. Use 0 to disable.
* public double max_runtime_secs;
*/
/* INHERITED: Metric to use for early stopping (AUTO: logloss for classification, deviance for regression)
* public StoppingMetric stopping_metric;
*/
/* INHERITED: Relative tolerance for metric-based stopping criterion (stop if relative improvement is not at least this much)
* public double stopping_tolerance;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CoxPHV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class CoxPHV3 extends ModelBuilderSchema {
/* INHERITED: Model builder parameters.
* public CoxPHParametersV3 parameters;
*/
/* INHERITED: The algo name for this ModelBuilder.
* public String algo;
*/
/* INHERITED: The pretty algo name for this ModelBuilder (e.g., Generalized Linear Model, rather than GLM).
* public String algo_full_name;
*/
/* INHERITED: Model categories this ModelBuilder can build.
* public ModelCategory[] can_build;
*/
/* INHERITED: Should the builder always be visible, be marked as beta, or only visible if the user starts up with the experimental flag?
* public BuilderVisibility visibility;
*/
/* INHERITED: Job Key
* public JobV3 job;
*/
/* INHERITED: Parameter validation messages
* public ValidationMessageV3[] messages;
*/
/* INHERITED: Count of parameter validation errors
* public int error_count;
*/
/* INHERITED: HTTP status to return for this build.
* public int __http_status;
*/
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/CreateFrameV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class CreateFrameV3 extends RequestSchema {
/** destination key */
public FrameKeyV3 dest;
/** Number of rows */
public long rows;
/** Number of data columns (in addition to the first response column) */
public int cols;
/** Random number seed that determines the random values */
public long seed;
/** Random number seed for setting the column types */
public long seed_for_column_types;
/** Whether frame should be randomized */
public boolean randomize;
/** Constant value (for randomize=false) */
public long value;
/** Range for real variables (-range ... range) */
public long real_range;
/** Fraction of categorical columns (for randomize=true) */
public double categorical_fraction;
/** Factor levels for categorical variables */
public int factors;
/** Fraction of integer columns (for randomize=true) */
public double integer_fraction;
/** Range for integer variables (-range ... range) */
public long integer_range;
/** Fraction of binary columns (for randomize=true) */
public double binary_fraction;
/** Fraction of 1's in binary columns */
public double binary_ones_fraction;
/** Fraction of date/time columns (for randomize=true) */
public double time_fraction;
/** Fraction of string columns (for randomize=true) */
public double string_fraction;
/** Fraction of missing values */
public double missing_fraction;
/** Number of factor levels of the first column (1=real, 2=binomial, N=multinomial) */
public int response_factors;
/** Whether an additional response column should be generated */
public boolean has_response;
/** Job Key */
public JobKeyV3 key;
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
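A hedged sketch of filling this schema as the request body for POST /3/CreateFrame; all values are illustrative:
public class CreateFrameRequestSketch {
public static void main(String[] args) {
CreateFrameV3 req = new CreateFrameV3();
req.rows = 10_000;
req.cols = 10;
req.randomize = true;
req.categorical_fraction = 0.2;
req.integer_fraction = 0.3;
req.missing_fraction = 0.05;
req.response_factors = 2; // binomial response
req.has_response = true;
req.seed = 42;
System.out.println(req); // Gson JSON body for the REST call
}
}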
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DCTTransformerV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DCTTransformerV3 extends RequestSchema {
/** Dataset */
public FrameKeyV3 dataset;
/** Destination Frame ID */
public FrameKeyV3 destination_frame;
/** Dimensions of the input array: Height, Width, Depth (Nx1x1 for 1D, NxMx1 for 2D) */
public int[] dimensions;
/** Whether to do the inverse transform */
public boolean inverse;
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DRFModelOutputV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
import java.util.Map;
public class DRFModelOutputV3 extends SharedTreeModelOutputV3 {
/* INHERITED: Variable Importances
* public TwoDimTableV3 variable_importances;
*/
/* INHERITED: The Intercept term, the initial model function value to which trees make adjustments
* public double init_f;
*/
/* INHERITED: Column names
* public String[] names;
*/
/* INHERITED: Domains for categorical columns
* public String[][] domains;
*/
/* INHERITED: Cross-validation models (model ids)
* public ModelKeyV3[] cross_validation_models;
*/
/* INHERITED: Cross-validation predictions, one per cv model (deprecated, use cross_validation_holdout_predictions_frame_id instead)
* public FrameKeyV3[] cross_validation_predictions;
*/
/* INHERITED: Cross-validation holdout predictions (full out-of-sample predictions on training data)
* public FrameKeyV3 cross_validation_holdout_predictions_frame_id;
*/
/* INHERITED: Cross-validation fold assignment (each row is assigned to one holdout fold)
* public FrameKeyV3 cross_validation_fold_assignment_frame_id;
*/
/* INHERITED: Category of the model (e.g., Binomial)
* public ModelCategory model_category;
*/
/* INHERITED: Model summary
* public TwoDimTableV3 model_summary;
*/
/* INHERITED: Scoring history
* public TwoDimTableV3 scoring_history;
*/
/* INHERITED: Training data model metrics
* public ModelMetricsBase training_metrics;
*/
/* INHERITED: Validation data model metrics
* public ModelMetricsBase validation_metrics;
*/
/* INHERITED: Cross-validation model metrics
* public ModelMetricsBase cross_validation_metrics;
*/
/* INHERITED: Cross-validation model metrics summary
* public TwoDimTableV3 cross_validation_metrics_summary;
*/
/* INHERITED: Job status
* public String status;
*/
/* INHERITED: Start time in milliseconds
* public long start_time;
*/
/* INHERITED: End time in milliseconds
* public long end_time;
*/
/* INHERITED: Runtime in milliseconds
* public long run_time;
*/
/* INHERITED: Help information for output fields
* public Map<String,String> help;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DRFModelV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DRFModelV3 extends SharedTreeModelV3 {
/* INHERITED: The build parameters for the model (e.g. K for KMeans).
* public DRFParametersV3 parameters;
*/
/* INHERITED: The build output for the model (e.g. the cluster centers for KMeans).
* public DRFModelOutputV3 output;
*/
/* INHERITED: Compatible frames, if requested
* public String[] compatible_frames;
*/
/* INHERITED: Checksum for all the things that go into building the Model.
* public long checksum;
*/
/* INHERITED: Model key
* public ModelKeyV3 model_id;
*/
/* INHERITED: The algo name for this Model.
* public String algo;
*/
/* INHERITED: The pretty algo name for this Model (e.g., Generalized Linear Model, rather than GLM).
* public String algo_full_name;
*/
/* INHERITED: The response column name for this Model (if applicable). Is null otherwise.
* public String response_column_name;
*/
/* INHERITED: The Model's training frame key
* public FrameKeyV3 data_frame;
*/
/* INHERITED: Timestamp for when this model was completed
* public long timestamp;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DRFParametersV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DRFParametersV3 extends SharedTreeParametersV3 {
/** Number of variables randomly sampled as candidates at each split. If set to -1, defaults to sqrt{p} for classification and p/3 for regression (where p is the # of predictors). */
public int mtries;
/** For binary classification: Build 2x as many trees (one per class) - can lead to higher accuracy. */
public boolean binomial_double_trees;
/* INHERITED: Balance training data class counts via over/under-sampling (for imbalanced data).
* public boolean balance_classes;
*/
/* INHERITED: Desired over/under-sampling ratios per class (in lexicographic order). If not specified, sampling factors will be automatically computed to obtain class balance during training. Requires balance_classes.
* public float[] class_sampling_factors;
*/
/* INHERITED: Maximum relative size of the training data after balancing class counts (can be less than 1.0). Requires balance_classes.
* public float max_after_balance_size;
*/
/* INHERITED: Maximum size (# classes) for confusion matrices to be printed in the Logs
* public int max_confusion_matrix_size;
*/
/* INHERITED: Max. number (top K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable)
* public int max_hit_ratio_k;
*/
/* INHERITED: Number of trees.
* public int ntrees;
*/
/* INHERITED: Maximum tree depth.
* public int max_depth;
*/
/* INHERITED: Fewest allowed (weighted) observations in a leaf (in R called 'nodesize').
* public double min_rows;
*/
/* INHERITED: For numerical columns (real/int), build a histogram of (at least) this many bins, then split at the best point
* public int nbins;
*/
/* INHERITED: For numerical columns (real/int), build a histogram of (at most) this many bins at the root level, then decrease by factor of two per level
* public int nbins_top_level;
*/
/* INHERITED: For categorical columns (factors), build a histogram of this many bins, then split at the best point. Higher values can lead to more overfitting.
* public int nbins_cats;
*/
/* INHERITED: Stop making trees when the R^2 metric equals or exceeds this
* public double r2_stopping;
*/
/* INHERITED: Seed for pseudo random number generator (if applicable)
* public long seed;
*/
/* INHERITED: Run on one node only; no network overhead but fewer cpus used. Suitable for small datasets.
* public boolean build_tree_one_node;
*/
/* INHERITED: Row sample rate per tree (from 0.0 to 1.0)
* public float sample_rate;
*/
/* INHERITED: Row sample rate per tree per class (from 0.0 to 1.0)
* public float[] sample_rate_per_class;
*/
/* INHERITED: Column sample rate per tree (from 0.0 to 1.0)
* public float col_sample_rate_per_tree;
*/
/* INHERITED: Score the model after every so many trees. Disabled if set to 0.
* public int score_tree_interval;
*/
/* INHERITED: Minimum relative improvement in squared error reduction for a split to happen.
* public double min_split_improvement;
*/
/* INHERITED: Destination id for this model; auto-generated if not specified
* public ModelKeyV3 model_id;
*/
/* INHERITED: Training frame
* public FrameKeyV3 training_frame;
*/
/* INHERITED: Validation frame
* public FrameKeyV3 validation_frame;
*/
/* INHERITED: Number of folds for N-fold cross-validation
* public int nfolds;
*/
/* INHERITED: Keep cross-validation model predictions
* public boolean keep_cross_validation_predictions;
*/
/* INHERITED: Keep cross-validation fold assignment
* public boolean keep_cross_validation_fold_assignment;
*/
/* INHERITED: Allow parallel training of cross-validation models
* public boolean parallelize_cross_validation;
*/
/* INHERITED: Response column
* public ColSpecifierV3 response_column;
*/
/* INHERITED: Column with observation weights
* public ColSpecifierV3 weights_column;
*/
/* INHERITED: Offset column
* public ColSpecifierV3 offset_column;
*/
/* INHERITED: Column with cross-validation fold index assignment per observation
* public ColSpecifierV3 fold_column;
*/
/* INHERITED: Cross-validation fold assignment scheme, if fold_column is not specified
* public FoldAssignmentScheme fold_assignment;
*/
/* INHERITED: Ignored columns
* public String[] ignored_columns;
*/
/* INHERITED: Ignore constant columns
* public boolean ignore_const_cols;
*/
/* INHERITED: Whether to score during each iteration of model training
* public boolean score_each_iteration;
*/
/* INHERITED: Model checkpoint to resume training with
* public ModelKeyV3 checkpoint;
*/
/* INHERITED: Early stopping based on convergence of stopping_metric. Stop if simple moving average of length k of the stopping_metric does not improve for k:=stopping_rounds scoring events (0 to disable)
* public int stopping_rounds;
*/
/* INHERITED: Maximum allowed runtime in seconds for model training. Use 0 to disable.
* public double max_runtime_secs;
*/
/* INHERITED: Metric to use for early stopping (AUTO: logloss for classification, deviance for regression)
* public StoppingMetric stopping_metric;
*/
/* INHERITED: Relative tolerance for metric-based stopping criterion (stop if relative improvement is not at least this much)
* public double stopping_tolerance;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
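A short sketch of the DRF-specific knobs declared above; the commented lines assume the inherited fields really are public on the parent schemas, as the INHERITED comments indicate:
public class DrfParamsSketch {
public static void main(String[] args) {
DRFParametersV3 p = new DRFParametersV3();
p.mtries = -1; // -1: sqrt(p) for classification, p/3 for regression
p.binomial_double_trees = true; // one tree per class for binary problems
// inherited (declared on SharedTreeParametersV3 / ModelParametersSchema):
// p.ntrees = 50; p.max_depth = 20; p.training_frame = ...;
System.out.println(p); // Gson-serialized parameter block
}
}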
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DRFV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DRFV3 extends SharedTreeV3 {
/* INHERITED: Model builder parameters.
* public DRFParametersV3 parameters;
*/
/* INHERITED: The algo name for this ModelBuilder.
* public String algo;
*/
/* INHERITED: The pretty algo name for this ModelBuilder (e.g., Generalized Linear Model, rather than GLM).
* public String algo_full_name;
*/
/* INHERITED: Model categories this ModelBuilder can build.
* public ModelCategory[] can_build;
*/
/* INHERITED: Should the builder always be visible, be marked as beta, or only visible if the user starts up with the experimental flag?
* public BuilderVisibility visibility;
*/
/* INHERITED: Job Key
* public JobV3 job;
*/
/* INHERITED: Parameter validation messages
* public ValidationMessageV3[] messages;
*/
/* INHERITED: Count of parameter validation errors
* public int error_count;
*/
/* INHERITED: HTTP status to return for this build.
* public int __http_status;
*/
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DStackTraceV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DStackTraceV3 extends Schema {
/** Node name */
public String node;
/** Unix epoch time */
public long time;
/** One trace per thread */
public String[] thread_traces;
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DataInfoFrameV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DataInfoFrameV3 extends Schema {
/** input frame */
public FrameKeyV3 frame;
/** interactions */
public String[] interactions;
/** use all factor levels */
public boolean use_all;
/** standardize */
public boolean standardize;
/** interactions only returned */
public boolean interactions_only;
/** output frame */
public FrameKeyV3 result;
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DeepLearningModelOutputV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
import java.util.Map;
public class DeepLearningModelOutputV3 extends ModelOutputSchema {
/** Frame keys for weight matrices */
public FrameKeyV3[] weights;
/** Frame keys for bias vectors */
public FrameKeyV3[] biases;
/** Normalization/Standardization multipliers for numeric predictors */
public double[] normmul;
/** Normalization/Standardization offsets for numeric predictors */
public double[] normsub;
/** Normalization/Standardization multipliers for numeric response */
public double[] normrespmul;
/** Normalization/Standardization offsets for numeric response */
public double[] normrespsub;
/** Categorical offsets for one-hot encoding */
public int[] catoffsets;
/** Variable Importances */
public TwoDimTableV3 variable_importances;
/* INHERITED: Column names
* public String[] names;
*/
/* INHERITED: Domains for categorical columns
* public String[][] domains;
*/
/* INHERITED: Cross-validation models (model ids)
* public ModelKeyV3[] cross_validation_models;
*/
/* INHERITED: Cross-validation predictions, one per cv model (deprecated, use cross_validation_holdout_predictions_frame_id instead)
* public FrameKeyV3[] cross_validation_predictions;
*/
/* INHERITED: Cross-validation holdout predictions (full out-of-sample predictions on training data)
* public FrameKeyV3 cross_validation_holdout_predictions_frame_id;
*/
/* INHERITED: Cross-validation fold assignment (each row is assigned to one holdout fold)
* public FrameKeyV3 cross_validation_fold_assignment_frame_id;
*/
/* INHERITED: Category of the model (e.g., Binomial)
* public ModelCategory model_category;
*/
/* INHERITED: Model summary
* public TwoDimTableV3 model_summary;
*/
/* INHERITED: Scoring history
* public TwoDimTableV3 scoring_history;
*/
/* INHERITED: Training data model metrics
* public ModelMetricsBase training_metrics;
*/
/* INHERITED: Validation data model metrics
* public ModelMetricsBase validation_metrics;
*/
/* INHERITED: Cross-validation model metrics
* public ModelMetricsBase cross_validation_metrics;
*/
/* INHERITED: Cross-validation model metrics summary
* public TwoDimTableV3 cross_validation_metrics_summary;
*/
/* INHERITED: Job status
* public String status;
*/
/* INHERITED: Start time in milliseconds
* public long start_time;
*/
/* INHERITED: End time in milliseconds
* public long end_time;
*/
/* INHERITED: Runtime in milliseconds
* public long run_time;
*/
/* INHERITED: Help information for output fields
* public Map<String,String> help;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DeepLearningModelV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DeepLearningModelV3 extends ModelSchema {
/* INHERITED: The build parameters for the model (e.g. K for KMeans).
* public DeepLearningParametersV3 parameters;
*/
/* INHERITED: The build output for the model (e.g. the cluster centers for KMeans).
* public DeepLearningModelOutputV3 output;
*/
/* INHERITED: Compatible frames, if requested
* public String[] compatible_frames;
*/
/* INHERITED: Checksum for all the things that go into building the Model.
* public long checksum;
*/
/* INHERITED: Model key
* public ModelKeyV3 model_id;
*/
/* INHERITED: The algo name for this Model.
* public String algo;
*/
/* INHERITED: The pretty algo name for this Model (e.g., Generalized Linear Model, rather than GLM).
* public String algo_full_name;
*/
/* INHERITED: The response column name for this Model (if applicable). Is null otherwise.
* public String response_column_name;
*/
/* INHERITED: The Model's training frame key
* public FrameKeyV3 data_frame;
*/
/* INHERITED: Timestamp for when this model was completed
* public long timestamp;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DeepLearningParametersV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DeepLearningParametersV3 extends ModelParametersSchema {
/** Balance training data class counts via over/under-sampling (for imbalanced data). */
public boolean balance_classes;
/** Desired over/under-sampling ratios per class (in lexicographic order). If not specified, sampling factors will be automatically computed to obtain class balance during training. Requires balance_classes. */
public float[] class_sampling_factors;
/** Maximum relative size of the training data after balancing class counts (can be less than 1.0). Requires balance_classes. */
public float max_after_balance_size;
/** Maximum size (# classes) for confusion matrices to be printed in the Logs */
public int max_confusion_matrix_size;
/** Max. number (top K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable) */
public int max_hit_ratio_k;
/** If enabled, override the final model with the best model found during training */
public boolean overwrite_with_best_model;
/** Auto-Encoder */
public boolean autoencoder;
/** Use all factor levels of categorical variables. Otherwise, the first factor level is omitted (without loss of accuracy). Useful for variable importances and auto-enabled for autoencoder. */
public boolean use_all_factor_levels;
/** If enabled, automatically standardize the data. If disabled, the user must provide properly scaled input data. */
public boolean standardize;
/** Activation function */
public Activation activation;
/** Hidden layer sizes (e.g. 100,100). */
public int[] hidden;
/** How many times the dataset should be iterated (streamed), can be fractional */
public double epochs;
/** Number of training samples (globally) per MapReduce iteration. Special values are 0: one epoch, -1: all available data (e.g., replicated training data), -2: automatic */
public long train_samples_per_iteration;
/** Target ratio of communication overhead to computation. Only for multi-node operation and train_samples_per_iteration=-2 (auto-tuning) */
public double target_ratio_comm_to_comp;
/** Seed for random numbers (affects sampling) - Note: only reproducible when running single threaded */
public long seed;
/** Adaptive learning rate */
public boolean adaptive_rate;
/** Adaptive learning rate time decay factor (similarity to prior updates) */
public double rho;
/** Adaptive learning rate smoothing factor (to avoid divisions by zero and allow progress) */
public double epsilon;
/** Learning rate (higher => less stable, lower => slower convergence) */
public double rate;
/** Learning rate annealing: rate / (1 + rate_annealing * samples) */
public double rate_annealing;
/** Learning rate decay factor between layers (N-th layer: rate*alpha^(N-1)) */
public double rate_decay;
/** Initial momentum at the beginning of training (try 0.5) */
public double momentum_start;
/** Number of training samples for which momentum increases */
public double momentum_ramp;
/** Final momentum after the ramp is over (try 0.99) */
public double momentum_stable;
/** Use Nesterov accelerated gradient (recommended) */
public boolean nesterov_accelerated_gradient;
/** Input layer dropout ratio (can improve generalization, try 0.1 or 0.2) */
public double input_dropout_ratio;
/** Hidden layer dropout ratios (can improve generalization), specify one value per hidden layer, defaults to 0.5 */
public double[] hidden_dropout_ratios;
/** L1 regularization (can add stability and improve generalization, causes many weights to become 0) */
public double l1;
/** L2 regularization (can add stability and improve generalization, causes many weights to be small) */
public double l2;
/** Constraint for squared sum of incoming weights per unit (e.g. for Rectifier) */
public float max_w2;
/** Initial Weight Distribution */
public InitialWeightDistribution initial_weight_distribution;
/** Scale of the initial weight distribution (Uniform: -value ... value, Normal: stddev) */
public double initial_weight_scale;
/** A list of H2OFrame ids to initialize the weight matrices of this model with. */
public FrameKeyV3[] initial_weights;
/** A list of H2OFrame ids to initialize the bias vectors of this model with. */
public FrameKeyV3[] initial_biases;
/** Loss function */
public Loss loss;
/** Distribution function */
public Family distribution;
/** Tweedie Power */
public double tweedie_power;
/** Desired quantile for quantile regression (from 0.0 to 1.0) */
public double quantile_alpha;
/** Shortest time interval (in secs) between model scoring */
public double score_interval;
/** Number of training set samples for scoring (0 for all) */
public long score_training_samples;
/** Number of validation set samples for scoring (0 for all) */
public long score_validation_samples;
/** Maximum duty cycle fraction for scoring (lower: more training, higher: more scoring). */
public double score_duty_cycle;
/** Stopping criterion for classification error fraction on training data (-1 to disable) */
public double classification_stop;
/** Stopping criterion for regression error (MSE) on training data (-1 to disable) */
public double regression_stop;
/** Enable quiet mode for less output to standard output */
public boolean quiet_mode;
/** Method used to sample validation dataset for scoring */
public ClassSamplingMethod score_validation_sampling;
/** Enable diagnostics for hidden layers */
public boolean diagnostics;
/** Compute variable importances for input features (Gedeon method) - can be slow for large networks */
public boolean variable_importances;
/** Enable fast mode (minor approximation in back-propagation) */
public boolean fast_mode;
/** Force extra load balancing to increase training speed for small datasets (to keep all cores busy) */
public boolean force_load_balance;
/** Replicate the entire training dataset onto every node for faster training on small datasets */
public boolean replicate_training_data;
/** Run on a single node for fine-tuning of model parameters */
public boolean single_node_mode;
/** Enable shuffling of training data (recommended if training data is replicated and train_samples_per_iteration is close to #nodes x #rows, or if using balance_classes) */
public boolean shuffle_training_data;
/** Handling of missing values. Either Skip or MeanImputation. */
public MissingValuesHandling missing_values_handling;
/** Sparse data handling (more efficient for data with lots of 0 values). */
public boolean sparse;
/** Use a column major weight matrix for input layer. Can speed up forward propagation, but might slow down backpropagation (Deprecated). */
public boolean col_major;
/** Average activation for sparse auto-encoder (Experimental) */
public double average_activation;
/** Sparsity regularization (Experimental) */
public double sparsity_beta;
/** Max. number of categorical features, enforced via hashing (Experimental) */
public int max_categorical_features;
/** Force reproducibility on small data (will be slow - only uses 1 thread) */
public boolean reproducible;
/** Whether to export Neural Network weights and biases to H2O Frames */
public boolean export_weights_and_biases;
/** Mini-batch size (smaller leads to better fit, larger can speed up and generalize better) */
public int mini_batch_size;
/** Elastic averaging between compute nodes can improve distributed model convergence (Experimental) */
public boolean elastic_averaging;
/** Elastic averaging moving rate (only if elastic averaging is enabled). */
public double elastic_averaging_moving_rate;
/** Elastic averaging regularization strength (only if elastic averaging is enabled). */
public double elastic_averaging_regularization;
/** Pretrained autoencoder model to initialize this model with. */
public ModelKeyV3 pretrained_autoencoder;
/* INHERITED: Destination id for this model; auto-generated if not specified
* public ModelKeyV3 model_id;
*/
/* INHERITED: Training frame
* public FrameKeyV3 training_frame;
*/
/* INHERITED: Validation frame
* public FrameKeyV3 validation_frame;
*/
/* INHERITED: Number of folds for N-fold cross-validation
* public int nfolds;
*/
/* INHERITED: Keep cross-validation model predictions
* public boolean keep_cross_validation_predictions;
*/
/* INHERITED: Keep cross-validation fold assignment
* public boolean keep_cross_validation_fold_assignment;
*/
/* INHERITED: Allow parallel training of cross-validation models
* public boolean parallelize_cross_validation;
*/
/* INHERITED: Response column
* public ColSpecifierV3 response_column;
*/
/* INHERITED: Column with observation weights
* public ColSpecifierV3 weights_column;
*/
/* INHERITED: Offset column
* public ColSpecifierV3 offset_column;
*/
/* INHERITED: Column with cross-validation fold index assignment per observation
* public ColSpecifierV3 fold_column;
*/
/* INHERITED: Cross-validation fold assignment scheme, if fold_column is not specified
* public FoldAssignmentScheme fold_assignment;
*/
/* INHERITED: Ignored columns
* public String[] ignored_columns;
*/
/* INHERITED: Ignore constant columns
* public boolean ignore_const_cols;
*/
/* INHERITED: Whether to score during each iteration of model training
* public boolean score_each_iteration;
*/
/* INHERITED: Model checkpoint to resume training with
* public ModelKeyV3 checkpoint;
*/
/* INHERITED: Early stopping based on convergence of stopping_metric. Stop if simple moving average of length k of the stopping_metric does not improve for k:=stopping_rounds scoring events (0 to disable)
* public int stopping_rounds;
*/
/* INHERITED: Maximum allowed runtime in seconds for model training. Use 0 to disable.
* public double max_runtime_secs;
*/
/* INHERITED: Metric to use for early stopping (AUTO: logloss for classification, deviance for regression)
* public StoppingMetric stopping_metric;
*/
/* INHERITED: Relative tolerance for metric-based stopping criterion (stop if relative improvement is not at least this much)
* public double stopping_tolerance;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
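    // Illustrative sketch, not part of the generated binding: one plausible way to
    // populate a few of these parameters and serialize them for a REST call. The
    // chosen field values are assumptions for demonstration, not H2O defaults.
    public static void main(String[] args) {
        DeepLearningParametersV3 params = new DeepLearningParametersV3();
        params.mini_batch_size = 32;       // hypothetical mini-batch size
        params.quantile_alpha = 0.5;       // median, for quantile regression
        params.reproducible = true;        // force a slow but reproducible run
        System.out.println(params);        // JSON via the Gson-backed toString() above
    }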
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DeepLearningV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DeepLearningV3 extends ModelBuilderSchema {
/* INHERITED: Model builder parameters.
* public DeepLearningParametersV3 parameters;
*/
/* INHERITED: The algo name for this ModelBuilder.
* public String algo;
*/
/* INHERITED: The pretty algo name for this ModelBuilder (e.g., Generalized Linear Model, rather than GLM).
* public String algo_full_name;
*/
/* INHERITED: Model categories this ModelBuilder can build.
* public ModelCategory[] can_build;
*/
/* INHERITED: Should the builder always be visible, be marked as beta, or be visible only if the user starts up with the experimental flag?
* public BuilderVisibility visibility;
*/
/* INHERITED: Job Key
* public JobV3 job;
*/
/* INHERITED: Parameter validation messages
* public ValidationMessageV3[] messages;
*/
/* INHERITED: Count of parameter validation errors
* public int error_count;
*/
/* INHERITED: HTTP status to return for this build.
* public int __http_status;
*/
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/Direction.java
|
package water.bindings.pojos;
public enum Direction {
INPUT,
OUTPUT,
INOUT,
}
|
0
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings
|
java-sources/ai/h2o/h2o-java-rest-bindings/3.8.2.11/water/bindings/pojos/DownloadDataV3.java
|
package water.bindings.pojos;
import com.google.gson.Gson;
public class DownloadDataV3 extends RequestSchema {
/** Frame to download */
public FrameKeyV3 frame_id;
/** Emit double values in a machine-readable, lossless format with Double.toHexString(). */
public boolean hex_string;
/** CSV Stream */
public String csv;
/** Suggested Filename */
public String filename;
/* INHERITED: Comma-separated list of JSON field paths to exclude from the result, used like: "/3/Frames?_exclude_fields=frames/frame_id/URL,__meta"
* public String _exclude_fields;
*/
/** Return the contents of this object as a JSON String. */
@Override
public String toString() {
return new Gson().toJson(this);
}
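    // Illustrative sketch, not part of the generated binding: deserializing a
    // server response into this schema with Gson. The JSON literal is an assumed
    // example payload, not actual H2O output.
    public static DownloadDataV3 parseExample() {
        String json = "{\"hex_string\":false,\"filename\":\"example.csv\"}";
        return new Gson().fromJson(json, DownloadDataV3.class);
    }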
}
|