Dataset schema: index (int64), repo_id (string), file_path (string), content (string). All records share index 0 and repo_id java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims; each record below is listed by its file_path, followed by its content.

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/models/AstTestJavaScoring.java

package water.rapids.ast.prims.models;
import hex.Model;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
public class AstTestJavaScoring extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"model", "frame", "predictions", "epsilon"};
}
@Override
public int nargs() {
return 1 + 4;
}
@Override
public String str() {
return "model.testJavaScoring";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Model model = stk.track(asts[1].exec(env)).getModel();
Frame frame = stk.track(asts[2].exec(env)).getFrame();
Frame preds = stk.track(asts[3].exec(env)).getFrame();
double epsilon = stk.track(asts[4].exec(env)).getNum();
boolean correct = model.testJavaScoring(frame, preds, epsilon);
return ValFrame.fromRow(correct ? 1 : 0);
}
}
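
For orientation, here is a minimal JShell-style sketch of how a primitive like this could be invoked through the Rapids interpreter. The keys `mymodel`, `df`, and `preds` are hypothetical, and an initialized H2O node is assumed:

```java
import water.rapids.Rapids;
import water.rapids.Val;

// (model.testJavaScoring model frame predictions epsilon): re-scores the
// frame with the model's generated Java scoring code and compares against
// the given predictions within epsilon. Per the source above, the result
// is a 1x1 frame holding 1 (match) or 0 (mismatch).
Val v = Rapids.exec("(model.testJavaScoring mymodel df preds 0.0001)");
System.out.println(v.getFrame().vec(0).at(0)); // 1.0 or 0.0
```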

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/models/AstTransformFrame.java

package water.rapids.ast.prims.models;
import hex.Model;
import water.DKV;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
public class AstTransformFrame extends AstPrimitive {
@Override
public int nargs() { return 1 + 2; }
@Override
public String str() {
return "transform";
}
@Override
public String[] args() {
return new String[]{"model key", "frame key"};
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Model model = stk.track(asts[1].exec(env)).getModel();
Frame fr = DKV.get(asts[2].toString()).get();
return new ValFrame(model.transform(fr));
}
}

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstAnyFactor.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
/**
* Any columns factor/categorical?
*/
public class AstAnyFactor extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (any.factor frame)
@Override
public String str() {
return "any.factor";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
for (Vec vec : fr.vecs()) if (vec.isCategorical()) return new ValNum(1);
return new ValNum(0);
}
}
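
A minimal JShell-style sketch of calling this primitive, assuming a running H2O node and a frame registered in the DKV under the hypothetical key `df`:

```java
import water.rapids.Rapids;
import water.rapids.Val;

// (any.factor df) scans the columns and returns 1 as soon as a
// categorical Vec is found, otherwise 0.
Val v = Rapids.exec("(any.factor df)");
System.out.println(v.getNum() == 1 ? "has factor column" : "no factor columns");
```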

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstAppendLevels.java

package water.rapids.ast.prims.mungers;
import water.DKV;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstStrList;
import water.rapids.vals.ValFrame;
import water.util.ArrayUtils;
public class AstAppendLevels extends AstPrimitive<AstAppendLevels> {
@Override
public String[] args() {
return new String[]{"ary", "inPlace", "extraLevels"};
}
@Override
public int nargs() {
return 1 + 3;
} // (setDomain x inPlace [list of strings])
@Override
public String str() {
return "appendLevels";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame f = stk.track(asts[1].exec(env)).getFrame();
boolean inPlace = asts[2].exec(env).getNum() == 1;
String[] extraLevels = ((AstStrList) asts[3])._strs;
if (f.numCols() != 1)
throw new IllegalArgumentException("Must be a single column. Got: " + f.numCols() + " columns.");
if (! f.vec(0).isCategorical())
throw new IllegalArgumentException("Vector must be a factor column. Got: " + f.vec(0).get_type_str());
final Vec v = inPlace ? f.vec(0) : env._ses.copyOnWrite(f, new int[]{0})[0];
v.setDomain(ArrayUtils.append(v.domain(), extraLevels));
DKV.put(v);
return new ValFrame(f);
}
}
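
A sketch of invoking `appendLevels` (assumptions as before, plus `df` must be a single categorical column, as the primitive requires). Passing `0` for `inPlace` takes the copy-on-write path seen above:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (appendLevels ary inPlace [extraLevels]): extends the domain of a
// single factor column with extra, possibly unseen, levels.
Frame f = Rapids.exec("(appendLevels df 0 [\"UNSEEN_LEVEL\"])").getFrame();
System.out.println(java.util.Arrays.toString(f.vec(0).domain()));
```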

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstApply.java

package water.rapids.ast.prims.mungers;
import water.H2O;
import water.fvec.*;
import water.MRTask;
import water.rapids.*;
import water.rapids.ast.*;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
/**
* Apply a Function to a frame
* Typically, column-by-column, produces a 1-row frame as a result
*/
public class AstApply extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "margin", "fun"};
}
@Override
public int nargs() {
return 1 + 3;
} // (apply frame 1/2 fun)
@Override
public String str() {
return "apply";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
double margin = stk.track(asts[2].exec(env)).getNum();
AstPrimitive fun = stk.track(asts[3].exec(env)).getFun();
int nargs = fun.nargs();
if (nargs != -1 && nargs != 2)
throw new IllegalArgumentException("Incorrect number of arguments; '" + fun + "' expects " + nargs + " but was passed " + 2);
switch ((int) margin) {
case 1:
return rowwise(env, fr, fun);
case 2:
return colwise(env, stk, fr, fun);
default:
throw new IllegalArgumentException("Only row-wise (margin 1) or col-wise (margin 2) allowed");
}
}
// --------------------------------------------------------------------------
private ValFrame colwise(Env env, Env.StackHelp stk, Frame fr, AstPrimitive fun) {
// Break each column into its own Frame, then execute the function, passing
// that single column as the one argument. All columns are independent, and
// this loop could be parallelized over columns.
Vec vecs[] = fr.vecs();
Val vals[] = new Val[vecs.length];
AstRoot[] asts = new AstRoot[]{fun, null};
for (int i = 0; i < vecs.length; i++) {
asts[1] = new AstFrame(new Frame(new String[]{fr._names[i]}, new Vec[]{vecs[i]}));
try (Env.StackHelp stk_inner = env.stk()) {
vals[i] = fun.apply(env, stk_inner, asts);
}
}
// All the resulting Vals must be the same scalar type (and if ValFrames,
// the columns must be the same count and type). Build a result Frame with
// one column per applied function result (one per input column), and as
// many rows as the returned Frames have (a single row for scalar results).
Val v0 = vals[0];
Vec ovecs[] = new Vec[vecs.length];
switch (v0.type()) {
case Val.NUM:
for (int i = 0; i < vecs.length; i++)
ovecs[i] = Vec.makeCon(vals[i].getNum(), 1L); // Since the zero column is a number, all must be numbers
break;
case Val.FRM:
long nrows = v0.getFrame().numRows();
for (int i = 0; i < vecs.length; i++) {
Frame res = vals[i].getFrame(); // Since the zero column is a frame, all must be frames
if (res.numCols() != 1)
throw new IllegalArgumentException("apply result Frames must have one column, found " + res.numCols() + " cols");
if (res.numRows() != nrows)
throw new IllegalArgumentException("apply result Frames must have all the same rows, found " + nrows + " rows and " + res.numRows());
ovecs[i] = res.vec(0);
}
break;
case Val.NUMS:
for (int i = 0; i < vecs.length; i++)
ovecs[i] = Vec.makeCon(vals[i].getNums()[0], 1L);
break;
case Val.STRS:
throw H2O.unimpl();
case Val.FUN:
throw water.H2O.unimpl();
case Val.STR:
throw water.H2O.unimpl();
default:
throw water.H2O.unimpl();
}
return new ValFrame(new Frame(fr._names, ovecs));
}
// --------------------------------------------------------------------------
// Break each row into its own Row, then execute the function, passing the
// row as the one argument. All rows are independent and run in parallel.
private ValFrame rowwise(Env env, Frame fr, final AstPrimitive fun) {
final String[] names = fr._names;
final AstFunction scope = env._scope; // Current execution scope; needed to lookup variables
// do a single row of the frame to determine the size of the output.
double[] ds = new double[fr.numCols()];
for (int col = 0; col < fr.numCols(); ++col)
ds[col] = fr.vec(col).at(0);
int noutputs = fun.apply(env, env.stk(), new AstRoot[]{fun, new AstRow(ds, fr.names())}).getRow().length;
Frame res = new MRTask() {
@Override
public void map(Chunk chks[], NewChunk[] nc) {
double ds[] = new double[chks.length]; // Working row
AstRoot[] asts = new AstRoot[]{fun, new AstRow(ds, names)}; // Arguments to be called; they are reused endlessly
Session ses = new Session(); // Session, again reused endlessly
Env env = new Env(ses);
env._scope = scope; // For proper namespace lookup
for (int row = 0; row < chks[0]._len; row++) {
for (int col = 0; col < chks.length; col++) // Fill the row
ds[col] = chks[col].atd(row);
try (Env.StackHelp stk_inner = env.stk()) {
double[] valRow = fun.apply(env, stk_inner, asts).getRow(); // Make the call per-row
for (int newCol = 0; newCol < nc.length; ++newCol)
nc[newCol].addNum(valRow[newCol]);
}
}
ses.end(null); // Mostly for the sanity checks
}
}.doAll(noutputs, Vec.T_NUM, fr).outputFrame();
return new ValFrame(res);
}
}
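
A sketch of a column-wise apply (margin 2), with the same assumptions as before and assuming the usual Rapids lambda syntax; `sum` is a built-in reducer, so each column collapses to a scalar and the result is a one-row frame:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (apply frame 2 fun): margin 2 routes to colwise() above, which runs the
// lambda once per column and assembles one output column per input column.
Frame sums = Rapids.exec("(apply df 2 { x . (sum x) })").getFrame();
System.out.println(sums.numRows() + " row, " + sums.numCols() + " cols");
```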

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstAsCharacter.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.util.VecUtils;
/**
* Convert to StringVec
*/
public class AstAsCharacter extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (as.character col)
@Override
public String str() {
return "as.character";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame ary = stk.track(asts[1].exec(env)).getFrame();
Vec[] nvecs = new Vec[ary.numCols()];
Vec vv;
for (int c = 0; c < nvecs.length; ++c) {
vv = ary.vec(c);
try {
nvecs[c] = vv.toStringVec();
} catch (Exception e) {
VecUtils.deleteVecs(nvecs, c);
throw e;
}
}
return new ValFrame(new Frame(ary._names, nvecs));
}
}

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstAsFactor.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.util.VecUtils;
/**
* Convert to a factor/categorical
*/
public class AstAsFactor extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (as.factor col)
@Override
public String str() {
return "as.factor";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame ary = stk.track(asts[1].exec(env)).getFrame();
Vec[] nvecs = new Vec[ary.numCols()];
// Type check - prescreen for correct types
for (Vec v : ary.vecs())
if (!(v.isCategorical() || v.isString() || v.isNumeric() || v.isBad()))
throw new IllegalArgumentException("asfactor() requires a string, categorical, numeric or empty column. "
+ "Received " + ary.anyVec().get_type_str()
+ ". Please convert column to a string or categorical first.");
Vec vv;
for (int c = 0; c < nvecs.length; ++c) {
vv = ary.vec(c);
try {
nvecs[c] = vv.toCategoricalVec();
} catch (Exception e) {
VecUtils.deleteVecs(nvecs, c);
throw e;
}
}
return new ValFrame(new Frame(ary._names, nvecs));
}
}
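
A sketch, with the same assumptions as above (running node, hypothetical key `df`); every column is converted, and the type pre-screen above rejects unsupported columns first:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (as.factor df): returns a new frame whose columns are categorical.
Frame factors = Rapids.exec("(as.factor df)").getFrame();
System.out.println(factors.vec(0).isCategorical()); // true
```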

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstAsNumeric.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.util.VecUtils;
/**
* Convert to a numeric
*/
public class AstAsNumeric extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (as.numeric col)
@Override
public String str() {
return "as.numeric";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
Vec[] nvecs = new Vec[fr.numCols()];
Vec vv;
for (int c = 0; c < nvecs.length; ++c) {
vv = fr.vec(c);
try {
nvecs[c] = vv.toNumericVec();
} catch (Exception e) {
VecUtils.deleteVecs(nvecs, c);
throw e;
}
}
return new ValFrame(new Frame(fr._names, nvecs));
}
}

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstCBind.java

package water.rapids.ast.prims.mungers;
import water.H2O;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
/**
* cbind: bind columns together into a new frame
*/
public class AstCBind extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"..."};
}
@Override
public int nargs() {
return -1;
} // variable number of args
@Override
public String str() {
return "cbind";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
// Compute the variable args. Find the common row count
Val vals[] = new Val[asts.length];
Vec vec = null;
for (int i = 1; i < asts.length; i++) {
vals[i] = stk.track(asts[i].exec(env));
if (vals[i].isFrame()) {
Vec anyvec = vals[i].getFrame().anyVec();
if (anyvec == null) continue; // Ignore the empty frame
if (vec == null) vec = anyvec;
else if (vec.length() != anyvec.length())
throw new IllegalArgumentException("cbind frames must have all the same rows, found " + vec.length() + " and " + anyvec.length() + " rows.");
}
}
boolean clean = false;
if (vec == null) {
vec = Vec.makeZero(1);
clean = true;
} // Default to length 1
// Populate the new Frame
Frame fr = new Frame();
for (int i = 1; i < asts.length; i++) {
switch (vals[i].type()) {
case Val.FRM:
fr.add(vals[i].getFrame().names(), fr.makeCompatible(vals[i].getFrame()));
break;
case Val.FUN:
throw H2O.unimpl();
case Val.STR:
throw H2O.unimpl();
case Val.NUM:
// Auto-expand scalars to fill every row
double d = vals[i].getNum();
fr.add(Double.toString(d), vec.makeCon(d));
break;
default:
throw H2O.unimpl();
}
}
if (clean) vec.remove();
return new ValFrame(fr);
}
}
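
A sketch of the scalar auto-expansion branch (`case Val.NUM`) above; assumptions as before:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (cbind df 3.14): the scalar is expanded into a constant column that is
// as long as the other frames, named by its value ("3.14").
Frame wider = Rapids.exec("(cbind df 3.14)").getFrame();
System.out.println(wider.name(wider.numCols() - 1)); // 3.14
```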

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstColNames.java

package water.rapids.ast.prims.mungers;
import water.DKV;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import water.rapids.ast.params.AstStrList;
/**
* Assign column names
*/
public class AstColNames extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "cols", "names"};
}
@Override
public int nargs() {
return 1 + 3;
} // (colnames frame [#cols] ["names"])
@Override
public String str() {
return "colnames=";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (asts[2] instanceof AstNumList) {
if (!(asts[3] instanceof AstStrList))
throw new IllegalArgumentException("Column naming requires a string-list, but found a " + asts[3].getClass());
AstNumList cols = ((AstNumList) asts[2]);
AstStrList nams = ((AstStrList) asts[3]);
int d[] = cols.expand4();
if (d.length != nams._strs.length)
throw new IllegalArgumentException("Must have the same number of column choices as names");
for (int i = 0; i < d.length; i++)
fr._names[d[i]] = nams._strs[i];
} else if ((asts[2] instanceof AstNum)) {
int col = (int) (asts[2].exec(env).getNum());
String name = asts[3].exec(env).getStr();
fr._names[col] = name;
} else
throw new IllegalArgumentException("Column naming requires a number-list, but found a " + asts[2].getClass());
if (fr._key != null) DKV.put(fr); // Update names in DKV
return new ValFrame(fr);
}
}
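
A sketch of the number-list form (the `AstNumList`/`AstStrList` branch above); assumptions as before:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (colnames= frame [cols] [names]): renames the listed column indices.
Frame renamed = Rapids.exec("(colnames= df [0 1] [\"id\" \"value\"])").getFrame();
System.out.println(renamed.name(0) + ", " + renamed.name(1)); // id, value
```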

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstColPySlice.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstParameter;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValRow;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.params.AstNum;
/**
 * Column slice; allows Python-like syntax.
 * Numbers past the last column are allowed and ignored in NumLists, but throw an
 * error for single numbers. Negative numbers have the number of columns
 * added to them before being checked for range.
*/
public class AstColPySlice extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "cols"};
}
@Override
public int nargs() {
return 1 + 2;
} // (cols_py src [col_list])
@Override
public String str() {
return "cols_py";
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val v = stk.track(asts[1].exec(env));
AstParameter colList = (AstParameter) asts[2];
if (v instanceof ValRow) {
ValRow vv = (ValRow) v;
return vv.slice(colList.columns(vv.getNames()));
}
Frame fr = v.getFrame();
int[] cols = colList.columns(fr.names());
Frame fr2 = new Frame();
if (cols.length == 0) // Empty inclusion list?
return new ValFrame(fr2);
if (cols[0] < 0) // Negative cols have number of cols added
for (int i = 0; i < cols.length; i++)
cols[i] += fr.numCols();
if (asts[2] instanceof AstNum && // Singletons must be in-range
(cols[0] < 0 || cols[0] >= fr.numCols()))
throw new IllegalArgumentException("Column must be an integer from 0 to " + (fr.numCols() - 1));
for (int col : cols) // For all included columns
if (col >= 0 && col < fr.numCols()) // Ignoring out-of-range ones
fr2.add(fr.names()[col], fr.vecs()[col]);
return new ValFrame(fr2);
}
}
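
A sketch of the Python-style slice; assumptions as before. Negative indices have the column count added, per the comment above:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (cols_py df [-1]): -1 + numCols() selects the last column.
Frame last = Rapids.exec("(cols_py df [-1])").getFrame();
System.out.println(last.numCols()); // 1
```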

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstColSlice.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstParameter;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValRow;
import water.rapids.ast.AstPrimitive;
import java.util.Arrays;
/**
* Column slice; allows R-like syntax.
* Numbers past the largest column are an error.
* Negative numbers and number lists are allowed, and represent an *exclusion* list
*/
public class AstColSlice extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "cols"};
}
@Override
public int nargs() {
return 1 + 2;
} // (cols src [col_list])
@Override
public String str() {
return "cols";
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val v = stk.track(asts[1].exec(env));
AstParameter col_list = (AstParameter) asts[2];
if (v instanceof ValRow) {
ValRow vv = (ValRow) v;
return vv.slice(col_list.columns(vv.getNames()));
}
Frame src = v.getFrame();
int[] cols = col_select(src.names(), col_list);
Frame dst = new Frame();
Vec[] vecs = src.vecs();
for (int col : cols) dst.add(src._names[col], vecs[col]);
return new ValFrame(dst);
}
// Complex column selector; by list of names or list of numbers or single
// name or number. Numbers can be ranges or negative.
public static int[] col_select(String[] names, AstParameter col_selector) {
int[] cols = col_selector.columns(names);
if (cols.length == 0) return cols; // Empty inclusion list?
if (cols[0] >= 0) { // Positive (inclusion) list
if (cols[cols.length - 1] >= names.length)
throw new IllegalArgumentException("Column must be an integer from 0 to " + (names.length - 1));
return cols;
}
// Negative (exclusion) list; convert to positive inclusion list
int[] pos = new int[names.length];
for (int col : cols) // more or less a radix sort, filtering down to cols to ignore
if (0 <= -col - 1 && -col - 1 < names.length)
pos[-col - 1] = -1;
int j = 0;
for (int i = 0; i < names.length; i++) if (pos[i] == 0) pos[j++] = i;
return Arrays.copyOfRange(pos, 0, j);
}
}
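
In contrast to `cols_py`, negative numbers here form an exclusion list. A sketch (assumptions as before); note that in this encoding `-1` excludes column 0, since `col_select` maps `col` to `-col - 1`:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (cols df [-1]): drop the first column, keep the rest.
Frame rest = Rapids.exec("(cols df [-1])").getFrame();
System.out.println(rest.numCols());
```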

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstColumnsByType.java

package water.rapids.ast.prims.mungers;
import water.fvec.*;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValNums;
import java.util.ArrayList;
/**
* Get column indexes of an H2OFrame that are of a certain data type.
* <p/>
 * This will take an H2OFrame and return all column indexes based on a specific data type (numeric, categorical,
 * string, time, uuid, and bad)
* <p/>
*
* @author navdeepgill
* @version 3.10
* @since 3.10
*
*/
public class AstColumnsByType extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary","type"};
}
private enum DType {Numeric,Categorical,String,Time,UUID,Bad}
@Override
public String str() {
return "columnsByType";
}
@Override
public int nargs() {
return 1 + 2;
} //ary type
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
String type = stk.track(asts[2].exec(env)).getStr();
DType dtype;
switch (type) {
case "numeric": // Numeric, but not categorical or time
dtype = DType.Numeric;
break;
case "categorical": // Integer, with a categorical/factor String mapping
dtype = DType.Categorical;
break;
case "string": // String
dtype = DType.String;
break;
case "time": // Long msec since the Unix Epoch - with a variety of display/parse options
dtype = DType.Time;
break;
case "uuid": // UUID
dtype = DType.UUID;
break;
case "bad": // No none-NA rows (triple negative! all NAs or zero rows)
dtype = DType.Bad;
break;
default:
throw new IllegalArgumentException("unknown data type to filter by: " + type);
}
Vec vecs[] = fr.vecs();
ArrayList<Double> idxs = new ArrayList<>();
for (double i = 0; i < fr.numCols(); i++)
if (dtype.equals(DType.Numeric) && vecs[(int) i].isNumeric()){
idxs.add(i);
}
else if (dtype.equals(DType.Categorical) && vecs[(int) i].isCategorical()){
idxs.add(i);
}
else if (dtype.equals(DType.String) && vecs[(int) i].isString()){
idxs.add(i);
}
else if (dtype.equals(DType.Time) && vecs[(int) i].isTime()){
idxs.add(i);
}
else if (dtype.equals(DType.UUID) && vecs[(int) i].isUUID()){
idxs.add(i);
} else if (dtype.equals(DType.Bad) && vecs[(int) i].isBad()){
idxs.add(i);
}
double[] include_cols = new double[idxs.size()];
int i = 0;
for (double d : idxs)
include_cols[i++] = (int) d;
return new ValNums(include_cols);
}
}
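
A sketch (assumptions as before); the result is a list of column indices rather than a frame:

```java
import water.rapids.Rapids;

// (columnsByType df "numeric"): indices of the numeric columns, as doubles.
double[] numericCols = Rapids.exec("(columnsByType df \"numeric\")").getNums();
System.out.println(java.util.Arrays.toString(numericCols));
```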

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstCut.java

package water.rapids.ast.prims.mungers;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.*;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import water.rapids.ast.params.AstStr;
import water.rapids.ast.params.AstStrList;
import water.rapids.vals.ValFrame;
import water.util.MathUtils;
import java.util.Arrays;
public class AstCut extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "breaks", "labels", "include_lowest", "right", "digits"};
}
@Override
public int nargs() {
return 1 + 6;
} // (cut x breaks labels include_lowest right digits)
@Override
public String str() {
return "cut";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
double[] cuts = check(asts[2]);
Arrays.sort(cuts);
String[] labels = check2(asts[3]);
final boolean lowest = asts[4].exec(env).getNum() == 1;
final boolean rite = asts[5].exec(env).getNum() == 1;
final int digits = Math.min((int) asts[6].exec(env).getNum(), 12); // cap at 12
if (fr.vecs().length != 1 || fr.vecs()[0].isCategorical())
throw new IllegalArgumentException("First argument must be a numeric column vector");
double fmin = fr.anyVec().min();
double fmax = fr.anyVec().max();
int nbins = cuts.length - 1; // c(0,10,100) -> 2 bins (0,10] U (10, 100]
double width;
if (nbins == 0) {
if (cuts[0] < 2) throw new IllegalArgumentException("The number of cuts must be >= 2. Got: " + cuts[0]);
// in this case, cut the vec into _cuts[0] many pieces of equal length
nbins = (int) Math.floor(cuts[0]);
width = (fmax - fmin) / nbins;
cuts = new double[nbins];
cuts[0] = fmin - 0.001 * (fmax - fmin);
for (int i = 1; i < cuts.length; ++i)
cuts[i] = (i == cuts.length - 1) ? (fmax + 0.001 * (fmax - fmin)) : (fmin + i * width);
}
// width = (fmax - fmin)/nbins;
// if(width == 0) throw new IllegalArgumentException("Data vector is constant!");
if (labels != null && labels.length != nbins)
throw new IllegalArgumentException("`labels` vector does not match the number of cuts.");
// Construct domain names from _labels or bin intervals if _labels is null
final double cutz[] = cuts;
// first round _cuts to dig.lab decimals: example floor(2.676*100 + 0.5) / 100
for (int i = 0; i < cuts.length; ++i)
cuts[i] = Math.floor(cuts[i] * Math.pow(10, digits) + 0.5) / Math.pow(10, digits);
String[][] domains = new String[1][nbins];
if (labels == null) {
domains[0][0] = (lowest ? "[" : left(rite)) + cuts[0] + "," + cuts[1] + rite(rite);
for (int i = 1; i < (cuts.length - 1); ++i) domains[0][i] = left(rite) + cuts[i] + "," + cuts[i + 1] + rite(rite);
} else domains[0] = labels;
Frame fr2 = new MRTask() {
@Override
public void map(Chunk c, NewChunk nc) {
int rows = c._len;
for (int r = 0; r < rows; ++r) {
double x = c.atd(r);
if (Double.isNaN(x) || (lowest && x < cutz[0])
|| (!lowest && (x < cutz[0] || MathUtils.equalsWithinOneSmallUlp(x, cutz[0])))
|| (rite && x > cutz[cutz.length - 1])
|| (!rite && (x > cutz[cutz.length - 1] || MathUtils.equalsWithinOneSmallUlp(x, cutz[cutz.length - 1]))))
nc.addNum(Double.NaN);
else {
for (int i = 1; i < cutz.length; ++i) {
if (rite) {
if (x <= cutz[i]) {
nc.addNum(i - 1);
break;
}
} else if (x < cutz[i]) {
nc.addNum(i - 1);
break;
}
}
}
}
}
}.doAll(1, Vec.T_NUM, fr).outputFrame(fr.names(), domains);
return new ValFrame(fr2);
}
private String left(boolean rite) {
return rite ? "(" : "[";
}
private String rite(boolean rite) {
return rite ? "]" : ")";
}
private double[] check(AstRoot ast) {
double[] n;
if (ast instanceof AstNumList) n = ((AstNumList) ast).expand();
else if (ast instanceof AstNum)
n = new double[]{((AstNum) ast).getNum()}; // this is the number of breaks wanted...
else throw new IllegalArgumentException("Requires a number-list, but found a " + ast.getClass());
return n;
}
private String[] check2(AstRoot ast) {
String[] s = null;
if (ast instanceof AstStrList) s = ((AstStrList) ast)._strs;
else if (ast instanceof AstStr) s = new String[]{ast.str()};
return s;
}
}
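
A sketch (assumptions as before, plus `df` must be a single numeric column). The breaks `[0 10 100]` define two bins, so two labels are required; the two trailing `1`s set include_lowest and right to true, and `3` is the rounding-digits cap:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (cut x breaks labels include_lowest right digits)
Frame binned = Rapids.exec(
    "(cut df [0 10 100] [\"low\" \"high\"] 1 1 3)").getFrame();
System.out.println(java.util.Arrays.toString(binned.vec(0).domain()));
```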

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstDdply.java

package water.rapids.ast.prims.mungers;
import water.*;
import water.fvec.*;
import water.rapids.*;
import water.rapids.ast.*;
import water.rapids.ast.params.AstNumList;
import water.rapids.vals.ValFrame;
import water.util.*;
import java.util.Arrays;
/**
* Ddply
* Group the rows of 'data' by unique combinations of '[group-by-cols]'.
* Apply any function 'fcn' to a group Frame, which must accept a Frame (and
* any "extra" arguments) and return a single scalar value.
* <p/>
* Returns a set of grouping columns, with the single answer column, with one
* row per unique group.
*/
public class AstDdply extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "groupByCols", "fun"};
}
@Override
public int nargs() {
return 1 + 3;
} // (ddply data [group-by-cols] fcn )
@Override
public String str() {
return "ddply";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
int ncols = fr.numCols();
AstNumList groupby = AstGroup.check(ncols, asts[2]);
int[] gbCols = groupby.expand4();
AstRoot fun = asts[3].exec(env).getFun();
AstFunction scope = env._scope; // Current execution scope; needed to lookup variables
// Pass 1: Find all the groups (and count rows-per-group)
IcedHashSet<AstGroup.G> gss = AstGroup.doGroups(fr, gbCols, AstGroup.aggNRows());
final AstGroup.G[] grps = gss.toArray(new AstGroup.G[gss.size()]);
// apply an ORDER by here...
final int[] ordCols = new AstNumList(0, gbCols.length).expand4();
Arrays.sort(grps, new java.util.Comparator<AstGroup.G>() {
// Compare 2 groups. Iterate down _gs, stop when _gs[i] > that._gs[i],
// or _gs[i] < that._gs[i]. Order by various columns specified by
// _orderByCols. NaN is treated as least
@Override
public int compare(AstGroup.G g1, AstGroup.G g2) {
for (int i : ordCols) {
if (Double.isNaN(g1._gs[i]) && !Double.isNaN(g2._gs[i])) return -1;
if (!Double.isNaN(g1._gs[i]) && Double.isNaN(g2._gs[i])) return 1;
if (g1._gs[i] != g2._gs[i]) return g1._gs[i] < g2._gs[i] ? -1 : 1;
}
return 0;
}
// I do not believe sort() calls equals() at this time, so no need to implement
@Override
public boolean equals(Object o) {
throw H2O.unimpl();
}
});
// Uniquely number the groups
for (int gnum = 0; gnum < grps.length; gnum++) grps[gnum]._dss[0][0] = gnum;
// Pass 2: Build all the groups, building 1 Vec per group, with exactly the
// same Chunk layout, except each Chunk holds the filtered row numbers: a
// list of the Chunk-relative row numbers for that group in the original
// data Chunk. Each Vec will have a *different* number of rows.
Vec[] vgrps = new BuildGroup(gbCols, gss).doAll(gss.size(), Vec.T_NUM, fr).close();
// Pass 3: For each group, build a full frame for the group, run the
// function on it and tear the frame down.
final RemoteRapids[] remoteTasks = new RemoteRapids[gss.size()]; // gather up the remote tasks...
Futures fs = new Futures();
for (int i = 0; i < remoteTasks.length; i++)
fs.add(RPC.call(vgrps[i]._key.home_node(), remoteTasks[i] = new RemoteRapids(fr, vgrps[i]._key, fun, scope)));
fs.blockForPending();
// Build the output!
final double[] res0 = remoteTasks[0]._result;
String[] fcnames = new String[res0.length];
for (int i = 0; i < res0.length; i++)
fcnames[i] = "ddply_C" + (i + 1);
MRTask mrfill = new MRTask() {
@Override
public void map(Chunk[] c, NewChunk[] ncs) {
int start = (int) c[0].start();
for (int i = 0; i < c[0]._len; ++i) {
AstGroup.G g = grps[i + start]; // One Group per row
int j;
for (j = 0; j < g._gs.length; j++) // The Group Key, as a row
ncs[j].addNum(g._gs[j]);
double[] res = remoteTasks[i + start]._result;
for (int a = 0; a < res0.length; a++)
ncs[j++].addNum(res[a]);
}
}
};
Frame f = AstGroup.buildOutput(gbCols, res0.length, fr, fcnames, gss.size(), mrfill);
return new ValFrame(f);
}
// --------------------------------------------------------------------------
// Build all the groups, building 1 Vec per group, with exactly the same
// Chunk layout, except each Chunk holds the filtered row numbers: a list
// of the Chunk-relative row numbers for that group in the original data Chunk.
private static class BuildGroup extends MRTask<BuildGroup> {
final IcedHashSet<AstGroup.G> _gss;
final int[] _gbCols;
BuildGroup(int[] gbCols, IcedHashSet<AstGroup.G> gss) {
_gbCols = gbCols;
_gss = gss;
}
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
AstGroup.G gWork = new AstGroup.G(_gbCols.length, null); // Working Group
for (int row = 0; row < cs[0]._len; row++) {
gWork.fill(row, cs, _gbCols); // Fill the worker Group for the hashtable lookup
int gnum = (int) _gss.get(gWork)._dss[0][0]; // Existing group number
ncs[gnum].addNum(row); // gather row-numbers per-chunk per-group
}
}
// Gather all the output Vecs. Note that each Vec has a *different* number
// of rows, and taken together they do NOT make a valid Frame.
Vec[] close() {
Futures fs = new Futures();
Vec[] vgrps = new Vec[_gss.size()];
for (int i = 0; i < vgrps.length; i++)
vgrps[i] = _appendables[i].close(_appendables[i].compute_rowLayout(), fs);
fs.blockForPending();
return vgrps;
}
}
// --------------------------------------------------------------------------
private static class RemoteRapids extends DTask<RemoteRapids> {
private Frame _data; // Data frame
private Key<Vec> _vKey; // the group to process...
private AstRoot _fun; // the ast to execute on the group
private AstFunction _scope; // Execution environment
private double[] _result; // result is 1 row per group!
RemoteRapids(Frame data, Key<Vec> vKey, AstRoot fun, AstFunction scope) {
_data = data;
_vKey = vKey;
_fun = fun;
_scope = scope;
}
@Override
public void compute2() {
assert _vKey.home();
final Vec gvec = DKV.getGet(_vKey);
assert gvec.group().equals(_data.anyVec().group());
// Make a group Frame, using wrapped Vecs wrapping the original data
// frame with the filtered Vec passed in. Run the function, getting a
// scalar or a 1-row Frame back out. Delete the group Frame. Return the
// 1-row Frame as a double[] of results for this group.
// Make the subset Frame Vecs, no chunks yet
Key<Vec>[] groupKeys = gvec.group().addVecs(_data.numCols());
final Vec[] groupVecs = new Vec[_data.numCols()];
Futures fs = new Futures();
for (int i = 0; i < _data.numCols(); i++)
DKV.put(groupVecs[i] = new Vec(groupKeys[i], gvec._rowLayout, gvec.domain(), gvec.get_type()), fs);
fs.blockForPending();
// Fill in the chunks
new MRTask() {
@Override
public void setupLocal() {
Vec[] data_vecs = _data.vecs();
for (int i = 0; i < gvec.nChunks(); i++)
if (data_vecs[0].chunkKey(i).home()) {
Chunk rowchk = gvec.chunkForChunkIdx(i);
for (int col = 0; col < data_vecs.length; col++)
DKV.put(Vec.chunkKey(groupVecs[col]._key, i), new SubsetChunk(data_vecs[col].chunkForChunkIdx(i), rowchk, groupVecs[col]), _fs);
}
}
}.doAllNodes();
Frame groupFrame = new Frame(_data._names, groupVecs);
// Now run the function on the group frame
Session ses = new Session();
// Build an environment with proper lookup scope, and execute in a temp session
Val val = ses.exec(new AstExec(new AstRoot[]{_fun, new AstFrame(groupFrame)}), _scope);
val = ses.end(val);
// Result into a double[]
if (val.isFrame()) {
Frame res = val.getFrame();
if (res.numRows() != 1)
throw new IllegalArgumentException("ddply must return a 1-row (many column) frame, found " + res.numRows());
_result = new double[res.numCols()];
for (int i = 0; i < res.numCols(); i++)
_result[i] = res.vec(i).at(0);
res.remove();
} else if (val.isNum())
_result = new double[]{val.getNum()};
else if (val.isNums())
_result = val.getNums();
else
throw new IllegalArgumentException("ddply must return either a number or a frame, not a " + val);
// Cleanup
groupFrame.delete(); // Delete the Frame holding WrappedVecs over SubsetChunks
gvec.remove(); // Delete the group-defining Vec
_data = null; // Nuke to avoid returning (not for GC)
_vKey = null; // Nuke to avoid returning (not for GC)
_fun = null; // Nuke to avoid returning (not for GC)
_scope = null; // Nuke to avoid returning (not for GC)
// And done!
tryComplete();
}
}
}

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstFillNA.java

package water.rapids.ast.prims.mungers;
import water.H2O;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.*;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import java.util.Arrays;
/**
* Fill NAs from previous or future values.
* <p/> Different from impute in that a new Frame must be returned. It is
* not done in place because an MRTask would create race conditions that prevent
* correct results. This function allows a limit on how many consecutive NAs to fill.
* <p/>
* @param method Direction to fill: either forward or backward
* @param axis Along which axis to fill: 0 for columnar, 1 for row
* @param limit Max number of consecutive NAs to fill
* @return New Frame with filled values
*/
public class AstFillNA extends AstPrimitive {
private static final String METHOD_BACKWARD = "backward";
@Override
public String[] args() {
return new String[]{"ary", "method", "axis", "limit" };
}
@Override
public String str() {
return "h2o.fillna";
}
@Override
public int nargs() {
return 1 + 4;
} // (h2o.fillna ary method axis limit)
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
// Argument parsing and sanity checking
// Whole frame being imputed
Frame fr = stk.track(asts[1].exec(env)).getFrame();
// Column within frame being imputed
final String method = asts[2].exec(env).getStr();
if (!(Arrays.asList("forward","backward")).contains(method.toLowerCase()))
throw new IllegalArgumentException("Method must be forward or backward");
final int axis = (int) asts[3].exec(env).getNum();
if (!(Arrays.asList(0,1)).contains(axis))
throw new IllegalArgumentException("Axis must be 0 for columnar 1 for row");
final int limit = (int) asts[4].exec(env).getNum();
assert limit >= 0:"The maxlen/limit parameter should be >= 0.";
if (limit == 0) // fast short cut to do nothing if user set zero limit
return new ValFrame(fr.deepCopy(Key.make().toString()));
Frame res;
if (axis == 0) {
res = (METHOD_BACKWARD.equalsIgnoreCase(method.trim()))?
new FillBackwardTaskCol(limit, fr.anyVec().nChunks()).doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame():
new FillForwardTaskCol(limit).doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame();
} else {
res = (METHOD_BACKWARD.equalsIgnoreCase(method.trim()))?
new FillBackwardTaskRow(limit).doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame():
new FillForwardTaskRow(limit).doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame();
}
res._key = Key.<Frame>make();
return new ValFrame(res);
}
private static class FillForwardTaskRow extends MRTask<FillForwardTaskRow> {
private final int _maxLen;
FillForwardTaskRow(int maxLen) { _maxLen = maxLen; }
@Override
public void map(Chunk cs[], NewChunk nc[]) {
for (int i = 0; i < cs[0]._len; i++) {
int fillCount = 0;
nc[0].addNum(cs[0].atd(i));
for (int j = 1; j < cs.length; j++) {
if (cs[j].isNA(i)) {
if (!nc[j-1].isNA(i) && fillCount < _maxLen) {
nc[j].addNum(nc[j-1].atd(i));
fillCount++;
} else {
nc[j].addNA();
}
} else {
if (fillCount > 0) fillCount = 0;
nc[j].addNum(cs[j].atd(i));
}
}
}
}
}
private static class FillBackwardTaskRow extends MRTask<FillBackwardTaskRow> {
private final int _maxLen;
FillBackwardTaskRow(int maxLen) { _maxLen = maxLen;}
@Override
public void map(Chunk cs[], NewChunk nc[]) {
int lastCol = cs.length-1; // index of last column in the chunk
for (int i = 0; i < cs[0]._len; i++) { // go through each row
int fillCount = 0;
nc[lastCol].addNum(cs[lastCol].atd(i)); // copy over last row element regardless
for (int j = lastCol-1; j >= 0; j--) { // going backwards
if (cs[j].isNA(i)) {
int lastNonNACol = j+1;
if (!nc[lastNonNACol].isNA(i) && fillCount < _maxLen) {
nc[j].addNum(nc[lastNonNACol].atd(i));
fillCount++;
} else {
nc[j].addNA(); // keep the NaNs, ran out of maxLen
}
} else {
if (fillCount > 0) fillCount = 0; // reset fillCount after encountering a non-NaN
nc[j].addNum(cs[j].atd(i)); // no NA filling needed, element is not NaN
}
}
}
}
}
private static class FillForwardTaskCol extends MRTask<FillForwardTaskCol> {
private final int _maxLen;
FillForwardTaskCol(int maxLen) {
_maxLen = maxLen;
}
@Override
public void map(Chunk cs[], NewChunk nc[]) {
for (int i = 0; i < cs.length; i++) {
for (int j = 0; j < cs[i]._len; j++) {
// current chunk element is NA
if (cs[i].isNA(j)) {
// search back to prev chunk if we are < maxLen away from j = 0
if (j < _maxLen) {
int searchCount = 0;
Chunk searchChunk = cs[i];
int searchStartIdx = j;
int searchIdx = 0;
// find the previous valid value up to maxLen distance
while (searchChunk != null && searchCount < _maxLen && searchChunk.isNA(searchStartIdx - searchIdx)) {
if (searchStartIdx - searchCount == 0) {
//reached the start of the chunk
if (searchChunk.cidx() > 0) {
searchChunk = searchChunk.vec().chunkForChunkIdx(searchChunk.cidx() - 1);
searchStartIdx = searchChunk.len() - 1;
searchIdx = 0;
searchCount++;
continue;
} else {
searchChunk = null;
}
}
searchIdx++;
searchCount++;
}
if (searchChunk == null) {
nc[i].addNA();
} else {
// fill forward as much as you need and skip j forward by that amount
double fillVal = searchChunk.atd(searchStartIdx - searchIdx);
int fillCount = _maxLen - searchCount;
fillCount = Math.min(fillCount,cs[i]._len);
// How many fills forward from this row is ok?
int maxFill = 1;
int k = 1;
while(cs[i].isNA(j+k)) {
k++;
maxFill++;
}
fillCount = Math.min(maxFill, fillCount);
// We've searched back, but maxLen isn't big enough to propagate a value here.
if (fillCount < 0)
nc[i].addNA();
else if (fillCount == 0)
nc[i].addNum(fillVal);
else
for (int f = 0; f<fillCount; f++) { nc[i].addNum(fillVal); }
fillCount = Math.max(1,fillCount);
j += (fillCount - 1);
}
} else {
// otherwise keep moving forward
nc[i].addNA();
}
} else if (j < cs[i]._len -1 && !cs[i].isNA(j) && cs[i].isNA(j+1)) {
// current chunk element not NA but next one is
// fill as much as you have to
double fillVal = cs[i].atd(j);
nc[i].addNum(fillVal);
int fillCount = 0; j++;
while (j+fillCount < cs[i]._len && fillCount < _maxLen && cs[i].isNA(j+fillCount)) {
nc[i].addNum(fillVal);
fillCount++;
}
j += (fillCount - 1);
} else {
// current chunk element not NA next one not NA
// keep moving forward
nc[i].addNum(cs[i].atd(j));
}
}
}
}
}
private static class FillBackwardTaskCol extends MRTask<FillBackwardTaskCol> {
private final int _maxLen;
private final int _lastChunkIndex;
FillBackwardTaskCol(int maxLen, int chunksNum) {
_maxLen = maxLen;
_lastChunkIndex= chunksNum-1;
}
@Override
public void map(Chunk cs[], NewChunk nc[]) {
int lastRowIndex = cs[0]._len-1;
int currentCidx = cs[0].cidx();
double[] newChunkInfo = new double[cs[0].len()]; // allocate once per column chunk
int chkLen = cs[0].len();
for (int i = 0; i < cs.length; i++) {
int naBlockRowStart = -1; // row where we see our first NA
int lastNonNaNRow = -1; // indicate row where the element is not NaN
int rowIndex = lastRowIndex;
int naBlockLength = 0;
double fillVal=Double.NaN;
int fillLen = 0; // number of values to be filled for NAs
while (rowIndex > -1) { // search backwards from end of chunk
if (cs[i].isNA(rowIndex)) { // found an NA in a row
naBlockRowStart= rowIndex;
rowIndex--; // drop the row index
naBlockLength++;
while ((rowIndex > -1) && cs[i].isNA(rowIndex)) { // want to find all NA blocks in this chunk
naBlockLength++;
rowIndex--;
}
// done finding a NA block in the chunk column
if (lastNonNaNRow < 0) { // no non-NaN element found in this chunk yet; look in the next chunk
if (currentCidx == _lastChunkIndex) { // this is the last chunk, nothing to look back to
fillLen = 0;
} else {
fillLen = _maxLen;
boolean foundFillVal = false;
for (int cIndex = currentCidx+1; cIndex <= _lastChunkIndex; cIndex++) {
if (foundFillVal) // found fill value in next chunk
break;
// search the next chunk for nonNAs
Chunk nextChunk = cs[i].vec().chunkForChunkIdx(cIndex); // grab the next chunk
int nChunkLen = nextChunk.len();
for (int rIndex=0; rIndex < nChunkLen; rIndex++) {
if (nextChunk.isNA(rIndex)) {
fillLen--; // reduce fillLen here
} else { // found a No NA row
fillVal = nextChunk.atd(rIndex);
foundFillVal = true;
break;
}
if (fillLen < 1) { // no fill value found within maxLen in this chunk
break;
}
}
}
}
} else { // found a non-NaN element in this chunk; copy its value over if valid
fillVal = cs[i].atd(lastNonNaNRow);
fillLen = _maxLen; // can fill as many as the maxLen here
}
// fill the chunk with fillVal while fillLen > 0; otherwise, fill it with NaNs
int naRowEnd = naBlockRowStart-naBlockLength;
for (int naRow = naBlockRowStart; naRow > naRowEnd; naRow--) {
if (fillLen > 0) {
newChunkInfo[naRow] = fillVal; // nc[i].addNum(fillVal);
fillLen--;
} else {
newChunkInfo[naRow] = Double.NaN; // nc[i].addNA();
}
}
// finished filling in the NAs, need to reset counts
naBlockLength=0;
lastNonNaNRow = -1;
} else {
newChunkInfo[rowIndex] = cs[i].atd(rowIndex); // nc[i].addNum(cs[i].atd(rowIndex));
lastNonNaNRow = rowIndex;
rowIndex--;
naBlockLength=0;
}
}
// copy info from newChunkInfo to NewChunk
for (int rindex=0; rindex < chkLen; rindex++) {
nc[i].addNum(newChunkInfo[rindex]);
}
}
}
}
}
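
A sketch (assumptions as before). With method "forward", axis 0, and limit 2, each run of NAs in a column is filled downward from the previous non-NA value, at most two rows deep:

```java
import water.fvec.Frame;
import water.rapids.Rapids;

// (h2o.fillna ary method axis limit)
Frame filled = Rapids.exec("(h2o.fillna df \"forward\" 0 2)").getFrame();
```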

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstFilterNaCols.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValNums;
import water.rapids.ast.AstPrimitive;
import java.util.ArrayList;
/**
*/
public class AstFilterNaCols extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "fraction"};
}
/* (filterNACols frame frac) */
@Override
public int nargs() {
return 1 + 2;
}
@Override
public String str() {
return "filterNACols";
}
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
double frac = asts[2].exec(env).getNum();
double nrow = fr.numRows() * frac;
Vec vecs[] = fr.vecs();
ArrayList<Double> idxs = new ArrayList<>();
for (double i = 0; i < fr.numCols(); i++)
if (vecs[(int) i].naCnt() < nrow)
idxs.add(i);
double[] include_cols = new double[idxs.size()];
int i = 0;
for (double d : idxs)
include_cols[i++] = d;
return new ValNums(include_cols);
}
}
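
A sketch (assumptions as before); returns the indices of columns whose NA count is below the given fraction of rows:

```java
import water.rapids.Rapids;

// (filterNACols df 0.5): columns with fewer than 50% missing values.
double[] keep = Rapids.exec("(filterNACols df 0.5)").getNums();
System.out.println(java.util.Arrays.toString(keep));
```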

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstFlatten.java

package water.rapids.ast.prims.mungers;
import water.H2O;
import water.fvec.Frame;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.rapids.*;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValNum;
import water.rapids.vals.ValStr;
/**
*/
public class AstFlatten extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (flatten fr)
@Override
public String str() {
return "flatten";
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (fr.numCols() != 1 || fr.numRows() != 1) return new ValFrame(fr); // did not flatten
Vec vec = fr.anyVec();
switch (vec.get_type()) {
case Vec.T_BAD:
case Vec.T_NUM:
return new ValNum(vec.at(0));
case Vec.T_TIME:
return vec.isNA(0)?new ValNum(Double.NaN) : new ValNum(vec.at8(0)); // check for missing values
case Vec.T_STR:
return vec.isNA(0)?new ValStr("NA") : new ValStr(vec.atStr(new BufferedString(), 0).toString());
case Vec.T_CAT: // check for missing values
return vec.isNA(0)?new ValStr("NA") : new ValStr(vec.factor(vec.at8(0)));
default:
throw H2O.unimpl("The type of vector: " + vec.get_type_str() + " is not supported by " + str());
}
}
}
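
A sketch (assumptions as before, plus `df11` is a hypothetical 1x1 frame; anything larger is returned unchanged per the guard above):

```java
import water.rapids.Rapids;
import water.rapids.Val;

// (flatten df11): a 1x1 numeric frame flattens to a scalar Val.
Val v = Rapids.exec("(flatten df11)");
System.out.println(v.isNum() ? v.getNum() : v.getStr());
```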

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstGetrow.java

package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValRow;
/**
*/
public class AstGetrow extends AstPrimitive {
@Override public String[] args() {
return new String[]{"frame"};
}
@Override public int nargs() {
return 1 + 1;
}
@Override public String str() {
return "getrow";
}
@Override public String example() {
return "(getrow frame)";
}
@Override public String description() {
return "For a single-row frame, this function returns the contents of that frame as a ValRow. " +
"All non-numeric and non-time columns will be converted into NaNs. " +
"This function does not work for frames that have more than 1 row.";
}
@Override
public ValRow apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (fr.numRows() != 1)
throw new IllegalArgumentException("The frame should have only 1 row; found " + fr.numRows() + " rows.");
double[] res = new double[fr.numCols()];
for (int i = 0; i < res.length; i++) {
Vec v = fr.vec(i);
res[i] = v.isNumeric()? v.at(0) : v.isTime()? v.at8(0) : Double.NaN;
}
return new ValRow(res, null);
}
}
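
A sketch (assumptions as before, with `df1` a hypothetical one-row frame):

```java
import water.rapids.Rapids;

// (getrow df1): the single row as a double[], with non-numeric,
// non-time columns returned as NaN.
double[] row = Rapids.exec("(getrow df1)").getRow();
System.out.println(java.util.Arrays.toString(row));
```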

File: java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstGroup.java

package water.rapids.ast.prims.mungers;
import water.*;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.rapids.Env;
import water.rapids.Merge;
import water.rapids.Val;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValFun;
import water.util.ArrayUtils;
import water.util.IcedHashSet;
import water.util.Log;
import java.util.Arrays;
/**
* GroupBy
* Group the rows of 'data' by unique combinations of '[group-by-cols]'.
* Apply function 'fcn' to a Frame for each group, with a single column
* argument, and a NA-handling flag. Sets of tuples {fun,col,na} are allowed.
* <p/>
* 'fcn' must be a one of a small set of functions, all reductions, and 'GB'
* returns a row per unique group, with the first columns being the grouping
* columns, and the last column(s) the reduction result(s).
* <p/>
* Grouping can be performed on both numeric and string columns, but aggregation
* functions are limited to the ones listed in {@link AstGroup.FCN}.
* <p/>
 * The returned frame has the grouping columns first, followed by one column per aggregation result.
*/
public class AstGroup extends AstPrimitive {
private final boolean _per_node_aggregates;
public AstGroup() {
this(true);
}
public AstGroup(boolean perNodeAggregates) {
_per_node_aggregates = perNodeAggregates;
}
public enum NAHandling {ALL, RM, IGNORE}
// Functions handled by GroupBy
public enum FCN {
nrow() {
@Override
public void op(double[] d0s, double d1) {
d0s[0]++;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
d0s[0] += d1s[0];
}
@Override
public double postPass(double ds[], long n) {
return ds[0];
}
},
mean() {
@Override
public void op(double[] d0s, double d1) {
d0s[0] += d1;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
d0s[0] += d1s[0];
}
@Override
public double postPass(double ds[], long n) {
return ds[0] / n;
}
},
sum() {
@Override
public void op(double[] d0s, double d1) {
d0s[0] += d1;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
d0s[0] += d1s[0];
}
@Override
public double postPass(double ds[], long n) {
return ds[0];
}
},
sumSquares() {
@Override
public void op(double[] d0s, double d1) {
d0s[0] += d1 * d1;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
d0s[0] += d1s[0];
}
@Override
public double postPass(double ds[], long n) {
return ds[0];
}
},
var() {
@Override
public void op(double[] d0s, double d1) {
d0s[0] += d1 * d1;
d0s[1] += d1;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
ArrayUtils.add(d0s, d1s);
}
@Override
public double postPass(double ds[], long n) {
double numerator = ds[0] - ds[1] * ds[1] / n;
if (Math.abs(numerator) < 1e-5) numerator = 0;
return numerator / (n - 1);
}
@Override
public double[] initVal(int ignored) {
return new double[2]; /* 0 -> sum_squares; 1 -> sum*/
}
},
sdev() {
@Override
public void op(double[] d0s, double d1) {
d0s[0] += d1 * d1;
d0s[1] += d1;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
ArrayUtils.add(d0s, d1s);
}
@Override
public double postPass(double ds[], long n) {
double numerator = ds[0] - ds[1] * ds[1] / n;
if (Math.abs(numerator) < 1e-5) numerator = 0;
return Math.sqrt(numerator / (n - 1));
}
@Override
public double[] initVal(int ignored) {
return new double[2]; /* 0 -> sum_squares; 1 -> sum*/
}
},
min() {
@Override
public void op(double[] d0s, double d1) {
d0s[0] = Math.min(d0s[0], d1);
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
op(d0s, d1s[0]);
}
@Override
public double postPass(double ds[], long n) {
return ds[0];
}
@Override
public double[] initVal(int maxx) {
return new double[]{Double.MAX_VALUE};
}
},
max() {
@Override
public void op(double[] d0s, double d1) {
d0s[0] = Math.max(d0s[0], d1);
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
op(d0s, d1s[0]);
}
@Override
public double postPass(double ds[], long n) {
return ds[0];
}
@Override
public double[] initVal(int maxx) {
return new double[]{-Double.MAX_VALUE};
}
},
median() { // we will be doing our own thing here for median
@Override
public void op(double[] d0s, double d1) {
;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
;
}
@Override
public double postPass(double ds[], long n) {
return 0;
}
@Override
public double[] initVal(int maxx) {
return new double[maxx];
}
},
mode() {
@Override
public void op(double[] d0s, double d1) {
d0s[(int) d1]++;
}
@Override
public void atomic_op(double[] d0s, double[] d1s) {
ArrayUtils.add(d0s, d1s);
}
@Override
public double postPass(double ds[], long n) {
return ArrayUtils.maxIndex(ds);
}
@Override
public double[] initVal(int maxx) {
return new double[maxx];
}
},;
public abstract void op(double[] d0, double d1);
public abstract void atomic_op(double[] d0, double[] d1);
public abstract double postPass(double ds[], long n);
public double[] initVal(int maxx) {
return new double[]{0};
}
}
@Override
public int nargs() {
return -1;
} // (GB data [group-by-cols] {fcn col "na"}...)
@Override
public String[] args() {
return new String[]{"..."};
}
@Override
public String str() {
return "GB";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
int ncols = fr.numCols();
AstNumList groupby = check(ncols, asts[2]);
final int[] gbCols = groupby.expand4();
int validAggregatesCount = countNumberOfAggregates(fr, ncols, asts);
final AGG[] aggs = constructAggregates(fr, validAggregatesCount, env, asts);
return performGroupingWithAggregations(fr, gbCols, aggs);
}
/**
* Performs grouping and aggregations on given data.
*
* @param fr frame on which grouping should be performed
* @param gbCols indices of columns (of given frame) to be used for grouping.
* @param aggs aggregations to be performed on the grouped data.
*
* @return grouped data with aggregations results.
*/
public ValFrame performGroupingWithAggregations(Frame fr, int[] gbCols, AGG[] aggs) {
final boolean hasMedian = hasMedian(aggs);
final byte[] gbColsTypes = ArrayUtils.select(fr.types(), gbCols);
final IcedHashSet<G> gss = doGroups(fr, gbCols, gbColsTypes, aggs, hasMedian, _per_node_aggregates);
final G[] grps = gss.toArray(new G[gss.size()]);
applyOrdering(gbCols, gbColsTypes, grps);
final int medianActionsNeeded = hasMedian ? calculateMediansForGRPS(fr, gbCols, gbColsTypes, aggs, gss, grps) : -1;
MRTask mrFill = prepareMRFillTask(grps, aggs, gbColsTypes, medianActionsNeeded);
String[] fcNames = prepareFCNames(fr, aggs);
Frame f = buildOutput(gbCols, aggs.length, fr, fcNames, grps.length, mrFill);
return new ValFrame(f);
}
private static boolean hasMedian(AGG[] aggs) {
for (AGG agg : aggs)
if (FCN.median.equals(agg._fcn))
return true;
return false;
}
private MRTask prepareMRFillTask(final G[] grps, final AGG[] aggs, final byte[] gbColsTypes, final int medianCount) {
return new MRTask() {
@Override
public void map(Chunk[] c, NewChunk[] ncs) {
int start = (int) c[0].start();
for (int i = 0; i < c[0]._len; ++i) {
G g = grps[i + start]; // One Group per row
int gbColsCnt = g.colsCount();
int j;
int strIdx = 0;
int numIdx = 0;
for (j = 0; j < gbColsCnt; j++) {
if (gbColsTypes[j] == Vec.T_STR)
ncs[j].addStr(g._gsStr[strIdx++]);
else
ncs[j].addNum(g._gs[numIdx++]);
}
for (int a = 0; a < aggs.length; a++) {
if ((medianCount >=0) && g.medianR._isMedian[a])
ncs[j++].addNum(g.medianR._medians[a]);
else
ncs[j++].addNum(aggs[a]._fcn.postPass(g._dss[a], g._ns[a]));
}
}
}
};
}
private String[] prepareFCNames(Frame fr, AGG[] aggs) {
String[] fcnames = new String[aggs.length];
for (int i = 0; i < aggs.length; i++) {
if (aggs[i]._fcn != FCN.nrow) { // enum comparison; the old string != check relied on interning
fcnames[i] = aggs[i]._fcn.toString() + "_" + fr.name(aggs[i]._col);
} else {
fcnames[i] = aggs[i]._fcn.toString();
}
}
return fcnames;
}
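// For example (hypothetical column name): aggregating "sepal_len" with median
// yields the output column "median_sepal_len", while a row count stays just "nrow".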
// Count of aggregates; knock off the first 3 ASTs (GB data [group-by-cols]), then count the rest by triples.
private int countNumberOfAggregates(Frame fr, int numberOfColumns, AstRoot asts[]) {
int validGroupByCols = 0;
for (int idx=3; idx < asts.length; idx+=3) { // initial loop to count operations on valid columns, ignore String columns
AstNumList col = check(numberOfColumns, asts[idx + 1]);
if (col.cnt() != 1) throw new IllegalArgumentException("Group-By functions take only a single column");
int agg_col = (int) col.min(); // Aggregate column
if (fr.vec(agg_col).isString()) {
Log.warn("Column "+fr._names[agg_col]+" is a string column. Groupby operations will be skipped for this column.");
} else
validGroupByCols++;
}
return validGroupByCols;
}
private AGG[] constructAggregates(Frame fr, int numberOfAggregates, Env env, AstRoot asts[]) {
AGG[] aggs = new AGG[numberOfAggregates];
int ncols = fr.numCols();
int countCols = 0;
for (int idx = 3; idx < asts.length; idx += 3) {
Val v = asts[idx].exec(env);
String fn = v instanceof ValFun ? v.getFun().str() : v.getStr();
FCN fcn = FCN.valueOf(fn);
AstNumList col = check(ncols, asts[idx + 1]);
if (col.cnt() != 1) throw new IllegalArgumentException("Group-By functions take only a single column");
int agg_col = (int) col.min(); // Aggregate column
if (fcn == FCN.mode && !fr.vec(agg_col).isCategorical())
throw new IllegalArgumentException("Mode only allowed on categorical columns");
NAHandling na = NAHandling.valueOf(asts[idx + 2].exec(env).getStr().toUpperCase());
if (!fr.vec(agg_col).isString())
aggs[countCols++] = new AGG(fcn, agg_col, na, (int) fr.vec(agg_col).max() + 1);
}
return aggs;
}
private void applyOrdering(final int[] gbCols, final byte[] gbColsTypes, G[] grps) {
if (gbCols.length > 0)
Arrays.sort(grps, new java.util.Comparator<G>() {
// Compare 2 groups: walk the group-key values (string and numeric) in gbCols
// order and order by the first differing value; ties fall through to the next
// key column. NaN is treated as least.
@Override
public int compare(G g1, G g2) {
int strIdx = 0;
int numIdx = 0;
for (int gbColIdx = 0; gbColIdx < gbCols.length; gbColIdx++) {
if (gbColsTypes[gbColIdx] == Vec.T_STR) {
if (g1._gsStr[strIdx] == null && g2._gsStr[strIdx] == null) { strIdx++; continue; } // both missing: equal on this key
if (g1._gsStr[strIdx] != null && g2._gsStr[strIdx] == null) return -1;
if (g1._gsStr[strIdx] == null && g2._gsStr[strIdx] != null) return 1;
int res = g1._gsStr[strIdx].compareTo(g2._gsStr[strIdx]);
if (res != 0) return res;
strIdx++;
} else {
if (Double.isNaN(g1._gs[numIdx]) && !Double.isNaN(g2._gs[numIdx])) return -1;
if (!Double.isNaN(g1._gs[numIdx]) && Double.isNaN(g2._gs[numIdx])) return 1;
if (g1._gs[numIdx] != g2._gs[numIdx]) return g1._gs[numIdx] < g2._gs[numIdx] ? -1 : 1;
numIdx++;
}
}
return 0;
}
// I do not believe sort() calls equals() at this time, so no need to implement
@Override
public boolean equals(Object o) {
throw H2O.unimpl();
}
});
}
private int calculateMediansForGRPS(Frame fr, int[] gbCols, byte[] gbColsTypes, AGG[] aggs, IcedHashSet<G> gss, G[] grps) {
// median action exists, we do the following three things:
// 1. Find out how many columns over all groups we need to perform median on
// 2. Assign an index to the NewChunk that we will be storing the data for each median column for each group
// 3. Fill out the NewChunk for each column of each group
int numberOfMedianActionsNeeded = 0;
for (G g : grps) {
for (int index = 0; index < g.medianR._isMedian.length; index++) {
if (g.medianR._isMedian[index]) {
g.medianR._newChunkCols[index] = numberOfMedianActionsNeeded++;
}
}
}
BuildGroup buildMedians = new BuildGroup(gbCols, gbColsTypes, aggs, gss, grps, numberOfMedianActionsNeeded);
Vec[] groupChunks = buildMedians.doAll(numberOfMedianActionsNeeded, Vec.T_NUM, fr).close();
buildMedians.calcMedian(groupChunks);
return numberOfMedianActionsNeeded;
}
// Argument check helper
public static AstNumList check(long dstX, AstRoot ast) {
// Sanity check vs dst. To simplify logic, jam the 1 col/row case in as a AstNumList
AstNumList dim;
if (ast instanceof AstNumList) dim = (AstNumList) ast;
else if (ast instanceof AstNum) dim = new AstNumList(((AstNum) ast).getNum());
else throw new IllegalArgumentException("Requires a number-list, but found a " + ast.getClass());
if (dim.isEmpty()) return dim; // Allow empty
for (int col : dim.expand4())
if (!(0 <= col && col < dstX))
throw new IllegalArgumentException("Selection must be an integer from 0 to " + dstX);
return dim;
}
// Do all the grouping work. Find groups in frame 'fr', grouped according to
// the selected 'gbCols' columns, and for each group compute aggregate
// results using 'aggs'. Return an array of groups, with the aggregate results.
public static IcedHashSet<G> doGroups(Frame fr, int[] gbCols, AGG[] aggs) {
final byte[] gbColsTypes = ArrayUtils.select(fr.types(), gbCols);
return doGroups(fr, gbCols, gbColsTypes, aggs, false, true);
}
public static IcedHashSet<G> doGroups(Frame fr, int[] gbCols, byte[] gbColsTypes, AGG[] aggs, boolean hasMedian, boolean perNodeAggregates) {
// do the group by work now
long start = System.currentTimeMillis();
GBTask<?> p1 = makeGBTask(perNodeAggregates, gbCols, gbColsTypes, aggs, hasMedian).doAll(fr);
Log.info("Group By Task done in " + (System.currentTimeMillis() - start) / 1000. + " (s)");
return p1.getGroups();
}
private static GBTask<? extends GBTask> makeGBTask(boolean perNodeAggregates, int[] gbCols, byte[] gbColsTypes, AGG[] aggs, boolean hasMedian) {
if (perNodeAggregates)
return new GBTaskAggsPerNode(gbCols, gbColsTypes, aggs, hasMedian);
else
return new GBTaskAggsPerMap(gbCols, gbColsTypes, aggs, hasMedian);
}
// Utility for AstDdply; return a single aggregate for counting rows-per-group
public static AGG[] aggNRows() {
return new AGG[]{new AGG(FCN.nrow, 0, NAHandling.IGNORE, 0)};
}
// Build output frame from the multi-column results
public static Frame buildOutput(int[] gbCols, int noutCols, Frame fr, String[] fcnames, int ngrps, MRTask mrfill) {
// Build the output!
// the names of columns
final int nCols = gbCols.length + noutCols;
String[] names = new String[nCols];
String[][] domains = new String[nCols][];
byte[] types = new byte[nCols];
// GroupBy columns
for (int i = 0; i < gbCols.length; i++) {
names[i] = fr.name(gbCols[i]);
domains[i] = fr.domains()[gbCols[i]];
types[i] = fr.vec(names[i]).get_type();
}
// Output columns of GroupBy functions
for (int i = 0; i < fcnames.length; i++) {
names[i + gbCols.length] = fcnames[i];
types[i + gbCols.length] = Vec.T_NUM;
}
Vec v = Vec.makeZero(ngrps); // dummy layout vec
// Convert the output arrays into a Frame, also doing the post-pass work
Frame f = mrfill.doAll(types, new Frame(v)).outputFrame(names, domains);
v.remove();
return f;
}
// Description of a single aggregate, including the reduction function, the
// column and specified NA handling
public static class AGG extends Iced {
final FCN _fcn;
public final int _col;
final NAHandling _na;
final int _maxx; // One more than the largest integer in this column (histogram size for mode)
public AGG(FCN fcn, int col, NAHandling na, int maxx) {
_fcn = fcn;
_col = col;
_na = na;
_maxx = maxx;
}
// Update the array pair {ds[i],ns[i]} with d1.
// ds is the reduction array
// ns is the element count
public void op(double[][] d0ss, long[] n0s, int i, double d1) {
// Normal number or ALL : call op()
if (!Double.isNaN(d1) || _na == NAHandling.ALL) _fcn.op(d0ss[i], d1);
// Normal number or IGNORE: bump count; RM: do not bump count
if (!Double.isNaN(d1) || _na == NAHandling.IGNORE) n0s[i]++;
}
// Atomically update the array pair {dss[i],ns[i]} with the pair {d1,n1}.
// Same as op() above, but called racily and updates atomically.
public void atomic_op(double[][] d0ss, long[] n0s, int i, double[] d1s, long n1) {
synchronized (d0ss[i]) {
_fcn.atomic_op(d0ss[i], d1s);
n0s[i] += n1;
}
}
public double[] initVal() {
return _fcn.initVal(_maxx);
}
}
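// Illustrative sketch (not part of the original source) of the NA gating in op():
//   AGG agg = new AGG(FCN.median, /*col=*/2, NAHandling.IGNORE, /*maxx=*/0);
//   double[][] dss = { agg.initVal() };
//   long[] ns = { 0 };
//   agg.op(dss, ns, 0, 1.5);        // normal value: _fcn.op runs, ns[0] becomes 1
//   agg.op(dss, ns, 0, Double.NaN); // NA + IGNORE: _fcn.op is skipped but ns[0] still bumps;
//                                   // with NAHandling.RM the count would not bump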
private static abstract class GBTask<E extends MRTask<E>> extends MRTask<E> {
final int[] _gbCols; // Columns used to define group
final byte[] _gbColsTypes; // Types of gb columns
final int _numericGbColsCnt;
final int _stringGbColsCnt;
final AGG[] _aggs; // Aggregate descriptions
final boolean _hasMedian;
GBTask(int[] gbCols, byte[] gbColsTypes, AGG[] aggs, boolean hasMedian) {
_gbCols = gbCols;
_gbColsTypes = gbColsTypes;
_stringGbColsCnt = ArrayUtils.occurrenceCount(_gbColsTypes, Vec.T_STR);
_numericGbColsCnt = gbColsTypes.length - _stringGbColsCnt;
_aggs = aggs;
_hasMedian = hasMedian;
}
protected void map(Chunk[] cs, IcedHashSet<G> groups) {
G gWork = new G(_numericGbColsCnt, _stringGbColsCnt, _aggs, _hasMedian); // Working Group
G gOld; // Existing Group to be filled in
for (int row = 0; row < cs[0]._len; row++) {
// Find the Group being worked on
gWork.fill(row, cs, _gbCols, _gbColsTypes); // Fill the worker Group for the hashtable lookup
if (groups.addIfAbsent(gWork) == null) { // Inserted because it was absent (local set here: no race, no need for atomic)
gOld = gWork; // Inserted 'gWork' into table
gWork = new G(_numericGbColsCnt, _stringGbColsCnt, _aggs, _hasMedian); // need entirely new G
} else gOld = groups.get(gWork); // Else get existing group
for (int i = 0; i < _aggs.length; i++) // Accumulate aggregate reductions
_aggs[i].op(gOld._dss, gOld._ns, i, cs[_aggs[i]._col].atd(row));
}
}
abstract IcedHashSet<G> getGroups();
}
// --------------------------------------------------------------------------
// Main worker MRTask. Makes 1 pass over the data, and accumulates both all
// groups and all aggregates
// This version merges discovered groups into a per-node aggregates map - it
// is more memory efficient but seems to suffer from a race condition
// (bug PUBDEV-6319).
private static class GBTaskAggsPerNode extends GBTask<GBTaskAggsPerNode> {
final IcedHashSet<G> _gss; // Shared per-node, common, racy
GBTaskAggsPerNode(int[] gbCols, byte[] gbColsTypes, AGG[] aggs, boolean hasMedian) {
super(gbCols, gbColsTypes, aggs, hasMedian);
_gss = new IcedHashSet<>();
}
@Override
public void map(Chunk[] cs) {
// Groups found in this Chunk
IcedHashSet<G> gs = new IcedHashSet<>();
map(cs, gs);
// This is a racy update into the node-local shared table of groups
reduce(gs); // Atomically merge Group stats
}
// Racy update on a subtle path: reduction is always single-threaded, but
// the shared global hashtable being reduced into is ALSO being written by
// parallel map calls.
@Override
public void reduce(GBTaskAggsPerNode t) {
if (_gss != t._gss) {
// this means we got the result from another node
// it is easy to partition the result into distinct subsets - we don't have to worry about collisions
// => no need to synchronize (but for now we do use atomic_op anyway), we just parallelize the merge
int otherSize = t._gss.size();
if (otherSize == 0)
return;
G[] otherGroups = t._gss.toArray(new G[otherSize]);
final int subGroupSize = otherSize > H2O.ARGS.nthreads ?
(int) Math.ceil((double) otherSize / H2O.ARGS.nthreads) : otherSize;
MergeGroupsFun f = new MergeGroupsFun(_aggs, _gss, otherGroups, subGroupSize);
if (subGroupSize == otherSize) {
f.map(0); // not worth parallelizing, execute directly
} else {
H2O.submitTask(new LocalMR(f, H2O.ARGS.nthreads)).join();
}
}
}
// Non-blocking race-safe update of the shared per-node groups hashtable
private void reduce(IcedHashSet<G> r) {
for (G rg : r) {
G lg;
if ((lg = _gss.addIfAbsent(rg)) != null) {
for (int i = 0; i < _aggs.length; i++)
_aggs[i].atomic_op(lg._dss, lg._ns, i, rg._dss[i], rg._ns[i]); // Need to atomically merge groups here
}
}
}
@Override
IcedHashSet<G> getGroups() {
return _gss;
}
}
private static class MergeGroupsFun extends MrFun<MergeGroupsFun> {
private final AGG[] _aggs;
private final transient IcedHashSet<G> _gss;
private final transient G[] _other;
private final int _size;
MergeGroupsFun(AGG[] aggs, IcedHashSet<G> gss, G[] other, int size) {
_aggs = aggs;
_gss = gss;
_other = other;
_size = size;
}
@Override
protected void map(final int subGroupId) {
for (int g = subGroupId * _size; g < (subGroupId + 1) * _size && g < _other.length; g++) {
G rg = _other[g];
G lg;
if ((lg = _gss.addIfAbsent(rg)) != null) {
for (int i = 0; i < _aggs.length; i++)
_aggs[i].atomic_op(lg._dss, lg._ns, i, rg._dss[i], rg._ns[i]); // Need to atomically merge groups here
}
}
}
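// Illustrative note (not part of the original source): with 10 incoming groups and
// 4 threads, _size = ceil(10/4) = 3, so map(0..3) covers index ranges [0,3), [3,6),
// [6,9) and [9,10) of the other node's group array - disjoint, hence safely parallel.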
}
// --------------------------------------------------------------------------
// "Safe" alternative of GBTaskAggsPerNode - instead of maintaining
// a node-global map of aggregates, it creates aggregates per chunk
// and uses reduce to combine the per-map results into a single aggregate.
// Consumes more memory but doesn't suffer from bug PUBDEV-6319.
public static class GBTaskAggsPerMap extends GBTask<GBTaskAggsPerMap> {
IcedHashSet<G> _gss; // each map will have its own IcedHashMap
GBTaskAggsPerMap(int[] gbCols, byte[] gbColsTypes, AGG[] aggs, boolean hasMedian) {
super(gbCols, gbColsTypes, aggs, hasMedian);
}
@Override
public void map(Chunk[] cs) {
// Groups found in this Chunk
_gss = new IcedHashSet<>();
map(cs, _gss);
}
// combine IcedHashMap from all threads here.
@Override
public void reduce(GBTaskAggsPerMap t) {
for (G rg : t._gss) {
G lg;
if ((lg = _gss.addIfAbsent(rg)) != null) {
for (int i = 0; i < _aggs.length; i++)
_aggs[i].atomic_op(lg._dss, lg._ns, i, rg._dss[i], rg._ns[i]); // Need to atomically merge groups here
}
}
}
@Override
IcedHashSet<G> getGroups() {
return _gss;
}
}
public static class MedianResult extends Iced {
int[] _medianCols;
double[] _medians;
boolean[] _isMedian;
int[] _newChunkCols;
public NAHandling[] _na;
public MedianResult(int len) {
_medianCols = new int[len];
_medians = new double[len];
_isMedian = new boolean[len];
_newChunkCols = new int[len];
_na = new NAHandling[len];
}
}
// Groups! Contains a Group Key - arrays of doubles and strings that
// define the Group. Also contains an array of doubles for the
// aggregate results, one per aggregate.
public static class G extends Iced<G> {
public final double[] _gs; // Group Key: Array is final; contents change with the "fill"
public final BufferedString[] _gsStr; // Group Key: Array is final; contents change with the "fill"
int _hash; // Hash is not final; changes with the "fill"
public final double _dss[][]; // Aggregates: usually sum or sum*2
public final long _ns[]; // row counts per aggregate, varies by NA handling and column
public MedianResult medianR = null;
public G(int ncolsNum, AGG[] aggs) {
this(ncolsNum, 0, aggs, false);
}
public G(int ncolsNum, int ncolsStr, AGG[] aggs) {
this(ncolsNum, ncolsStr, aggs, false);
}
public G(int ncolsNum, AGG[] aggs, boolean hasMedian) { this(ncolsNum, 0, aggs, hasMedian); }
public G(int ncolsNum, int ncolsStr, AGG[] aggs, boolean hasMedian) {
_gs = new double[ncolsNum];
_gsStr = new BufferedString[ncolsStr];
for (int i = 0; i < _gsStr.length; i++)
_gsStr[i] = new BufferedString();
int len = aggs == null ? 0 : aggs.length;
_dss = new double[len][];
_ns = new long[len];
if (hasMedian) {
medianR = new MedianResult(len);
}
for (int i = 0; i < len; i++) {
_dss[i] = aggs[i].initVal();
if (hasMedian && (aggs[i]._fcn.toString().equals("median"))) { // for median function only
medianR._medianCols[i] = aggs[i]._col; // which column in the data set to aggregate on
medianR._isMedian[i] = true;
medianR._na[i] = aggs[i]._na;
}
}
}
private int colsCount() { return _gs.length + _gsStr.length; }
// Note: this overload indexes _gs/_gsStr by the raw chunk position, so it
// assumes the chunk selection matches this G's column layout exactly.
public G fill(int row, Chunk chks[]) {
for (int c = 0; c < chks.length; c++) { // For all selection cols
Vec vec = chks[c].vec();
// Load into working array
if (vec.isString())
chks[c].atStr(_gsStr[c], row);
else
_gs[c] = chks[c].atd(row);
}
_hash = hash();
return this;
}
public G fill(int row, Chunk[] chks, int[] colsNum) {
byte[] gbColsTypes = new byte[colsNum.length];
Arrays.fill(gbColsTypes, Vec.T_NUM);
return fill(row, chks, colsNum, gbColsTypes);
}
public G fill(int row, Chunk[] chks, int[] cols, byte[] gbColsTypes) {
int strIdx = 0;
int numIdx = 0;
for (int c = 0; c < cols.length; c++) // For all selection cols
// Load into working array
if (gbColsTypes[c] == Vec.T_STR)
chks[cols[c]].atStr(_gsStr[strIdx++], row);
else
_gs[numIdx++] = chks[cols[c]].atd(row);
_hash = hash();
return this;
}
protected int hash() {
long h = 0; // hash is sum of field bits
for (double d : _gs) h += Double.doubleToRawLongBits(d);
// Doubles are lousy hashes; mix up the bits some
h ^= (h >>> 20) ^ (h >>> 12);
h ^= (h >>> 7) ^ (h >>> 4);
for (BufferedString str : _gsStr) h = 37 * h + str.hashCode();
return (int) ((h ^ (h >> 32)) & 0x7FFFFFFF);
}
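// Illustrative note (not part of the original source): raw double bits of nearby
// keys differ mostly in the high (exponent) bits - e.g. 1.0 is 0x3FF0000000000000
// and 2.0 is 0x4000000000000000 - so the shift-xor cascade above folds the high
// bits down before the final fold into a non-negative int.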
@Override
public boolean equals(Object o) {
return o instanceof G
&& Arrays.equals(_gs, ((G) o)._gs)
&& Arrays.equals(_gsStr, ((G) o)._gsStr);
}
@Override
public int hashCode() {
return _hash;
}
@Override
public String toString() {
return Arrays.toString(_gsStr) + " - " + Arrays.toString(_gs);
}
}
// --------------------------------------------------------------------------
// For each groupG and each aggregate function (median only), we separate and
// extract the column per groupG per aggregate function into a NewChunk column
// here.
private static class BuildGroup extends MRTask<BuildGroup> {
final int[] _gbCols;
final byte[] _gbColsTypes;
final int _numericGbColsCnt;
final int _stringGbColsCnt;
private final AGG[] _aggs; // Aggregate descriptions
private final int _medianCols;
IcedHashSet<G> _gss;
private G[] _grps;
BuildGroup(int[] gbCols, byte[] gbColsTypes, AGG[] aggs, IcedHashSet<G> gss, G[] grps, int medianCols) {
_gbCols = gbCols;
_gbColsTypes = gbColsTypes;
_stringGbColsCnt = ArrayUtils.occurrenceCount(_gbColsTypes, Vec.T_STR);
_numericGbColsCnt = gbColsTypes.length - _stringGbColsCnt;
_aggs = aggs;
_gss = gss;
_grps = grps;
_medianCols = medianCols;
}
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
G gWork = new G(_numericGbColsCnt, _stringGbColsCnt, _aggs, _medianCols > 0); // Working Group
G gOld;
for (int row = 0; row < cs[0]._len; row++) { // for each
gWork.fill(row, cs, _gbCols, _gbColsTypes);
gOld = _gss.get(gWork);
for (int i = 0; i < gOld.medianR._isMedian.length; i++) { // Accumulate aggregate reductions
if (gOld.medianR._isMedian[i]) { // median action required on column and group
double d1 = cs[gOld.medianR._medianCols[i]].atd(row);
if (!Double.isNaN(d1) || gOld.medianR._na[i] != NAHandling.RM)
ncs[gOld.medianR._newChunkCols[i]].addNum(d1); // build up dataset for each group
}
}
}
}
// For the data column collected for each G and each aggregate function, make a Vec out of the
// NewChunk column. Sort it and return the median as the middle value, or the mean of the two middle values.
Vec[] close() {
Futures fs = new Futures();
int cCount = 0;
Vec[] tempVgrps = new Vec[_medianCols];
for (G oneG : _grps) {
for (int index = 0; index < oneG.medianR._isMedian.length; index++) {
if (oneG.medianR._isMedian[index]) { // median action is needed
// make a frame out of the NewChunk vector
water.fvec.AppendableVec av = _appendables[oneG.medianR._newChunkCols[index]];
tempVgrps[cCount++] = av.close(av.compute_rowLayout(), fs);
}
}
}
fs.blockForPending();
return tempVgrps;
}
public void calcMedian(Vec[] tempVgrps) {
int cCount = 0;
for (G oneG : _grps) {
for (int index = 0; index < oneG.medianR._isMedian.length; index++) {
if (oneG.medianR._isMedian[index]) {
Vec[] vgrps = new Vec[1];
vgrps[0] = tempVgrps[cCount++];
long totalRows = vgrps[0].length();
double medianVal;
if (totalRows == 0) {
medianVal = Double.NaN; // return NaN for empty groups; this should not happen
} else {
Frame myFrame = new Frame(Key.make(), vgrps);
long midRow = totalRows / 2;
Frame tempFrame = Merge.sort(myFrame, new int[]{0});
medianVal = totalRows % 2 == 0 ? 0.5 * (tempFrame.vec(0).at(midRow - 1) +
tempFrame.vec(0).at(midRow)) : tempFrame.vec(0).at(midRow);
tempFrame.delete();
myFrame.delete();
}
oneG.medianR._medians[index] = medianVal;
}
}
}
}
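// Worked example (illustrative, not from the original source): for a sorted group
// column of 4 rows {1, 3, 5, 9}, midRow = 2 and the median is 0.5 * (3 + 5) = 4.0;
// for 5 rows {1, 3, 5, 9, 11}, midRow = 2 and the median is 5.0.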
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstGroupSorted.java
|
/*
package water.rapids.ast.prims.mungers;
import water.fvec.*;
import water.*;
import water.rapids.RadixCount;
import water.rapids.assignG;
import water.util.ArrayUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class AstGroupSorted {
// 2^31 bytes > java max (2^31-1), so 2^30 / 8 bytes per long. TO DO - how to make global?
//private static final int MAXVECLONG = 134217728;
//private static final int MAXVECBYTE = 1073741824;
long[][] sort(Frame groupCols) {
//return (new RadixOrder(groupCols, ArrayUtils.seq(0,groupCols.numCols()-1))._groupIndex); // TO DO: won't work yet as needs 2nd group step
return (new long[][] {{1,2,3}});
// a vector
System.out.println("Calling RadixCount ...");
long t0 = System.nanoTime();
long t00 = t0;
int nChunks = groupCols.anyVec().nChunks();
if( groupCols.numCols() != 1 ) throw H2O.unimpl(); // Only looking at column 0 for now
long counts[][][] = new RadixCount(nChunks).doAll(groupCols.vec(0))._counts;
System.out.println("Time of RadixCount: " + (System.nanoTime() - t0) / 1e9); t0 = System.nanoTime();
// for (int c=0; c<5; c++) { System.out.print("First 10 for chunk "+c+" byte 0: "); for (int i=0; i<10; i++) System.out.print(counts[0][c][i] + " "); System.out.print("\n"); }
long totalHist[] = new long[256];
for (int c=0; c<nChunks; c++) {
for (int h=0; h<256; h++) {
totalHist[h] += counts[5][c][h]; // TO DO: hard coded 5 here
}
}
for (int b=0; b<8; b++) {
for (int h=0; h<256; h++) {
long rollSum = 0;
for (int c = 0; c < nChunks; c++) {
long tmp = counts[b][c][h];
counts[b][c][h] = rollSum;
rollSum += tmp;
}
}
}
// Any radix skipping needs to be detected with a loop over node results to ensure no use of those bits on any node.
System.out.println("Time to cumulate counts: " + (System.nanoTime() - t0) / 1e9); t0 = System.nanoTime();
// TO DO: by this stage we know now the width of byte field we need. So allocate it tight up to MAXVEC
// TO DO: reduce to 5 if we're only passed the first column
int keySize = 7;
long o[][][] = new long[256][][];
byte x[][][] = new byte[256][][]; // for each bucket, there might be > 2^31 bytes, so an extra dimension for that
for (int c=0; c<256; c++) {
if (totalHist[c] == 0) continue;
int d;
int nbatch = (int)(totalHist[c] * Math.max(keySize,8) / MAXVECBYTE); // TO DO. can't be 2^31 because 2^31-1 was limit. If we use 2^30, instead of /, can we do >> for speed?
int rem = (int)(totalHist[c] * Math.max(keySize,8) % MAXVECBYTE);
assert nbatch==0; // in the case of 20m rows, we should always be well within a batch size
// The Math.max ensures that batches are aligned, even for wide keys. For efficiency inside insert() above so it doesn't have to cross boundaries.
o[c] = new long[nbatch + (rem>0?1:0)][];
x[c] = new byte[nbatch + (rem>0?1:0)][];
assert nbatch==0;
for (d=0; d<nbatch; d++) {
o[c][d] = new long[MAXVECLONG];
// TO DO?: use MemoryManager.malloc8()
x[c][d] = new byte[MAXVECBYTE];
}
if (rem>0) {
o[c][d] = new long[rem];
x[c][d] = new byte[rem * keySize];
}
}
System.out.println("Time to allocate o[][] and x[][]: " + (System.nanoTime() - t0) / 1e9); t0 = System.nanoTime();
// NOT TO DO: we do need the full allocation of x[] and o[]. We need o[]
// anyway. x[] will be as dense as possible.
// o is the full ordering vector of the right size
// x is the byte key aligned with o
// o AND x are what bmerge() needs. Pushing x to each node as well as o avoids inter-node comms.
// feasibly, that we could move by byte 5 and then skip the next byte. Too
// complex case though and rare so simplify
new MoveByFirstByte(5, o, x, counts, keySize).doAll(groupCols);
System.out.println("Time to MoveByFirstByte: " + (System.nanoTime() - t0) / 1e9); t0 = System.nanoTime();
// Add check that this first split is reasonable. e.g. if it were just 2,
// it definitely would not be enough. 90 is enough though. Need to fill
// L2 with pages.
// for counted completer 0:255
long groups[][] = new long[256][]; // at most MAXVEC groups per radix, currently
long nGroup[] = new long[257]; // one extra to make undo of cumulate easier
Futures fs = new Futures();
for (int i=0; i<256; i++) {
if (totalHist[i] > 0)
fs.add(H2O.submitTask(new dradix(groups, nGroup, i, x[i], o[i], totalHist[i], keySize)));
}
fs.blockForPending();
long nGroups = 0;
for (int i = 0; i < 257; i++) {
long tmp = nGroup[i];
nGroup[i] = nGroups;
nGroups += tmp;
}
System.out.println("Time to recursive radix: " + (System.nanoTime() - t0) / 1e9 ); t0 = System.nanoTime();
System.out.println("Total groups found: " + nGroups);
// We now have o and x that bmerge() needs
long nrow = groupCols.numRows();
long g[][] = new long[(int)(1 + nrow / MAXVECLONG)][];
int c;
for (c=0; c<nrow/MAXVECLONG; c++) {
g[c] = new long[MAXVECLONG];
}
g[c] = new long[(int)(nrow % MAXVECLONG)];
fs = new Futures();
for (int i=0; i<256; i++) {
if (totalHist[i] > 0)
fs.add(H2O.submitTask(new assignG(g, groups[i], nGroup[i+1]-nGroup[i], nGroup[i], o[i])));
// reuse the x vector we allocated before to store the group numbers. i.e. a perfect and ordered hash, stored alongside table
}
fs.blockForPending();
System.out.println("Time to assign group index (length nrows): " + (System.nanoTime() - t0) / 1e9 ); t0 = System.nanoTime();
return g;
}
}
*/
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstGroupedPermute.java
|
package water.rapids.ast.prims.mungers;
import water.H2O;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.params.AstNumList;
import water.util.IcedHashMap;
import water.util.Log;
import java.util.HashMap;
public class AstGroupedPermute extends AstPrimitive {
// .newExpr("grouped_permute", fr, permCol, groupByCols, permByCol, keepCol)
@Override
public String[] args() {
return new String[]{"ary", "permCol", "groupBy", "permuteBy", "keepCol"};
} // currently only allow 2 items in permuteBy
@Override
public int nargs() {
return 1 + 5;
} // (grouped_permute x permCol groupBy permuteBy keepCol)
@Override
public String str() {
return "grouped_permute";
}
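// Illustrative example (not part of the original source), with hypothetical column
// indices: (grouped_permute fr 2 [0] 3 4) permutes column 2 within the groups defined
// by column 0, crossing every row whose permuteBy column (3) reads "D" with every row
// that does not, and carrying the amounts from column 4 along as InAmnt/OutAmnt.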
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
final int permCol = (int) asts[2].exec(env).getNum();
AstNumList groupby = AstGroup.check(fr.numCols(), asts[3]);
final int[] gbCols = groupby.expand4();
final int permuteBy = (int) asts[4].exec(env).getNum();
final int keepCol = (int) asts[5].exec(env).getNum();
String[] names = new String[gbCols.length + 4];
int i = 0;
for (; i < gbCols.length; ++i)
names[i] = fr.name(gbCols[i]);
names[i++] = "In";
names[i++] = "Out";
names[i++] = "InAmnt";
names[i] = "OutAmnt";
String[][] domains = new String[names.length][];
int d = 0;
for (; d < gbCols.length; d++)
domains[d] = fr.domains()[gbCols[d]];
domains[d++] = fr.domains()[permCol];
domains[d++] = fr.domains()[permCol];
domains[d++] = fr.domains()[keepCol];
domains[d] = fr.domains()[keepCol];
long s = System.currentTimeMillis();
BuildGroups t = new BuildGroups(gbCols, permuteBy, permCol, keepCol).doAll(fr);
Log.info("Elapsed time: " + (System.currentTimeMillis() - s) / 1000. + "s");
s = System.currentTimeMillis();
SmashGroups sg;
H2O.submitTask(sg = new SmashGroups(t._grps)).join();
Log.info("Elapsed time: " + (System.currentTimeMillis() - s) / 1000. + "s");
return new ValFrame(buildOutput(sg._res.values().toArray(new double[0][][]), names, domains));
}
private static Frame buildOutput(final double[][][] a, String[] names, String[][] domains) {
Frame dVec = new Frame(Vec.makeSeq(0, a.length));
long s = System.currentTimeMillis();
Frame res = new MRTask() {
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
for (int i = 0; i < cs[0]._len; ++i)
for (double[] anAa : a[(int) cs[0].at8(i)])
for (int k = 0; k < anAa.length; ++k)
ncs[k].addNum(anAa[k]);
}
}.doAll(5, Vec.T_NUM, dVec).outputFrame(null, names, domains);
Log.info("Elapsed time: " + (System.currentTimeMillis() - s) / 1000. + "s");
dVec.delete();
return res;
}
private static class BuildGroups extends MRTask<BuildGroups> {
IcedHashMap<Long, IcedHashMap<Long, double[]>[]> _grps; // shared per node (all grps with permutations atomically inserted)
private final int _gbCols[];
private final int _permuteBy;
private final int _permuteCol;
private final int _amntCol;
BuildGroups(int[] gbCols, int permuteBy, int permuteCol, int amntCol) {
_gbCols = gbCols;
_permuteBy = permuteBy;
_permuteCol = permuteCol;
_amntCol = amntCol;
}
@Override
public void setupLocal() {
_grps = new IcedHashMap<>();
}
@Override
public void map(Chunk[] chks) {
String[] dom = chks[_permuteBy].vec().domain();
IcedHashMap<Long, IcedHashMap<Long, double[]>[]> grps = new IcedHashMap<>();
for (int row = 0; row < chks[0]._len; ++row) {
long jid = chks[_gbCols[0]].at8(row);
long rid = chks[_permuteCol].at8(row);
double[] aci = new double[]{rid, chks[_amntCol].atd(row)};
int type = dom[(int) chks[_permuteBy].at8(row)].equals("D") ? 0 : 1;
if (grps.containsKey(jid)) {
IcedHashMap<Long, double[]>[] dcWork = grps.get(jid);
if (dcWork[type].putIfAbsent(rid, aci) != null)
dcWork[type].get(rid)[1] += aci[1];
} else {
IcedHashMap<Long, double[]>[] dcAcnts = new IcedHashMap[2];
dcAcnts[0] = new IcedHashMap<>();
dcAcnts[1] = new IcedHashMap<>();
dcAcnts[type].put(rid, aci);
grps.put(jid, dcAcnts);
}
}
reduce(grps);
}
@Override
public void reduce(BuildGroups t) {
if (_grps != t._grps) reduce(t._grps);
}
private void reduce(IcedHashMap<Long, IcedHashMap<Long, double[]>[]> r) {
for (Long l : r.keySet()) {
if (_grps.putIfAbsent(l, r.get(l)) != null) {
IcedHashMap<Long, double[]>[] rdbls = r.get(l);
IcedHashMap<Long, double[]>[] ldbls = _grps.get(l);
for (Long rr : rdbls[0].keySet())
if (ldbls[0].putIfAbsent(rr, rdbls[0].get(rr)) != null)
ldbls[0].get(rr)[1] += rdbls[0].get(rr)[1];
for (Long rr : rdbls[1].keySet())
if (ldbls[1].putIfAbsent(rr, rdbls[1].get(rr)) != null)
ldbls[1].get(rr)[1] += rdbls[1].get(rr)[1];
}
}
}
}
private static class SmashGroups extends H2O.H2OCountedCompleter<SmashGroups> {
private final IcedHashMap<Long, IcedHashMap<Long, double[]>[]> _grps;
private final HashMap<Integer, Long> _map;
private int _hi;
private int _lo;
SmashGroups _left;
SmashGroups _rite;
private IcedHashMap<Long, double[][]> _res;
SmashGroups(IcedHashMap<Long, IcedHashMap<Long, double[]>[]> grps) {
_grps = grps;
_lo = 0;
_hi = _grps.size();
_res = new IcedHashMap<>();
_map = new HashMap<>();
int i = 0;
for (Long l : _grps.keySet())
_map.put(i++, l);
}
@Override
public void compute2() {
assert _left == null && _rite == null;
if ((_hi - _lo) >= 2) { // divide/conquer down to 1 IHM
final int mid = (_lo + _hi) >>> 1; // Mid-point
_left = copyAndInit();
_rite = copyAndInit();
_left._hi = mid; // Reset mid-point
_rite._lo = mid; // Also set self mid-point
addToPendingCount(1); // One fork awaiting completion
_left.fork(); // Runs in another thread/FJ instance
_rite.compute2(); // Runs in THIS F/J thread
return;
}
if (_hi > _lo) {
smash();
}
tryComplete();
}
private void smash() {
long key = _map.get(_lo);
IcedHashMap<Long, double[]>[] pair = _grps.get(key);
double[][] res = new double[pair[0].size() * pair[1].size()][]; // all combos
int d0 = 0;
for (double[] ds0 : pair[0].values()) {
for (double[] ds1 : pair[1].values())
res[d0++] = new double[]{key, ds0[0], ds1[0], ds0[1], ds1[1]};
}
_res.put(key, res);
}
private SmashGroups copyAndInit() {
SmashGroups x = SmashGroups.this.clone();
x.setCompleter(this);
x._left = x._rite = null;
x.setPendingCount(0);
return x;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstIsCharacter.java
|
package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValNums;
import water.rapids.ast.AstPrimitive;
/**
* Is String Vec?
*/
public class AstIsCharacter extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (is.character col)
@Override
public String str() {
return "is.character";
}
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (fr.numCols() == 1) return new ValNums(new double[]{fr.anyVec().isString() ? 1 : 0});
double ds[] = new double[fr.numCols()];
for (int i = 0; i < fr.numCols(); i++)
ds[i] = fr.vec(i).isString() ? 1 : 0;
return new ValNums(ds);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstIsFactor.java
|
package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValNums;
import water.rapids.ast.AstPrimitive;
/**
* Is a factor/categorical?
*/
public class AstIsFactor extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (is.factor col)
@Override
public String str() {
return "is.factor";
}
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (fr.numCols() == 1) return new ValNums(new double[]{fr.anyVec().isCategorical() ? 1 : 0});
double ds[] = new double[fr.numCols()];
for (int i = 0; i < fr.numCols(); i++)
ds[i] = fr.vec(i).isCategorical() ? 1 : 0;
return new ValNums(ds);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstIsNa.java
|
package water.rapids.ast.prims.mungers;
import water.H2O;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Val;
import water.rapids.ast.AstBuiltin;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValNum;
/**
* Split out into its own function, instead of Yet Another UniOp, because it
* needs an "is.NA" check instead of just the Double.isNaN hack... because
* it works on UUID and String columns.
*/
public class AstIsNa extends AstBuiltin<AstIsNa> {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public String str() {
return "is.na";
}
@Override
public int nargs() {
return 1 + 1;
}
@Override
public Val exec(Val... args) {
Val val = args[1];
switch (val.type()) {
case Val.NUM:
return new ValNum(op(val.getNum()));
case Val.FRM:
Frame fr = val.getFrame();
String[] newNames = new String[fr.numCols()];
for (int i = 0; i < newNames.length; i++) {
newNames[i] = "isNA(" + fr.name(i) + ")";
}
return new ValFrame(new MRTask() {
@Override
public void map(Chunk cs[], NewChunk ncs[]) {
for (int col = 0; col < cs.length; col++) {
Chunk c = cs[col];
NewChunk nc = ncs[col];
for (int i = 0; i < c._len; i++)
nc.addNum(c.isNA(i) ? 1 : 0);
}
}
}.doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame(newNames, null));
case Val.STR:
return new ValNum(val.getStr() == null ? 1 : 0);
default:
throw H2O.unimpl("is.na unimpl: " + val.getClass());
}
}
double op(double d) {
return Double.isNaN(d) ? 1 : 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstIsNumeric.java
|
package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValNums;
import water.rapids.ast.AstPrimitive;
/**
* Is a numeric?
*/
public class AstIsNumeric extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (is.numeric col)
@Override
public String str() {
return "is.numeric";
}
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (fr.numCols() == 1) return new ValNums(new double[]{fr.anyVec().isNumeric() ? 1 : 0});
double ds[] = new double[fr.numCols()];
for (int i = 0; i < fr.numCols(); i++)
ds[i] = fr.vec(i).isNumeric() ? 1 : 0;
return new ValNums(ds);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstLevels.java
|
package water.rapids.ast.prims.mungers;
import water.Futures;
import water.Key;
import water.fvec.AppendableVec;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
*/
public class AstLevels extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (levels x)
@Override
public String str() {
return "levels";
}
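// Illustrative note (not part of the original source): for a frame with a 3-level
// categorical column and a numeric column, the result has 3 rows; the categorical
// column lists its levels while the numeric column (no domain) is all NAs.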
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame f = stk.track(asts[1].exec(env)).getFrame();
Futures fs = new Futures();
Key[] keys = Vec.VectorGroup.VG_LEN1.addVecs(f.numCols());
Vec[] vecs = new Vec[keys.length];
// compute the longest vec... that's the one with the most domain levels
int max = 0;
for (int i = 0; i < f.numCols(); ++i)
if (f.vec(i).isCategorical())
if (max < f.vec(i).domain().length) max = f.vec(i).domain().length;
final int rowLayout = Vec.ESPC.rowLayout(keys[0], new long[]{0, max});
for (int i = 0; i < f.numCols(); ++i) {
AppendableVec v = new AppendableVec(keys[i], Vec.T_NUM);
NewChunk nc = new NewChunk(v, 0);
String[] dom = f.vec(i).domain();
int numToPad = dom == null ? max : max - dom.length;
if (dom != null)
for (int j = 0; j < dom.length; ++j) nc.addNum(j);
for (int j = 0; j < numToPad; ++j) nc.addNA();
nc.close(0, fs);
vecs[i] = v.close(rowLayout, fs);
vecs[i].setDomain(dom);
}
fs.blockForPending();
Frame fr2 = new Frame(vecs);
return new ValFrame(fr2);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstMelt.java
|
package water.rapids.ast.prims.mungers;
import water.MRTask;
import water.fvec.*;
import water.rapids.Val;
import water.rapids.ast.AstBuiltin;
import water.rapids.vals.ValFrame;
import water.util.ArrayUtils;
public class AstMelt extends AstBuiltin<AstMelt> {
@Override
public String[] args() {
return new String[]{"frame", "id_vars", "value_vars", "var_name", "value_name", "skip_na"};
}
@Override
public int nargs() {
return 1 + 6;
} // (melt frame, id_vars value_vars var_name value_name skip_na)
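// Illustrative example (not part of the original source), with hypothetical column
// names: (melt fr ["id"] ["q1" "q2"] "variable" "value" 0) emits two output rows per
// input row - one per value column - each keyed by the "id" column, with the value
// column's name in "variable" and its number in "value".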
@Override
public String str() {
return "melt";
}
@Override
public ValFrame exec(Val[] args) {
Frame fr = args[1].getFrame();
String[] idVars = args[2].getStrs();
String[] valueVars = args[3].isEmpty() ? null : args[3].getStrs();
String varName = args[4].getStr();
String valueName = args[5].getStr();
boolean skipNA = args[6].getBool();
if (idVars.length == 0) {
throw new IllegalArgumentException("Empty list of id_vars provided, id_vars needs to have at least one column name.");
}
final Frame idFrame = fr.subframe(idVars);
if (valueVars == null) {
valueVars = ArrayUtils.difference(fr.names(), idFrame.names());
}
if (valueVars.length == 0) {
throw new IllegalArgumentException("Empty list of value_vars provided, value_vars needs to have at least one column name.");
}
final Frame valueFrame = fr.subframe(valueVars);
for (Vec v : valueFrame.vecs()) {
if (! v.isNumeric()) {
throw new UnsupportedOperationException("You can only use `melt` with numerical columns. Categorical (and other) columns are not supported.");
}
}
byte[] outputTypes = ArrayUtils.append(idFrame.types(), new byte[]{Vec.T_CAT, Vec.T_NUM});
String[][] outputDomains = ArrayUtils.append(idFrame.domains(), valueVars, null);
String[] outputNames = ArrayUtils.append(idFrame.names(), varName, valueName);
Frame result = new MeltTask(idFrame.numCols(), skipNA)
.doAll(outputTypes, ArrayUtils.append(idFrame.vecs(), valueFrame.vecs()))
.outputFrame(null, outputNames, outputDomains);
return new ValFrame(result);
}
private static class MeltTask extends MRTask<MeltTask> {
private final int _id_vars_cnt;
private final boolean _skip_na;
MeltTask(int id_vars_cnt, boolean skip_na) {
_id_vars_cnt = id_vars_cnt;
_skip_na = skip_na;
}
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
ChunkVisitor.NewChunkVisitor[] id_ncs = new ChunkVisitor.NewChunkVisitor[_id_vars_cnt];
for (int i = 0; i < _id_vars_cnt; i++) {
id_ncs[i] = new ChunkVisitor.NewChunkVisitor(ncs[i]);
}
NewChunk var_ncs = ncs[_id_vars_cnt];
ChunkVisitor.NewChunkVisitor value_ncs = new ChunkVisitor.NewChunkVisitor(ncs[_id_vars_cnt + 1]);
for (int i = 0; i < cs[0]._len; i++) {
for (int c = _id_vars_cnt; c < cs.length; c++) {
if (_skip_na && cs[c].isNA(i))
continue;
// copy id vars
for (int j = 0; j < _id_vars_cnt; j++)
cs[j].processRows(id_ncs[j], i, i + 1);
// add var name
var_ncs.addNum(c - _id_vars_cnt, 0);
// add value
cs[c].processRows(value_ncs, i, i + 1);
}
}
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstMerge.java
|
package water.rapids.ast.prims.mungers;
import water.H2O;
import water.Iced;
import water.Key;
import water.MRTask;
import water.fvec.*;
import water.parser.BufferedString;
import water.rapids.Env;
import water.rapids.Merge;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import water.rapids.vals.ValFrame;
import water.util.IcedHashMap;
import java.util.ArrayList;
import java.util.Arrays;
/**
* plyr's merge: Join by any other name.
* Sample AstRoot: (merge $leftFrame $rightFrame allLeftFlag allRightFlag)
* <p/>
* Joins two frames; all columns with the same names will be the join key. If
* you want to join on a subset of identical names, rename the columns first
* (otherwise the same column name would appear twice in the result).
* <p/>
* If the client side wants to allow named columns to be merged, the client
* side is responsible for renaming columns as needed to bring the names into
* alignment as above. This can be as simple as renaming the RHS to match the
* LHS column names. Duplicate columns NOT part of the merge are still not
* allowed - because the resulting Frame will end up with duplicate column
* names which blows a Frame invariant (uniqueness of column names).
* <p/>
* If allLeftFlag is true, all rows in the leftFrame will be included, even if
* there is no matching row in the rightFrame, and vice-versa for
* allRightFlag. Missing data will appear as NAs. Both flags can be true.
* <p/>
* We support merge methods hash, radix and auto. If a user chooses auto, the
* algorithm defaults to radix, which is the better algorithm: it gives accurate
* merge results even if there are duplicated rows in the rightFrame, and it
* allows the presence of string columns in the frames. The hash method will not
* give correct merge results if there are duplicated rows in the rightFrame,
* and it cannot work with String columns; they need to be cast to
* enum/integer columns before calling merge.
*/
public class AstMerge extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"left", "rite", "all_left", "all_rite", "by_left", "by_right", "method"};
}
@Override
public String str() {
return "merge";
}
@Override
public int nargs() {
return 1 + 7;
} // (merge left rite all_left all_rite by_left by_right method)
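// Illustrative example (not part of the original source): (merge left rite 1 0 [] [] "auto")
// joins on all commonly-named columns (empty by_left/by_right), keeps every left row,
// and lets "auto" resolve to the radix method below.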
// Size cutoff before switching between a hashed-join vs a sorting join.
// Hash tables beyond this count are assumed to be inefficient, and we're
// better served by sorting all the join columns and doing a global
// merge-join.
static final int MAX_HASH_SIZE = 120000000;
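// Illustrative note (not part of the original source): the hashed frame's key set is
// replicated on every node, so this cap bounds per-node memory; when it is exceeded
// (and method is "auto") the code falls back to sortingMerge() below.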
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame l = stk.track(asts[1].exec(env)).getFrame();
Frame r = stk.track(asts[2].exec(env)).getFrame();
boolean allLeft = asts[3].exec(env).getNum() == 1;
boolean allRite = asts[4].exec(env).getNum() == 1;
int[] byLeft = check(asts[5]);
int[] byRite = check(asts[6]);
String method = asts[7].exec(env).getStr();
// byLeft and byRight contains the columns to match between
// check them
if (byLeft.length == 0) {
assert byRite.length==0;
// Now find common column names here on the Java side. As for Python caller currently.
ArrayList<Integer> leftTmp = new ArrayList<>();
ArrayList<Integer> riteTmp = new ArrayList<>();
for (int i=0; i<l._names.length; i++) {
int idx = r.find(l._names[i]);
if (idx != -1) {
leftTmp.add(i);
riteTmp.add(idx);
}
}
if (leftTmp.size() == 0) throw new IllegalArgumentException("No join columns specified and there are no common names");
byLeft = new int[leftTmp.size()];
byRite = new int[riteTmp.size()];
for (int i=0; i < byLeft.length; i++)
{
byLeft[i] = leftTmp.get(i).intValue();
byRite[i] = riteTmp.get(i).intValue();
}
}
if (byLeft.length != byRite.length)
throw new IllegalArgumentException("byLeft and byRight are not the same length");
int ncols = byLeft.length; // Number of join columns dealt with so far
l.moveFirst(byLeft);
r.moveFirst(byRite);
for (int i = 0; i < ncols; i++) {
Vec lv = l.vecs()[i];
Vec rv = r.vecs()[i];
if (lv.get_type() != rv.get_type())
throw new IllegalArgumentException("Merging columns must be the same type, column " + l._names[i] +
" found types " + lv.get_type_str() + " and " + rv.get_type_str());
if (method.equals("hash") && lv.isString())
throw new IllegalArgumentException("Cannot merge Strings with hash method; flip toCategoricalVec first" +
" or set your method to auto or radix");
}
// GC now to sync nodes and get them to use young gen for the working memory. This helps get stable
// repeatable timings. Otherwise full GCs can cause blocks. Adding System.gc() here suggested by Cliff
// during F2F pair-programming and it for sure worked.
// TODO - would be better at the end to clean up, but there are several exit paths here.
new MRTask() {
@Override
public void setupLocal() {
System.gc();
}
}.doAllNodes();
if (method.equals("radix") || method.equals("auto")) { // "auto" defaults to radix, the better merge method
// Build categorical mappings, to rapidly convert categoricals from the left to the right
// With the sortingMerge approach there is no variance here: always map left to right
if (allLeft && allRite)
throw new IllegalArgumentException("all.x=TRUE and all.y=TRUE is not supported. Choose one only.");
boolean onlyLeftAllOff = allLeft || !allRite; // equivalent to allLeft || (!allLeft && !allRite): use left frame as reference unless allRite==true
int[][] id_maps = new int[ncols][]; // will contain enum levels of the not included frame mapped to combined enum levels of both left/rite frames
for (int i = 0; i < ncols; i++) { // flip the frame orders for allRite
Vec lv = onlyLeftAllOff ? l.vec(i) : r.vec(i);
Vec rv = onlyLeftAllOff ? r.vec(i) : l.vec(i);
if (onlyLeftAllOff ? lv.isCategorical() : rv.isCategorical()) {
assert onlyLeftAllOff ? rv.isCategorical() : lv.isCategorical(); // if not, would have thrown above
id_maps[i] = CategoricalWrappedVec.computeMap(lv.domain(), rv.domain()); // flipped already, no need to flip again
}
}
if (onlyLeftAllOff) {
return sortingMerge(l, r, allLeft, ncols, id_maps);
} else { // implement allRite here by switching leftframe and riteframe. However, column order is wrong, re-order before return
ValFrame tempFrame = sortingMerge(r, l, allRite, ncols, id_maps);
Frame mergedFrame = tempFrame.getFrame(); // need to switch order of merged frame
int allColNum = mergedFrame.numCols();
int[] colMapping = new int[allColNum]; // index into combined frame but with correct order
for (int index = 0; index < ncols; index++) {
colMapping[index] = index; // no change to column order in the key columns
}
int offset = r.numCols() - ncols;
for (int index = ncols; index < l.numCols(); index++) { // set the order for right frame
colMapping[index] = offset + index; // move the left columns to the front
}
offset = l.numCols() - ncols;
for (int index = l.numCols(); index < allColNum; index++) {
colMapping[index] = index - offset;
}
mergedFrame.reOrder(colMapping); // reorder the frame columns for allrite = true
return tempFrame;
}
}
// Pick the frame to replicate & hash. If one set is "all" and the other
// is not, the "all" set must be walked, so the "other" is hashed. If both
// or neither are "all", then pick the smallest bytesize of the non-key
// columns. The hashed dataframe is completely replicated per-node
boolean walkLeft;
if (allLeft == allRite) {
walkLeft = l.numRows() > r.numRows();
} else {
walkLeft = allLeft;
}
Frame walked = walkLeft ? l : r;
Frame hashed = walkLeft ? r : l;
if (!walkLeft) {
boolean tmp = allLeft;
allLeft = allRite;
allRite = tmp;
}
// Build categorical mappings, to rapidly convert categoricals from the
// distributed set to the hashed & replicated set.
int[][] id_maps = new int[ncols][];
for (int i = 0; i < ncols; i++) {
Vec lv = walked.vecs()[i];
if (lv.isCategorical())
id_maps[i] = CategoricalWrappedVec.computeMap(hashed.vecs()[i].domain(), lv.domain());
}
// Build the hashed version of the hashed frame. Hash and equality are
// based on the known-integer key columns. Duplicates are either ignored
// (!allRite) or accumulated, and can force replication of the walked set.
//
// Count size of this hash table as-we-go. Bail out if the size exceeds
// a known threshold, and switch a sorting join instead of a hashed join.
final MergeSet ms = new MergeSet(ncols, id_maps, allRite).doAll(hashed);
final Key uniq = ms._uniq;
IcedHashMap<Row, String> rows = MergeSet.MERGE_SETS.get(uniq)._rows;
new MRTask() {
@Override
public void setupLocal() {
MergeSet.MERGE_SETS.remove(uniq);
}
}.doAllNodes();
if (method.equals("auto") && (rows == null || rows.size() > MAX_HASH_SIZE)) // Blew out hash size; switch to a sorting join. Matt: even with 0, rows was size 3 hence added ||
return sortingMerge(l, r, allLeft, ncols, id_maps);
// All of the walked set, and no dup handling on the right - which means no
// need to replicate rows of the walked dataset. Simple 1-pass over the
// walked set adding in columns (or NAs) from the right.
if (allLeft && !(allRite && ms._dup)) {
// The lifetime of the distributed dataset is independent of the original
// dataset, so it needs to be a deep copy.
// TODO: COW Optimization
walked = walked.deepCopy(null);
// run a global parallel work: lookup non-hashed rows in hashSet; find
// matching row; append matching column data
String[] names = Arrays.copyOfRange(hashed._names, ncols, hashed._names.length);
String[][] domains = Arrays.copyOfRange(hashed.domains(), ncols, hashed.domains().length);
byte[] types = Arrays.copyOfRange(hashed.types(), ncols, hashed.numCols());
Frame res = new AllLeftNoDupe(ncols, rows, hashed, allRite).doAll(types, walked).outputFrame(names, domains);
return new ValFrame(walked.add(res));
}
// Can be full or partial on the left, but won't necessarily do all of the
// right. Dups on right are OK (left will be replicated or dropped as needed).
if (!allRite) {
String[] names = Arrays.copyOf(walked.names(), walked.numCols() + hashed.numCols() - ncols);
System.arraycopy(hashed.names(), ncols, names, walked.numCols(), hashed.numCols() - ncols);
String[][] domains = Arrays.copyOf(walked.domains(), walked.numCols() + hashed.numCols() - ncols);
System.arraycopy(hashed.domains(), ncols, domains, walked.numCols(), hashed.numCols() - ncols);
byte[] types = walked.types();
types = Arrays.copyOf(types, types.length + hashed.numCols() - ncols);
System.arraycopy(hashed.types(), ncols, types, walked.numCols(), hashed.numCols() - ncols);
return new ValFrame(new AllRiteWithDupJoin(ncols, rows, hashed, allLeft).doAll(types, walked).outputFrame(names, domains));
}
throw H2O.unimpl();
}
/**
* Use a sorting merge/join, probably because the hash table size exceeded
* MAX_HASH_SIZE; i.e. the number of unique keys in the hashed Frame exceeds
* MAX_HASH_SIZE. Join is done on the first ncol columns in both frames,
* which are already known to be not-null and have matching names and types.
* The walked and hashed frames are sorted according to allLeft; if allRite
* is set then allLeft will also be set (but not vice-versa).
*
* @param left is the LHS frame; not-null.
* @param right is the RHS frame; not-null.
* @param allLeft all rows in the LHS frame will appear in the result frame.
* @param ncols is the number of columns to join on, and these are ordered
* as the first ncols of both the left and right frames.
* @param id_maps if not-null denote simple integer mappings from one
* categorical column to another; the width is ncols
*/
private ValFrame sortingMerge(Frame left, Frame right, boolean allLeft, int ncols, int[][] id_maps) {
int cols[] = new int[ncols];
for (int i = 0; i < ncols; i++) cols[i] = i;
return new ValFrame(Merge.merge(left, right, cols, cols, allLeft, id_maps));
}
// One Row object per row of the hashed dataset, so kept as small as
// possible.
private static class Row extends Iced {
final long[] _keys; // Key: first ncols of longs
int _hash; // Hash of keys; not final as Row objects are reused
long _row; // Row in Vec; the payload is vecs[].atd(_row)
long[] _dups; // dup rows stored here (includes _row); updated atomically.
int _dupIdx; // pointer into _dups array; updated atomically
Row(int ncols) {
_keys = new long[ncols];
}
Row fill(final Chunk[] chks, final int[][] cat_maps, final int row) {
// Precompute hash: columns are integer only (checked before we started
// here). NAs count as a zero for hashing.
long l, hash = 0;
for (int i = 0; i < _keys.length; i++) {
if (chks[i].isNA(row)) l = 0;
else {
l = chks[i].at8(row);
l = (cat_maps == null || cat_maps[i] == null) ? l : cat_maps[i][(int) l];
hash += l;
}
_keys[i] = l;
}
_hash = (int) (hash ^ (hash >> 32));
_row = chks[0].start() + row; // Payload: actual absolute row number
return this;
}
@Override
public int hashCode() {
return _hash;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Row)) return false;
Row r = (Row) o;
return _hash == r._hash && Arrays.equals(_keys, r._keys);
}
private void atomicAddDup(long row) {
synchronized (this) {
if (_dups == null) {
_dups = new long[]{_row, row};
_dupIdx = 2;
} else {
if (_dupIdx == _dups.length)
_dups = Arrays.copyOf(_dups, _dups.length << 1);
_dups[_dupIdx++] = row;
}
}
}
}
// Build a HashSet of one entire Frame, where the Key is the contents of the
// first few columns. One entry-per-row.
private static class MergeSet extends MRTask<MergeSet> {
// All active Merges have a per-Node hashset of one of the datasets. If
// this is missing, it means the HashMap exceeded the size bounds and the
// whole MergeSet is being aborted (gracefully) - and the Merge is
// switching to a sorting merge instead of a hashed merge.
static IcedHashMap<Key, MergeSet> MERGE_SETS = new IcedHashMap<>();
final Key _uniq; // Key to allow sharing of this MergeSet on each Node
final int _ncols; // Number of leading columns for the Hash Key
final int[][] _id_maps; // Rapid mapping between matching enums
final boolean _allRite; // Collect all rows with the same matching Key, or just the first
boolean _dup; // Dups are present at all
IcedHashMap<Row, String> _rows;
MergeSet(int ncols, int[][] id_maps, boolean allRite) {
_uniq = Key.make();
_ncols = ncols;
_id_maps = id_maps;
_allRite = allRite;
}
// Per-node, make the empty hashset for later reduction
@Override
public void setupLocal() {
_rows = new IcedHashMap<>();
MERGE_SETS.put(_uniq, this);
}
@Override
public void map(Chunk chks[]) {
final IcedHashMap<Row, String> rows = MERGE_SETS.get(_uniq)._rows; // Shared per-node HashMap
if (rows == null) return; // Missing: Aborted due to exceeding size
final int len = chks[0]._len;
Row row = new Row(_ncols);
for (int i = 0; i < len; i++) // For all rows
if (add(rows, row.fill(chks, _id_maps, i))) { // Fill & attempt add row
if (rows.size() > MAX_HASH_SIZE) {
abort();
return;
}
row = new Row(_ncols); // If added, need a new row to fill
}
}
private boolean add(IcedHashMap<Row, String> rows, Row row) {
if (rows.putIfAbsent(row, "") == null)
return true; // Added!
// dup handling: keys are identical
if (_allRite) { // Collect the dups?
_dup = true; // MergeSet has dups.
rows.getk(row).atomicAddDup(row._row);
}
return false;
}
private void abort() {
MERGE_SETS.get(_uniq)._rows = _rows = null;
}
@Override
public void reduce(MergeSet ms) {
final IcedHashMap<Row, String> rows = _rows; // Shared per-node hashset
if (rows == ms._rows) return;
if (rows == null || ms._rows == null) {
abort();
return;
} // Missing: aborted due to size
for (Row row : ms._rows.keySet())
add(rows, row); // Merge RHS into LHS, collecting dups as we go
}
}
private static abstract class JoinTask extends MRTask<JoinTask> {
protected final IcedHashMap<Row, String> _rows;
protected final int _ncols; // Number of merge columns
protected final Frame _hashed;
protected final boolean _allLeft, _allRite;
JoinTask(int ncols, IcedHashMap<Row, String> rows, Frame hashed, boolean allLeft, boolean allRite) {
_rows = rows;
_ncols = ncols;
_hashed = hashed;
_allLeft = allLeft;
_allRite = allRite;
}
protected static void addElem(NewChunk nc, Chunk c, int row) {
c.extractRows(nc,row,row+1);
}
protected static void addElem(NewChunk nc, Vec v, long absRow, BufferedString bStr) {
Chunk c = v.chunkForRow(absRow);
int relRow = (int)(absRow-c.start());
c.extractRows(nc,relRow,relRow+1);
}
}
// Build the join-set by iterating over all the local Chunks of the walked
// dataset, doing a hash-lookup on the hashed replicated dataset, and adding
// in the matching columns.
private static class AllLeftNoDupe extends JoinTask {
AllLeftNoDupe(int ncols, IcedHashMap<Row, String> rows, Frame hashed, boolean allRite) {
super(ncols, rows, hashed, true, allRite);
}
@Override
public void map(Chunk chks[], NewChunk nchks[]) {
// Shared common hash map
final IcedHashMap<Row, String> rows = _rows;
Vec[] vecs = _hashed.vecs(); // Data source from hashed set
assert vecs.length == _ncols + nchks.length;
Row row = new Row(_ncols); // Recycled Row object on the bigger dataset
BufferedString bStr = new BufferedString(); // Recycled BufferedString
int len = chks[0]._len;
for (int i = 0; i < len; i++) {
Row hashed = rows.getk(row.fill(chks, null, i));
if (hashed == null) { // Hashed is missing
for (NewChunk nc : nchks) nc.addNA(); // All Left: keep row, use missing data
} else {
// Copy fields from matching hashed set into walked set
final long absrow = hashed._row;
for (int c = 0; c < nchks.length; c++)
addElem(nchks[c], vecs[_ncols + c], absrow, bStr);
}
}
}
}
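  // Illustrative sketch: AllLeftNoDupe above is the distributed form of a classic
  // build/probe hash join with left-outer semantics. The same per-row logic on
  // plain-JDK collections; the keys and payloads below are hypothetical.
  static final class HashJoinSketch {
    public static void main(String[] args) {
      java.util.Map<Long, String> hashed = new java.util.HashMap<>(); // build side
      hashed.put(1L, "a");
      hashed.put(3L, "c");
      for (long key : new long[]{1, 2, 3}) {    // probe side: the "walked" rows
        String payload = hashed.get(key);       // allLeft: keep the row either way
        System.out.println(key + " -> " + (payload == null ? "NA" : payload));
      }
    }
  }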
private int[] check(AstRoot ast) {
double[] n;
if (ast instanceof AstNumList) n = ((AstNumList) ast).expand();
else if (ast instanceof AstNum)
n = new double[]{((AstNum) ast).getNum()}; // this is the number of breaks wanted...
else throw new IllegalArgumentException("Requires a number-list, but found a " + ast.getClass());
int[] ni = new int[n.length];
for (int i = 0; i < ni.length; ++i)
ni[i] = (int) n[i];
return ni;
}
// Build the join-set by iterating over all the local Chunks of the walked
// dataset, doing a hash-lookup on the hashed replicated dataset, and adding
// in BOTH the walked and the matching columns.
private static class AllRiteWithDupJoin extends JoinTask {
AllRiteWithDupJoin(int ncols, IcedHashMap<Row, String> rows, Frame hashed, boolean allLeft) {
super(ncols, rows, hashed, allLeft, true);
}
@Override
public void map(Chunk[] chks, NewChunk[] nchks) {
// Shared common hash map
final IcedHashMap<Row, String> rows = _rows;
Vec[] vecs = _hashed.vecs(); // Data source from hashed set
// assert vecs.length == _ncols + nchks.length;
Row row = new Row(_ncols); // Recycled Row object on the bigger dataset
BufferedString bStr = new BufferedString(); // Recycled BufferedString
int len = chks[0]._len;
for (int i = 0; i < len; i++) {
Row hashed = _rows.getk(row.fill(chks, null, i));
if (hashed == null) { // no rows, fill in chks, and pad NAs as needed...
if (_allLeft) { // pad NAs to the right...
int c = 0;
for (; c < chks.length; ++c) addElem(nchks[c], chks[c], i);
for (; c < nchks.length; ++c) nchks[c].addNA();
} // else no hashed and no _allLeft... skip (row is dropped)
} else {
if (hashed._dups != null) for (long absrow : hashed._dups) addRow(nchks, chks, vecs, i, absrow, bStr);
else addRow(nchks, chks, vecs, i, hashed._row, bStr);
}
}
}
void addRow(NewChunk[] nchks, Chunk[] chks, Vec[] vecs, int relRow, long absRow, BufferedString bStr) {
int c = 0;
for (; c < chks.length; ++c) addElem(nchks[c], chks[c], relRow);
for (; c < nchks.length; ++c) addElem(nchks[c], vecs[c - chks.length + _ncols], absRow, bStr);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstNLevels.java
|
package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
 * Return the number of levels (domain size) of a single categorical column.
 */
public class AstNLevels extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (nlevels x)
@Override
public String str() {
return "nlevels";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
int nlevels;
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (fr.numCols() == 1) {
Vec v = fr.anyVec();
nlevels = v != null && v.isCategorical() ? v.domain().length : 0;
return new ValNum(nlevels);
} else throw new IllegalArgumentException("nlevels applies to a single column. Got: " + fr.numCols());
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstNaOmit.java
|
package water.rapids.ast.prims.mungers;
import water.MRTask;
import water.fvec.*;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
/**
* Remove rows with NAs from the H2OFrame
* Note: Current implementation is NOT in-place replacement
*/
public class AstNaOmit extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public String str() {
return "na.omit";
}
@Override
public int nargs() {
return 1 + 1;
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
Frame fr2 = new MRTask() {
private void copyRow(int row, Chunk[] cs, NewChunk[] ncs) {
for (int i = 0; i < cs.length; ++i) {
if (cs[i] instanceof CStrChunk) ncs[i].addStr(cs[i], row);
else if (cs[i] instanceof C16Chunk) ncs[i].addUUID(cs[i], row);
else if (cs[i].hasFloat()) ncs[i].addNum(cs[i].atd(row));
else ncs[i].addNum(cs[i].at8(row), 0);
}
}
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
int col;
for (int row = 0; row < cs[0]._len; ++row) {
for (col = 0; col < cs.length; ++col)
if (cs[col].isNA(row)) break;
if (col == cs.length) copyRow(row, cs, ncs);
}
}
}.doAll(fr.types(), fr).outputFrame(fr.names(), fr.domains());
return new ValFrame(fr2);
}
}
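// Illustrative sketch: the MRTask above keeps a row only when no column is NA. The
// same predicate on a plain 2-D array, with NaN standing in for NA; the names below
// are hypothetical.
class NaOmitSketch {
  static double[][] naOmit(double[][] rows) {
    java.util.List<double[]> kept = new java.util.ArrayList<>();
    for (double[] row : rows) {
      boolean hasNA = false;
      for (double v : row) if (Double.isNaN(v)) { hasNA = true; break; }
      if (!hasNA) kept.add(row); // keep the row only when it is fully observed
    }
    return kept.toArray(new double[0][]);
  }
  public static void main(String[] args) {
    double[][] out = naOmit(new double[][]{{1, 2}, {3, Double.NaN}});
    System.out.println(java.util.Arrays.deepToString(out)); // [[1.0, 2.0]]
  }
}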
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstNcol.java
|
package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
 * Return the number of columns in the frame.
 */
public class AstNcol extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
}
@Override
public String str() {
return "ncol";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
return new ValNum(fr.numCols());
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstNrow.java
|
package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
 * Return the number of rows in the frame.
 */
public class AstNrow extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
}
@Override
public String str() {
return "nrow";
}
@Override
public String example() {
return "(nrow frame)";
}
@Override
public String description() {
return "Return the number of rows in the frame.";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
return new ValNum(fr.numRows());
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstPivot.java
|
package water.rapids.ast.prims.mungers;
import org.apache.commons.lang.ArrayUtils;
import water.*;
import water.fvec.*;
import water.rapids.Val;
import water.rapids.ast.AstBuiltin;
import water.rapids.vals.ValFrame;
import water.util.VecUtils;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.util.Arrays;
public class AstPivot extends AstBuiltin<AstPivot> {
@Override
public String[] args() {
return new String[]{"ary", "index", "column", "value"}; //the array and name of columns
}
@Override
public int nargs() {
return 1 + 4;
} // (pivot ary index column value)
@Override
public String str() {
return "pivot";
}
@Override
public ValFrame exec(Val[] args) {
// Distributed parallelized mrtask pivot
    // Limitations: a single index value can't have more than (chunk size * chunk size) rows,
    // and all rows of a single index value must fit on a single node (due to the sort call)
Frame fr = args[1].getFrame();
String index = args[2].getStr();
String column = args[3].getStr();
String value = args[4].getStr();
int indexIdx = fr.find(index);
int colIdx = fr.find(column);
if(fr.vec(column).isConst())
throw new IllegalArgumentException("Column: '" + column + "'is constant. Perhaps use transpose?" );
if(fr.vec(index).naCnt() > 0)
throw new IllegalArgumentException("Index column '" + index + "' has > 0 NAs");
// This is the sort then MRTask method.
// Create the target Frame
// Now sort on the index key, result is that unique keys will be localized
Frame fr2 = fr.sort(new int[]{indexIdx});
final long[] classes = new VecUtils.CollectIntegerDomain().doAll(fr.vec(colIdx)).domain();
final int nClass = (fr.vec(colIdx).isNumeric() || fr.vec(colIdx).isTime()) ? classes.length : fr.vec(colIdx).domain().length;
String[] header = null;
if (fr.vec(colIdx).isNumeric()) {
header = (String[]) ArrayUtils.addAll(new String[]{index}, Arrays.toString(classes).split("[\\[\\]]")[1].split(", "));
    } else if (fr.vec(colIdx).isTime()) {
      header = new String[nClass + 1]; // one name per class plus the leading index column
      header[0] = index;
      for (int i = 0; i < nClass; i++) header[i + 1] = (new DateTime(classes[i], DateTimeZone.UTC)).toString();
} else {
header = (String[]) ArrayUtils.addAll(new String[]{index}, fr.vec(colIdx).domain());
}
Frame initialPass = new pivotTask(fr2.find(index),fr2.find(column),fr2.find(value),classes)
.doAll(nClass+1, Vec.T_NUM, fr2)
.outputFrame(null, header, null);
fr2.delete();
Frame result = new Frame(initialPass.vec(0).makeCopy(fr.vec(indexIdx).domain(),fr.vec(indexIdx).get_type()));
result._key = Key.<Frame>make();
result.setNames(new String[]{index});
initialPass.remove(0);
result.add(initialPass);
return new ValFrame(result);
}
private class pivotTask extends MRTask<AstPivot.pivotTask>{
int _indexColIdx;
int _colColIdx;
int _valColIdx;
long[] _classes;
pivotTask(int indexColIdx, int colColIdx, int valColIdx, long[] classes) {
_indexColIdx = indexColIdx; _colColIdx = colColIdx; _valColIdx = valColIdx; _classes=classes;
}
@Override
public void map(Chunk[] cs, NewChunk[] nc) {
// skip past the first rows of the first index if we know that the previous chunk will run in here
long firstIdx = cs[_indexColIdx].at8(0);
long globalIdx = cs[_indexColIdx].start();
int start = 0;
if (globalIdx > 0 && cs[_indexColIdx].vec().at8(globalIdx-1)==firstIdx){
while(start < cs[_indexColIdx].len() && firstIdx == cs[_indexColIdx].at8(start)) start++;
}
for (int i=start; i<cs[_indexColIdx]._len; i++) {
long currentIdx = cs[_indexColIdx].at8(i);
// start with a copy of the current row
double[] newRow = new double[nc.length-1];
Arrays.fill(newRow,Double.NaN);
if (((i == cs[_indexColIdx]._len -1) &&
(cs[_indexColIdx].nextChunk() == null || cs[_indexColIdx].nextChunk() != null && currentIdx != cs[_indexColIdx].nextChunk().at8(0)))
|| (i < cs[_indexColIdx]._len -1 && currentIdx != cs[_indexColIdx].at8(i+1))) {
newRow[ArrayUtils.indexOf(_classes,cs[_colColIdx].at8(i))] = cs[_valColIdx].atd(i);
nc[0].addNum(cs[_indexColIdx].at8(i));
for (int j = 1; j < nc.length; j++) nc[j].addNum(newRow[j - 1]);
// were done here since we know the next row has a different index
continue;
}
// here we know we have to search ahead
int count = 1;
newRow[ArrayUtils.indexOf(_classes,cs[_colColIdx].at8(i))] = cs[_valColIdx].atd(i);
while ( count + i < cs[_indexColIdx]._len && currentIdx == cs[_indexColIdx].at8(i + count) ) {
// merge the forward row, the newRow and the existing row
// here would be a good place to apply aggregating function
// for now we are aggregating by "first"
if (Double.isNaN(newRow[ArrayUtils.indexOf(_classes,cs[_colColIdx].at8(i + count))])) {
newRow[ArrayUtils.indexOf(_classes,cs[_colColIdx].at8(i + count))] = cs[_valColIdx].atd(i + count);
}
count++;
}
// need to look if we need to go to next chunk
if (i + count == cs[_indexColIdx]._len && cs[_indexColIdx].nextChunk() != null) {
Chunk indexNC = cs[_indexColIdx].nextChunk(); // for the index
Chunk colNC = cs[_colColIdx].nextChunk(); // for the rest of the columns
Chunk valNC = cs[_valColIdx].nextChunk(); // for the rest of the columns
int countNC = 0;
          // Walk forward through following chunks while the same index value continues
          while (indexNC != null && countNC < indexNC._len && currentIdx == indexNC.at8(countNC)) {
            int cls = ArrayUtils.indexOf(_classes, colNC.at8(countNC));
            if (Double.isNaN(newRow[cls])) {
              newRow[cls] = valNC.atd(countNC); // aggregate by "first", same as above
            }
            countNC++; // must advance inside the loop, or it never terminates
            if (countNC == indexNC._len) { // go to the next chunk again
              indexNC = indexNC.nextChunk();
              colNC = colNC.nextChunk();
              valNC = valNC.nextChunk();
              countNC = 0;
            }
          }
}
nc[0].addNum(currentIdx);
for (int j = 1; j < nc.length; j++) {
nc[j].addNum(newRow[j - 1]);
}
i += (count - 1);
}
}
}
}
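// Illustrative sketch: AstPivot above sorts by the index column and then emits one
// output row per index value, keeping the FIRST value seen per (index, class) pair.
// The same aggregation over in-memory triples; all data below is hypothetical.
class PivotFirstSketch {
  public static void main(String[] args) {
    long[] index = {0, 0, 1};
    int[] cls = {0, 1, 0};        // column class, already mapped to 0..nClass-1
    double[] val = {1.0, 2.0, 3.0};
    final int nClass = 2;
    java.util.Map<Long, double[]> out = new java.util.LinkedHashMap<>();
    for (int i = 0; i < index.length; i++) {
      double[] row = out.computeIfAbsent(index[i], k -> {
        double[] r = new double[nClass];
        java.util.Arrays.fill(r, Double.NaN);
        return r;
      });
      if (Double.isNaN(row[cls[i]])) row[cls[i]] = val[i]; // aggregate by "first"
    }
    for (java.util.Map.Entry<Long, double[]> e : out.entrySet())
      System.out.println(e.getKey() + " -> " + java.util.Arrays.toString(e.getValue()));
    // 0 -> [1.0, 2.0]
    // 1 -> [3.0, NaN]
  }
}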
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstRBind.java
|
package water.rapids.ast.prims.mungers;
import jsr166y.CountedCompleter;
import water.DKV;
import water.H2O;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
/**
* rbind: bind rows together into a new frame
*/
public class AstRBind extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"..."};
}
@Override
public int nargs() {
return -1;
} // variable number of args
@Override
public String str() {
return "rbind";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
// Execute all args. Find a canonical frame; all Frames must look like this one.
// Each argument turns into either a Frame (whose rows are entirely
// inlined) or a scalar (which is replicated across as a single row).
Frame fr = null; // Canonical Frame; all frames have the same column count, types and names
int nchks = 0; // Total chunks
Val vals[] = new Val[asts.length]; // Computed AstRoot results
for (int i = 1; i < asts.length; i++) {
vals[i] = stk.track(asts[i].exec(env));
if (vals[i].isFrame()) {
fr = vals[i].getFrame();
nchks += fr.anyVec().nChunks(); // Total chunks
} else nchks++; // One chunk per scalar
}
// No Frame, just a pile-o-scalars?
Vec zz = null; // The zero-length vec for the zero-frame frame
if (fr == null) { // Zero-length, 1-column, default name
fr = new Frame(new String[]{Frame.defaultColName(0)}, new Vec[]{zz = Vec.makeZero(0)});
if (asts.length == 1) return new ValFrame(fr);
}
// Verify all Frames are the same columns, names, and types. Domains can vary, and will be the union
final Frame frs[] = new Frame[asts.length]; // Input frame
final byte[] types = fr.types(); // Column types
final long[] espc = new long[nchks + 1]; // Compute a new layout!
int coffset = 0;
Frame[] tmp_frs = new Frame[asts.length];
for (int i = 1; i < asts.length; i++) {
Val val = vals[i]; // Save values computed for pass 2
Frame fr0 = val.isFrame() ? val.getFrame()
// Scalar: auto-expand into a 1-row frame
: (tmp_frs[i] = new Frame(fr._names, Vec.makeCons(val.getNum(), 1L, fr.numCols())));
// Check that all frames are compatible
if (fr.numCols() != fr0.numCols())
throw new IllegalArgumentException("rbind frames must have all the same columns, found " + fr.numCols() + " and " + fr0.numCols() + " columns.");
if (!Arrays.deepEquals(fr._names, fr0._names))
throw new IllegalArgumentException("rbind frames must have all the same column names, found " + Arrays.toString(fr._names) + " and " + Arrays.toString(fr0._names));
if (!Arrays.equals(types, fr0.types()))
throw new IllegalArgumentException("rbind frames must have all the same column types, found " + Arrays.toString(types) + " and " + Arrays.toString(fr0.types()));
frs[i] = fr0; // Save frame
// Roll up the ESPC row counts
long roffset = espc[coffset];
long[] espc2 = fr0.anyVec().espc();
for (int j = 1; j < espc2.length; j++) // Roll up the row counts
espc[coffset + j] = (roffset + espc2[j]);
coffset += espc2.length - 1; // Chunk offset
}
if (zz != null) zz.remove();
// build up the new domains for each vec
HashMap<String, Integer>[] dmap = new HashMap[types.length];
String[][] domains = new String[types.length][];
int[][][] cmaps = new int[types.length][][];
for (int k = 0; k < types.length; ++k) {
dmap[k] = new HashMap<>();
int c = 0;
byte t = types[k];
if (t == Vec.T_CAT) {
int[][] maps = new int[frs.length][];
for (int i = 1; i < frs.length; i++) {
maps[i] = new int[frs[i].vec(k).domain().length];
for (int j = 0; j < maps[i].length; j++) {
String s = frs[i].vec(k).domain()[j];
if (!dmap[k].containsKey(s)) dmap[k].put(s, maps[i][j] = c++);
else maps[i][j] = dmap[k].get(s);
}
}
cmaps[k] = maps;
} else {
cmaps[k] = new int[frs.length][];
}
domains[k] = c == 0 ? null : new String[c];
for (Map.Entry<String, Integer> e : dmap[k].entrySet())
domains[k][e.getValue()] = e.getKey();
}
// Now make Keys for the new Vecs
Key<Vec>[] keys = fr.anyVec().group().addVecs(fr.numCols());
Vec[] vecs = new Vec[fr.numCols()];
int rowLayout = Vec.ESPC.rowLayout(keys[0], espc);
for (int i = 0; i < vecs.length; i++)
vecs[i] = new Vec(keys[i], rowLayout, domains[i], types[i]);
// Do the row-binds column-by-column.
// Switch to F/J thread for continuations
AstRBind.ParallelRbinds t;
H2O.submitTask(t = new AstRBind.ParallelRbinds(frs, espc, vecs, cmaps)).join();
for (Frame tfr : tmp_frs) if (tfr != null) tfr.delete();
return new ValFrame(new Frame(fr.names(), t._vecs));
}
// Helper class to allow parallel column binds, up to MAXP in parallel at any
// point in time. TODO: Not sure why this is here, should just spam F/J with
// all columns, even up to 100,000's should be fine.
private static class ParallelRbinds extends H2O.H2OCountedCompleter {
private final AtomicInteger _ctr; // Concurrency control
private static int MAXP = 100; // Max number of concurrent columns
private Frame[] _frs; // All frame args
private int[][][] _cmaps; // Individual cmaps per each set of vecs to rbind
private long[] _espc; // Rolled-up final ESPC
private Vec[] _vecs; // Output
ParallelRbinds(Frame[] frs, long[] espc, Vec[] vecs, int[][][] cmaps) {
_frs = frs;
_espc = espc;
_vecs = vecs;
_cmaps = cmaps;
_ctr = new AtomicInteger(MAXP - 1);
}
@Override
public void compute2() {
final int ncols = _frs[1].numCols();
addToPendingCount(ncols - 1);
for (int i = 0; i < Math.min(MAXP, ncols); ++i) forkVecTask(i);
}
// An RBindTask for each column
private void forkVecTask(final int colnum) {
Vec[] vecs = new Vec[_frs.length]; // Source Vecs
for (int i = 1; i < _frs.length; i++)
vecs[i] = _frs[i].vec(colnum);
new AstRBind.RbindTask(new AstRBind.ParallelRbinds.Callback(), vecs, _vecs[colnum], _espc, _cmaps[colnum]).fork();
}
private class Callback extends H2O.H2OCallback {
public Callback() {
super(AstRBind.ParallelRbinds.this);
}
@Override
public void callback(H2O.H2OCountedCompleter h2OCountedCompleter) {
int i = _ctr.incrementAndGet();
if (i < _vecs.length)
forkVecTask(i);
}
}
}
// RBind a single column across all vals
private static class RbindTask extends H2O.H2OCountedCompleter<AstRBind.RbindTask> {
final Vec[] _vecs; // Input vecs to be row-bound
final Vec _v; // Result vec
final long[] _espc; // Result layout
int[][] _cmaps; // categorical mapping array
RbindTask(H2O.H2OCountedCompleter cc, Vec[] vecs, Vec v, long[] espc, int[][] cmaps) {
super(cc);
_vecs = vecs;
_v = v;
_espc = espc;
_cmaps = cmaps;
}
@Override
public void compute2() {
addToPendingCount(_vecs.length - 1 - 1);
int offset = 0;
for (int i = 1; i < _vecs.length; i++) {
new AstRBind.RbindMRTask(this, _cmaps[i], _v, offset).dfork(_vecs[i]);
offset += _vecs[i].nChunks();
}
}
@Override
public void onCompletion(CountedCompleter cc) {
DKV.put(_v);
}
}
private static class RbindMRTask extends MRTask<AstRBind.RbindMRTask> {
private final int[] _cmap;
private final int _chunkOffset;
private final Vec _v;
RbindMRTask(H2O.H2OCountedCompleter hc, int[] cmap, Vec v, int offset) {
super(hc);
_cmap = cmap;
_v = v;
_chunkOffset = offset;
}
@Override
public void map(Chunk cs) {
int idx = _chunkOffset + cs.cidx();
Key ckey = Vec.chunkKey(_v._key, idx);
if (_cmap != null) {
assert !cs.hasFloat() : "Input chunk (" + cs.getClass() + ") has float, but is expected to be categorical";
NewChunk nc = new NewChunk(_v, idx);
// loop over rows and update ints for new domain mapping according to vecs[c].domain()
for (int r = 0; r < cs._len; ++r) {
if (cs.isNA(r)) nc.addNA();
else nc.addNum(_cmap[(int) cs.at8(r)], 0);
}
nc.close(_fs);
} else {
DKV.put(ckey, cs.deepCopy(), _fs, true);
}
}
}
}
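// Illustrative sketch: the dmap/cmaps code in AstRBind.apply unions the categorical
// domains of all inputs and builds a per-input old-level -> new-level remap, which
// RbindMRTask then applies row by row. The same computation on plain String domains;
// the inputs below are hypothetical.
class DomainUnionSketch {
  public static void main(String[] args) {
    String[][] domains = {{"a", "b"}, {"b", "c"}};
    java.util.Map<String, Integer> union = new java.util.LinkedHashMap<>();
    int[][] maps = new int[domains.length][];
    for (int i = 0; i < domains.length; i++) {
      maps[i] = new int[domains[i].length];
      for (int j = 0; j < domains[i].length; j++) {
        Integer lvl = union.get(domains[i][j]);
        if (lvl == null) union.put(domains[i][j], lvl = union.size());
        maps[i][j] = lvl; // level j of input i maps to this union level
      }
    }
    System.out.println(union);                               // {a=0, b=1, c=2}
    System.out.println(java.util.Arrays.deepToString(maps)); // [[0, 1], [1, 2]]
  }
}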
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstRankWithinGroupBy.java
|
package water.rapids.ast.prims.mungers;
import water.Iced;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstParameter;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import water.rapids.vals.ValFrame;
import java.util.Arrays;
/** Given a dataframe, a list of groupby columns, a list of sort columns, a list of sort directions, a string
* for the new name of the rank column, an integer sort_cols_order, this class
* will sort the whole dataframe according to the columns and sort directions. It will add the rank of the
* row within the groupby groups based on the sorted order determined by the sort columns and sort directions. Note
* that rank starts with 1.
*
 * If there are any NAs in the sorting columns, the rank of that row will be NA as well.
 *
 * If there are any NAs in the groupby columns, they will be counted as a group and given a rank. The user
 * can choose to ignore the ranks of groupby groups with NAs in them.
*
* If sort_cols_order is 1, the returned frame will be sorted according to the sort columns and the sort directions
* specified earlier. However, to get a small speed up, the user can set it to 0. In this case, the returned
* frame will be sorted according to the groupby columns followed by the sort columns. This will save you one
* sort action on the final frame.
*
*/
public class AstRankWithinGroupBy extends AstPrimitive {
@Override public String[] args() {
return new String[]{"frame", "groupby_cols", "sort_cols", "sort_orders", "new_colname", "sort_cols_order"};
}
@Override public String str(){ return "rank_within_groupby";}
  @Override public int nargs() { return 1+6; } // (rank_within_groupby frame groupby_cols sort_cols sort_orders new_colname sort_cols_order)
@Override public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame(); // first argument is dataframe
int[] groupbycols = ((AstParameter)asts[2]).columns(fr.names());
int[] sortcols =((AstParameter)asts[3]).columns(fr.names()); // sort columns
int[] sortAsc;
if (asts[4] instanceof AstNumList)
sortAsc = ((AstNumList) asts[4]).expand4();
else
sortAsc = new int[]{(int) ((AstNum) asts[4]).getNum()}; // R client can send 1 element for some reason
String newcolname = asts[5].str();
    boolean sortColsOrder = ((AstNum) asts[6]).getNum() == 1;
assert sortAsc.length==sortcols.length;
SortnGrouby sortgroupbyrank = new SortnGrouby(fr, groupbycols, sortcols, sortAsc, newcolname);
sortgroupbyrank.doAll(sortgroupbyrank._groupedSortedOut); // sort and add rank column
RankGroups rankgroups = new RankGroups(sortgroupbyrank._groupedSortedOut, groupbycols,
sortcols, sortgroupbyrank._chunkFirstG, sortgroupbyrank._chunkLastG,
sortgroupbyrank._newRankCol).doAll(sortgroupbyrank._groupedSortedOut);
if (sortColsOrder)
return new ValFrame(rankgroups._finalResult.sort(sortcols, sortAsc));
else
return new ValFrame(rankgroups._finalResult);
}
public boolean foundNAs(Chunk[] chunks, int rind, int[] sortCols, int sortLen) {
for (int colInd = 0; colInd < sortLen; colInd++) { // check sort columns for NAs
if (Double.isNaN(chunks[sortCols[colInd]].atd(rind))) {
return true;
}
}
return false;
}
public class RankGroups extends MRTask<RankGroups> {
final int _newRankCol;
final int _groupbyLen;
final int[] _sortCols;
final int _sortLen;
final int[] _groupbyCols;
final GInfoPC[] _chunkFirstG; // store first Groupby group info per chunk
final GInfoPC[] _chunkLastG; // store last Groupby group info per chunk
Frame _finalResult;
private RankGroups(Frame inputFrame, int[] groupbycols, int[] sortCols, GInfoPC[] chunkFirstG,
GInfoPC[] chunkLastG, int newRankCol) {
_newRankCol = newRankCol;
_groupbyCols = groupbycols;
_groupbyLen = groupbycols.length;
_sortCols = sortCols;
_sortLen = sortCols.length;
_chunkFirstG= chunkFirstG; // store starting rank for next chunk
_chunkLastG = chunkLastG;
_finalResult = inputFrame;
}
@Override
public void map(Chunk[] chunks) {
int cidx = chunks[0].cidx(); // get current chunk id
long rankOffset = setStartRank(cidx);
GInfoPC previousKey = _chunkFirstG[cidx]==null ? new GInfoPC(_groupbyLen, 1) : _chunkFirstG[cidx]; // copy over first group info
GInfoPC rowKey = new GInfoPC(_groupbyLen, 1);
for (int rind = 0; rind < chunks[0]._len; rind++) {
if (!Double.isNaN(chunks[_newRankCol].atd(rind)) || !foundNAs(chunks, rind, _sortCols, _sortLen)) { // only rank when sorting columns contains no NAs
rowKey.fill(rind, chunks, _groupbyCols);
if (previousKey.equals(rowKey)) {
rankOffset += 1;
} else { // new key
previousKey.fill(rowKey._gs, 1); // only key value matter, _gs.
rankOffset = 1;
}
chunks[_newRankCol].set(rind, rankOffset);
}
}
}
public long setStartRank(int cidx) {
if (_chunkFirstG[cidx] != null) {
return _chunkFirstG[cidx]._val;
} else
return 0;
}
}
public class SortnGrouby extends MRTask<SortnGrouby> {
final int[] _sortCols;
final int[] _groupbyCols;
final int[] _sortOrders;
final String _newColname;
Frame _groupedSortedOut; // store final result
GInfoPC[] _chunkFirstG; // store first groupby class per chunk
GInfoPC[] _chunkLastG; // store first groupby class per chunk
final int _groupbyLen;
final int _sortLen;
final int _newRankCol;
final int _numChunks;
private SortnGrouby(Frame original, int[] groupbycols, int[] sortCols, int[] sortasc, String newcolname) {
_sortCols = sortCols;
_groupbyCols = groupbycols;
_groupbyLen = _groupbyCols.length;
_sortLen = sortCols.length;
_sortOrders = sortasc;
_newColname = newcolname;
int[] allSorts = new int[_groupbyLen+_sortLen];
int[] allSortDirs = new int[allSorts.length];
System.arraycopy(_groupbyCols, 0, allSorts, 0, _groupbyLen);
System.arraycopy(_sortCols, 0, allSorts, _groupbyLen, _sortLen);
Arrays.fill(allSortDirs, 1);
System.arraycopy(_sortOrders, 0, allSortDirs, _groupbyLen, _sortLen);
_groupedSortedOut = original.sort(allSorts, allSortDirs); // sort frame
Vec newrank = original.anyVec().makeCon(Double.NaN);
_groupedSortedOut.add(_newColname, newrank); // add new rank column of invalid rank, NAs
_numChunks = _groupedSortedOut.vec(0).nChunks();
_chunkFirstG = new GInfoPC[_numChunks];
_chunkLastG = new GInfoPC[_numChunks];
_newRankCol = _groupedSortedOut.numCols() - 1;
}
/**
* I will first go from row 0 towards end of chunk to collect info on first group of the chunk.
* Next, I will go from bottom of chunk towards 0 to collect info on last group of the chunk.
     * It is possible that this chunk may contain only one group. That is okay. In this case, chunkFirstG
* and chunkLastG will contain the same information.
*/
@Override
public void map(Chunk[] chunks) {
int cidx = chunks[0].cidx(); // grab chunk id
int chunkLen = chunks[0].len();
GInfoPC gWork = new GInfoPC(_groupbyLen, 1);
int nextGRind = 0; // row where a new group is found
int rind = 0;
for (; rind < chunkLen; rind++) { // go through each row and try to find first groupby group
if (!foundNAs(chunks, rind, _sortCols, _sortLen)) { // no NA in sort columns
chunks[_newRankCol].set(rind, 0); // set new rank to 0 from NA
gWork.fill(rind, chunks, _groupbyCols);
if (_chunkFirstG[cidx] == null) { // has not found a group yet
_chunkFirstG[cidx] = new GInfoPC(_groupbyLen, 1);
_chunkFirstG[cidx].fill(gWork._gs, 1);
} else { // found a group already, still the same group?
if (_chunkFirstG[cidx].equals(gWork)) {
_chunkFirstG[cidx]._val += 1;
} else { // found new group
nextGRind = rind;
break; // found new group
}
}
}
}
// short cut to discover if there is only one group or no eligible group in this chunk
if (nextGRind == 0) { // only one group is found or no group is found (nothing needs to be done for no group case)
if (_chunkFirstG[cidx] != null) { // one big group in this chunk, lastG will contain the same info.
_chunkLastG[cidx] = new GInfoPC(_groupbyLen, _chunkFirstG[cidx]._val);
_chunkLastG[cidx].fill(_chunkFirstG[cidx]._gs, _chunkFirstG[cidx]._val);
}
} else { // has two groups at least, find the last group
for (int rowIndex = chunks[0]._len - 1; rowIndex >= rind; rowIndex--) {
if (!foundNAs(chunks, rowIndex, _sortCols, _sortLen)) { // only process eligible rows
chunks[_newRankCol].set(rowIndex, 0); // set new rank to 0 from NA
gWork.fill(rowIndex, chunks, _groupbyCols);
if (_chunkLastG[cidx] == null) { // has not found a group yet
_chunkLastG[cidx] = new GInfoPC(_groupbyLen, 1);
_chunkLastG[cidx].fill(gWork._gs, 1);
} else { // found a group already, still the same group?
if (_chunkLastG[cidx].equals(gWork)) {
_chunkLastG[cidx]._val += 1;
} else { // found new group
break;
}
}
}
}
}
}
@Override
public void reduce(SortnGrouby git) { // copy over the information from one chunk to the final
copyGroupInfo(_chunkFirstG, git._chunkFirstG); // copy over first group
copyGroupInfo(_chunkLastG, git._chunkLastG); // copy over last group info
}
public void copyGroupInfo(GInfoPC[] currentChunk, GInfoPC[] otherChunk) {
int numChunks = currentChunk.length;
for (int ind = 0; ind < numChunks; ind++) {
if (currentChunk[ind] == null) { // copy over first group info
if (otherChunk[ind] != null) {
currentChunk[ind] = new GInfoPC(_groupbyLen, 1);
currentChunk[ind].fill(otherChunk[ind]._gs, otherChunk[ind]._val);
}
}
}
}
@Override
public void postGlobal() { // change counts per group per chunk to be cumulative and assign the rank offset
for (int cInd = 1; cInd < _numChunks; cInd++) {
if (_chunkLastG[cInd - 1] != null) {
if (_chunkFirstG[cInd] != null) {
GInfoPC gPrevious = _chunkLastG[cInd - 1];
GInfoPC gNext = _chunkFirstG[cInd];
if (gNext.equals(gPrevious)) { // same group, need to update rank offset
gNext._val += gPrevious._val;
GInfoPC gLast = _chunkLastG[cInd];
if (gLast.equals(gNext)) { // chunk contains one big group, update the last group info as well
gLast._val += gPrevious._val; // one big group in this chunk, last group needs to update to reflect new rank offset
}
} else {
gNext._val = 0; // no rank offset is needed, different groups
}
}
}
}
_chunkFirstG[0]._val = 0; // first chunk, there is no offset
}
}
/**
* Store rank info for each chunk.
*/
public class GInfoPC extends Iced {
public final double[] _gs; // Group Key: Array is final; contents change with the "fill"
int _hash;
long _val; // store count of the groupby key inside the chunk
public GInfoPC(int ncols, long val) {
_gs = new double[ncols]; // denote a groupby group
_val = val; //number of rows belonging to the groupby group
}
public GInfoPC fill(int row, Chunk chks[], int cols[]) {
for (int c = 0; c < cols.length; c++) {// For all selection cols
_gs[c] = chks[cols[c]].atd(row); // Load into working array
}
_val = 1;
_hash = hash();
return this;
}
public GInfoPC fill(double cols[], long val) {
for (int c = 0; c < cols.length; c++) {// For all selection cols
_gs[c] = cols[c]; // Load into working array
}
_val = val;
_hash = hash();
return this;
}
protected int hash() {
long h = 0; // hash is sum of field bits
for (double d : _gs) h += Double.doubleToRawLongBits(d);
// Doubles are lousy hashes; mix up the bits some
h ^= (h >>> 20) ^ (h >>> 12);
h ^= (h >>> 7) ^ (h >>> 4);
return (int) ((h ^ (h >> 32)) & 0x7FFFFFFF);
}
@Override
public boolean equals(Object o) { // count keys as equal if they have the same key values.
return o instanceof GInfoPC && Arrays.equals(_gs, ((GInfoPC) o)._gs); // && _val==((GInfoPC) o)._val;
}
@Override
public int hashCode() {
return _hash;
}
@Override
public String toString() {
return Arrays.toString(_gs);
}
}
}
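// Illustrative sketch: once the frame is sorted by the groupby columns and then the
// sort columns, the rank is a counter that resets whenever the groupby key changes
// and stays NA whenever a sort column is NA. A single-threaded version of that pass;
// all data below is hypothetical.
class RankWithinGroupSketch {
  public static void main(String[] args) {
    long[] group = {0, 0, 0, 1};  // already sorted by group, then by value
    double[] value = {1.0, 2.0, Double.NaN, 5.0};
    double[] rank = new double[group.length];
    long prevGroup = Long.MIN_VALUE;
    long r = 0;
    for (int i = 0; i < group.length; i++) {
      if (Double.isNaN(value[i])) { rank[i] = Double.NaN; continue; } // NA sort key => NA rank
      if (group[i] != prevGroup) { prevGroup = group[i]; r = 0; }     // new group resets the counter
      rank[i] = ++r;
    }
    System.out.println(java.util.Arrays.toString(rank)); // [1.0, 2.0, NaN, 1.0]
  }
}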
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstReLevel.java
|
package water.rapids.ast.prims.mungers;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import java.util.Arrays;
/**
 * Relevel a single categorical column: make the given level the new first (reference) level.
 */
public class AstReLevel extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "level"};
}
@Override
public int nargs() {
return 1 + 2;
  } // (relevel x level)
@Override
public String str() {
return "relevel";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
    if (fr.numCols() != 1) throw new IllegalArgumentException("`relevel` works on a single column at a time.");
    String[] doms = fr.anyVec().domain(); // null-check before cloning to avoid an NPE on non-factor columns
    if (doms == null)
      throw new IllegalArgumentException("Cannot relevel a non-factor column!");
    doms = doms.clone();
String lvl = asts[2].exec(env).getStr();
final int idx = Arrays.asList(doms).indexOf(lvl);
if (idx == -1) throw new IllegalArgumentException("Did not find level `" + lvl + "` in the column.");
if (idx == 0) return new ValFrame(new Frame(fr.names(), new Vec[]{fr.anyVec().makeCopy()}));
String[] srcDom = fr.anyVec().domain();
final String[] dom = new String[srcDom.length];
dom[0] = srcDom[idx];
int j = 1;
for (int i = 0; i < srcDom.length; ++i)
if (i != idx) dom[j++] = srcDom[i];
return new ValFrame(new MRTask() {
@Override
public void map(Chunk c, NewChunk nc) {
int[] vals = new int[c._len];
c.getIntegers(vals, 0, c._len, -1);
for (int i = 0; i < vals.length; ++i)
if (vals[i] == -1) nc.addNA();
else if (vals[i] == idx)
nc.addNum(0);
else
nc.addNum(vals[i] + (vals[i] < idx ? 1 : 0));
}
}.doAll(1, Vec.T_CAT, fr).outputFrame(fr.names(), new String[][]{dom}));
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstRelevelByFreq.java
|
package water.rapids.ast.prims.mungers;
import water.DKV;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.util.ArrayUtils;
import water.util.VecUtils;
import java.util.Arrays;
public class AstRelevelByFreq extends AstPrimitive<AstRelevelByFreq> {
@Override
public String[] args() {
return new String[]{"frame", "weights", "topn"};
}
@Override
public int nargs() {
return 1 + 3;
} // (relevel.by.freq frame weights topn)
@Override
public String str() {
return "relevel.by.freq";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot[] asts) {
final Frame f = stk.track(asts[1].exec(env)).getFrame();
final String weightsColumn = asts[2].exec(env).getStr();
final Vec weights = f.vec(weightsColumn);
if (weightsColumn != null && weights == null) {
throw new IllegalArgumentException("Frame doesn't contain weights column '" + weightsColumn + "'.");
}
final double topN = asts[3].exec(env).getNum();
if ((topN != -1 && topN <= 0) || (int) topN != topN) {
throw new IllegalArgumentException("TopN argument needs to be a positive integer number, got: " + topN);
}
Frame result = new Frame(f);
for (int i = 0; i < result.numCols(); i++) {
Vec v = result.vec(i);
if (!v.isCategorical()) {
continue;
}
v = v.makeCopy();
result.replace(i, v);
relevelByFreq(v, weights, (int) topN);
}
return new ValFrame(result);
}
static void relevelByFreq(Vec v, Vec weights, int topN) {
double[] levelWeights = VecUtils.collectDomainWeights(v, weights);
int[] newDomainOrder = ArrayUtils.seq(0, levelWeights.length);
    ArrayUtils.sort(newDomainOrder, levelWeights, 0, -1);
if ((topN != -1) && (topN < newDomainOrder.length - 1)) {
newDomainOrder = takeTopNMostFrequentDomain(newDomainOrder, topN);
}
String[] domain = v.domain();
String[] newDomain = v.domain().clone();
for (int i = 0; i < newDomainOrder.length; i++) {
newDomain[i] = domain[newDomainOrder[i]];
}
// new domain order != mapping of levels
new RemapDomain(getMapping(newDomainOrder)).doAll(v);
v.setDomain(newDomain);
DKV.put(v);
}
/**
* Create mapping from reordered domain list.
* @param domainOrder sorted domain by count/weights DESC
* @return mapping from the old level to the new level
*/
static int[] getMapping(int[] domainOrder){
int[] mapping = new int[domainOrder.length];
for (int i = 0; i < domainOrder.length; i++) {
mapping[domainOrder[i]] = i;
}
return mapping;
}
/**
* Take the top N domains and sort rest of the indexes ASC
* @param domainOrder domain order already ordered by frequency DESC
* @param topN number of top N domains to keep unsorted
* @return new domain order where top N domains are untouched and rest of the domains are sorted ASC
*/
static int[] takeTopNMostFrequentDomain(int[] domainOrder, final int topN) {
int domainSize = domainOrder.length;
int[] newDomainOrder = new int[domainSize];
int[] topNidxs = new int[topN];
for (int i = 0; i < topN; i++) {
int topIdx = domainOrder[i];
topNidxs[i] = topIdx;
newDomainOrder[i] = topIdx;
}
Arrays.sort(topNidxs);
int pos = topN;
for (int i = 0; i < domainSize; i++) {
if (Arrays.binarySearch(topNidxs, i) >= 0)
continue;
newDomainOrder[pos++] = i;
}
assert pos == domainSize;
return newDomainOrder;
}
static class RemapDomain extends MRTask<RemapDomain> {
private final int[] _mapping;
public RemapDomain(int[] mapping) {
_mapping = mapping;
}
@Override
public void map(Chunk c) {
for (int row = 0; row < c._len; row++) {
if (c.isNA(row))
continue;
int level = (int) c.atd(row);
int newLevel = _mapping[level];
c.set(row, newLevel);
}
}
}
}
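// Illustrative sketch: a worked example of the two helpers above. With level
// frequencies {a:1, b:5, c:3}, sorting by weight DESC yields domainOrder [1, 2, 0]
// (b, c, a); getMapping inverts that into the old-level -> new-level remap that
// RemapDomain applies to the data. The values below are hypothetical.
class RelevelByFreqSketch {
  public static void main(String[] args) {
    int[] domainOrder = {1, 2, 0};           // levels sorted by frequency DESC
    int[] mapping = new int[domainOrder.length];
    for (int i = 0; i < domainOrder.length; i++)
      mapping[domainOrder[i]] = i;           // same computation as getMapping
    // old level 1 ("b") becomes new level 0, old level 2 ("c") becomes 1, etc.
    System.out.println(java.util.Arrays.toString(mapping)); // [2, 0, 1]
  }
}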
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstRename.java
|
package water.rapids.ast.prims.mungers;
import hex.Model;
import water.DKV;
import water.Iced;
import water.Key;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
/**
 * Rename a Frame or Model stored in the DKV.
 */
public class AstRename extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"oldId", "newId"};
}
@Override
public int nargs() {
return 1 + 2;
} // (rename oldId newId)
@Override
public String str() {
return "rename";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot[] asts) {
Key oldKey = Key.make(env.expand(asts[1].exec(env).getStr()));
Key newKey = Key.make(env.expand(asts[2].exec(env).getStr()));
Iced o = DKV.remove(oldKey).get();
if (o instanceof Frame)
DKV.put(newKey, new Frame(newKey, ((Frame) o)._names, ((Frame) o).vecs()));
else if (o instanceof Model) {
((Model) o)._key = newKey;
DKV.put(newKey, o);
} else
throw new IllegalArgumentException("Trying to rename Value of type " + o.getClass());
return new ValNum(Double.NaN);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstRowSlice.java
|
package water.rapids.ast.prims.mungers;
import water.*;
import water.fvec.*;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstExec;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.params.AstId;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import java.util.*;
/**
* Row Slice
*/
public class AstRowSlice extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "rows"};
}
// (rows src [row_list])
@Override
public int nargs() {
return 1 + 2;
}
@Override
public String str() {
return "rows";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
Frame returningFrame;
long nrows = fr.numRows();
if (asts[2] instanceof AstNumList) {
final AstNumList nums = (AstNumList) asts[2];
if (!nums._isSort && !nums.isEmpty() && nums._bases[0] >= 0)
throw new IllegalArgumentException("H2O does not currently reorder rows, please sort your row selection first");
long[] rows = (nums._isList || nums.min() < 0) ? nums.expand8Sort() : null;
if (rows != null) {
if (rows.length == 0) { // Empty inclusion list?
} else if (rows[0] >= 0) { // Positive (inclusion) list
          if (rows[rows.length - 1] >= nrows) // valid row indices are 0..nrows-1
            throw new IllegalArgumentException("Row must be an integer from 0 to " + (nrows - 1));
} else { // Negative (exclusion) list
          if (rows[rows.length - 1] >= 0)
            throw new IllegalArgumentException("Cannot mix negative and positive row selection");
// Invert the list to make a positive list, ignoring out-of-bounds values
BitSet bs = new BitSet((int) nrows);
for (long row : rows) {
int idx = (int) (-row - 1); // The positive index
if (idx >= 0 && idx < nrows)
bs.set(idx); // Set column to EXCLUDE
}
rows = new long[(int) nrows - bs.cardinality()];
for (int i = bs.nextClearBit(0), j = 0; i < nrows; i = bs.nextClearBit(i + 1))
rows[j++] = i;
}
}
final long[] ls = rows;
returningFrame = new MRTask() {
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
if (nums.cnt() == 0) return;
if (ls != null && ls.length == 0) return;
long start = cs[0].start();
long end = start + cs[0]._len;
long min = ls == null ? (long) nums.min() : ls[0], max = ls == null ? (long) nums.max() - 1 : ls[ls.length - 1]; // exclusive max to inclusive max when stride == 1
// [ start, ..., end ] the chunk
//1 [] nums out left: nums.max() < start
//2 [] nums out rite: nums.min() > end
//3 [ nums ] nums run left: nums.min() < start && nums.max() <= end
//4 [ nums ] nums run in : start <= nums.min() && nums.max() <= end
//5 [ nums ] nums run rite: start <= nums.min() && end < nums.max()
if (!(max < start || min > end)) { // not situation 1 or 2 above
long startOffset = (min > start ? min : start); // situation 4 and 5 => min > start;
for (int i = (int) (startOffset - start); i < cs[0]._len; ++i) {
if ((ls == null && nums.has(start + i)) || (ls != null && Arrays.binarySearch(ls, start + i) >= 0)) {
for (int c = 0; c < cs.length; ++c) {
if (cs[c] instanceof CStrChunk) ncs[c].addStr(cs[c], i);
else if (cs[c] instanceof C16Chunk) ncs[c].addUUID(cs[c], i);
else if (cs[c].isNA(i)) ncs[c].addNA();
else ncs[c].addNum(cs[c].atd(i));
}
}
}
}
}
}.doAll(fr.types(), fr).outputFrame(fr.names(), fr.domains());
} else if ((asts[2] instanceof AstNum)) {
long[] rows = new long[]{(long) (((AstNum) asts[2]).getNum())};
returningFrame = fr.deepSlice(rows, null);
} else if ((asts[2] instanceof AstExec) || (asts[2] instanceof AstId)) {
Frame predVec = stk.track(asts[2].exec(env)).getFrame();
if (predVec.numCols() != 1)
throw new IllegalArgumentException("Conditional Row Slicing Expression evaluated to " + predVec.numCols() + " columns. Must be a boolean Vec.");
returningFrame = fr.deepSlice(predVec, null);
} else
throw new IllegalArgumentException("Row slicing requires a number-list as the last argument, but found a " + asts[2].getClass());
return new ValFrame(returningFrame);
}
}
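// Illustrative sketch: the negative-index branch above inverts an exclusion list
// into an inclusion list with a BitSet. The same inversion on small hypothetical
// inputs, using the Rapids encoding where -row-1 marks an excluded row.
class RowExclusionSketch {
  public static void main(String[] args) {
    long nrows = 5;
    long[] exclude = {-1, -3};               // excludes rows 0 and 2
    java.util.BitSet bs = new java.util.BitSet((int) nrows);
    for (long row : exclude) {
      int idx = (int) (-row - 1);            // decode to the positive row index
      if (idx >= 0 && idx < nrows) bs.set(idx);
    }
    long[] keep = new long[(int) nrows - bs.cardinality()];
    for (int i = bs.nextClearBit(0), j = 0; i < nrows; i = bs.nextClearBit(i + 1))
      keep[j++] = i;
    System.out.println(java.util.Arrays.toString(keep)); // [1, 3, 4]
  }
}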
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstScale.java
|
package water.rapids.ast.prims.mungers;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstNumList;
import water.util.Log;
import java.util.Arrays;
/**
* Center and scale a frame. Can be passed in the centers and scales (one per column in an number list), or a
* TRUE/FALSE.
*/
public class AstScale extends AstPrimitive {
private final boolean _in_place;
private AstScale(boolean inPlace) {
_in_place = inPlace;
}
public AstScale() {
this(false);
}
@Override
public String[] args() {
return new String[]{"ary", "center", "scale"};
}
@Override
public int nargs() {
return 1 + 3;
} // (scale x center scale)
@Override
public String str() {
return "scale";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
final Frame originalFrame = stk.track(asts[1].exec(env)).getFrame();
final Frame numericFrame = new Frame(); // filter the frame to only numerical columns
for (int i = 0; i < originalFrame.numCols(); i++) {
Vec v = originalFrame.vec(i);
if (v.get_type() == Vec.T_NUM) {
numericFrame.add(originalFrame.name(i), v);
}
}
if (numericFrame.numCols() == 0) {
Log.info("Nothing scaled in frame '%s'. There are no numeric columns.");
return new ValFrame(originalFrame);
}
final double[] means = calcMeans(env, asts[2], numericFrame, originalFrame);
final double[] mults = calcMults(env, asts[3], numericFrame, originalFrame);
// Update in-place.
final Frame workFrame = _in_place ? numericFrame : numericFrame.deepCopy(null);
new InPlaceScaleTask(means, mults).doAll(workFrame);
final Frame outputFrame;
if (_in_place) {
outputFrame = originalFrame;
} else {
outputFrame = new Frame();
String[] names = originalFrame.names();
byte[] types = originalFrame.types();
for (int i = 0; i < originalFrame.numCols(); i++) {
if (types[i] == Vec.T_NUM) {
outputFrame.add(names[i], workFrame.vec(names[i]));
} else {
outputFrame.add(names[i], originalFrame.vec(i));
}
}
}
return new ValFrame(outputFrame);
}
private static class InPlaceScaleTask extends MRTask<InPlaceScaleTask> {
private final double[] _means;
private final double[] _mults;
InPlaceScaleTask(double[] means, double[] mults) {
_means = means;
_mults = mults;
}
@Override
public void map(Chunk[] cs) {
for (int i = 0; i < cs.length; i++)
for (int row = 0; row < cs[i]._len; row++)
cs[i].set(row, (cs[i].atd(row) - _means[i]) * _mults[i]);
}
}
// Peel out the bias/shift/mean
static double[] calcMeans(Env env, AstRoot meanSpec, Frame fr, Frame origFr) {
final int ncols = fr.numCols();
double[] means;
if (meanSpec instanceof AstNumList) {
means = extractNumericValues(((AstNumList) meanSpec).expand(), fr, origFr);
} else {
double d = meanSpec.exec(env).getNum();
if (d == 0) means = new double[ncols]; // No change on means, so zero-filled
else if (d == 1) means = fr.means();
else throw new IllegalArgumentException("Only true or false allowed");
}
return means;
}
// Peel out the scale/stddev
static double[] calcMults(Env env, AstRoot multSpec, Frame fr, Frame origFr) {
double[] mults;
if (multSpec instanceof AstNumList) {
mults = extractNumericValues(((AstNumList) multSpec).expand(), fr, origFr);
} else {
Val v = multSpec.exec(env);
if (v instanceof ValFrame) {
mults = extractNumericValues(toArray(v.getFrame().anyVec()), fr, origFr);
} else {
double d = v.getNum();
if (d == 0)
Arrays.fill(mults = new double[fr.numCols()], 1.0); // No change on mults, so one-filled
else if (d == 1) mults = fr.mults();
else throw new IllegalArgumentException("Only true or false allowed");
}
}
return mults;
}
private static double[] toArray(Vec v) {
double[] res = new double[(int) v.length()];
for (int i = 0; i < res.length; ++i)
res[i] = v.at(i);
return res;
}
private static double[] extractNumericValues(double[] vals, Frame fr, Frame origFr) {
if (vals.length != origFr.numCols()) {
throw new IllegalArgumentException("Values must be the same length as is the number of columns of the Frame to scale" +
" (fill 0 for non-numeric columns).");
}
if (vals.length == fr.numCols())
return vals;
double[] numVals = new double[fr.numCols()];
int pos = 0;
for (int i = 0; i < origFr.numCols(); i++) {
if (origFr.vec(i).get_type() != Vec.T_NUM)
continue;
numVals[pos++] = vals[i];
}
assert pos == numVals.length;
return numVals;
}
public static class AstScaleInPlace extends AstScale {
public AstScaleInPlace() {
super(true);
}
@Override
public String str() {
return "scale_inplace";
}
}
}
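// Illustrative sketch: InPlaceScaleTask applies x' = (x - mean) * mult to every
// numeric cell, where mult is typically 1/sd when scale=TRUE and 1 otherwise. The
// same transform on one in-memory column; the numbers below are hypothetical.
class ScaleSketch {
  public static void main(String[] args) {
    double[] col = {1.0, 2.0, 3.0};
    double mean = 2.0, mult = 1.0;           // center only (scale == FALSE)
    for (int i = 0; i < col.length; i++)
      col[i] = (col[i] - mean) * mult;
    System.out.println(java.util.Arrays.toString(col)); // [-1.0, 0.0, 1.0]
  }
}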
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstSetDomain.java
|
package water.rapids.ast.prims.mungers;
import water.DKV;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.params.AstStrList;
import water.util.VecUtils;
import java.util.Arrays;
/**
 * Replace the domain (level names) of a single categorical column.
 */
public class AstSetDomain extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "inPlace", "newDomains"};
}
@Override
public int nargs() {
return 1 + 3;
} // (setDomain x inPlace [list of strings])
@Override
public String str() {
return "setDomain";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame f = stk.track(asts[1].exec(env)).getFrame();
boolean inPlace = asts[2].exec(env).getNum() == 1;
String[] newDomains = ((AstStrList) asts[3])._strs;
if (f.numCols() != 1)
throw new IllegalArgumentException("Must be a single column. Got: " + f.numCols() + " columns.");
if (! f.vec(0).isCategorical())
throw new IllegalArgumentException("Vector must be a factor column. Got: " + f.vec(0).get_type_str());
Vec v;
if (inPlace)
v = f.vec(0);
else
v = env._ses.copyOnWrite(f, new int[]{0})[0]; // copy-on-write
if (newDomains != null && newDomains.length != v.domain().length) {
// in this case we want to recollect the domain and check that number of levels matches _domains
VecUtils.CollectDomainFast t = new VecUtils.CollectDomainFast((int) v.max());
t.doAll(v);
final long[] dom = t.domain();
if (dom.length != newDomains.length)
throw new IllegalArgumentException("Number of replacement factors must equal current number of levels. Current number of levels: " + dom.length + " != " + newDomains.length);
new MRTask() {
@Override
public void map(Chunk c) {
for (int i = 0; i < c._len; ++i) {
if (!c.isNA(i)) {
long num = Arrays.binarySearch(dom, c.at8(i));
if (num < 0) throw new IllegalArgumentException("Could not find the categorical value!");
c.set(i, num);
}
}
}
}.doAll(v);
}
v.setDomain(newDomains);
DKV.put(v);
return new ValFrame(f);
}
}
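// Illustrative sketch: when the replacement domain is shorter than the current one,
// AstSetDomain recollects the levels actually present and renumbers each row to its
// position in that collected (sorted) domain via binarySearch. The same renumbering
// on a plain array; the values below are hypothetical.
class SetDomainRemapSketch {
  public static void main(String[] args) {
    long[] dom = {0, 2, 5};                  // collected levels, sorted ascending
    long[] col = {5, 0, 2};                  // per-row level values before remap
    for (int i = 0; i < col.length; i++)
      col[i] = java.util.Arrays.binarySearch(dom, col[i]); // dense new level index
    System.out.println(java.util.Arrays.toString(col));    // [2, 0, 1]
  }
}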
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstSetLevel.java
|
package water.rapids.ast.prims.mungers;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import java.util.Arrays;
/**
 * Set every row of a single categorical column to the given level.
 */
public class AstSetLevel extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "level"};
}
@Override
public int nargs() {
return 1 + 2;
} // (setLevel x level)
@Override
public String str() {
return "setLevel";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if (fr.numCols() != 1) throw new IllegalArgumentException("`setLevel` works on a single column at a time.");
    String[] doms = fr.anyVec().domain(); // null-check before cloning to avoid an NPE on non-factor columns
    if (doms == null)
      throw new IllegalArgumentException("Cannot set the level on a non-factor column!");
    doms = doms.clone();
String lvl = asts[2].exec(env).getStr();
final int idx = Arrays.asList(doms).indexOf(lvl);
if (idx == -1) throw new IllegalArgumentException("Did not find level `" + lvl + "` in the column.");
// COW semantics
Frame fr2 = new MRTask() {
@Override
public void map(Chunk c, NewChunk nc) {
for (int i = 0; i < c._len; ++i)
nc.addNum(idx);
}
}.doAll(new byte[]{Vec.T_NUM}, fr.anyVec()).outputFrame(null, fr.names(), fr.domains());
return new ValFrame(fr2);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/mungers/AstSort.java
|
package water.rapids.ast.prims.mungers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.Merge;
import water.rapids.ast.AstParameter;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import water.rapids.vals.ValFrame;
/** Sort the whole frame by the given columns. String columns are allowed in the frame. However, we do
* not support sorting on string columns.
*/
public class AstSort extends AstPrimitive {
@Override public String[] args() { return new String[]{"ary","cols"}; }
@Override public String str(){ return "sort";}
@Override public int nargs() { return 1+2+1; } // (sort ary [cols] [int])
@Override public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
int[] cols = ((AstParameter)asts[2]).columns(fr.names());
int[] sortAsc;
if (asts[3] instanceof AstNumList)
sortAsc = ((AstNumList) asts[3]).expand4();
else
sortAsc = new int[]{(int) ((AstNum) asts[3]).getNum()}; // R client can send 1 element for some reason
assert sortAsc.length==cols.length;
return new ValFrame(Merge.sort(fr,cols, sortAsc));
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstAnd.java
|
package water.rapids.ast.prims.operators;
/**
 * Element-wise logical AND.
 */
public class AstAnd extends AstBinOp {
public String str() {
return "&";
}
public double op(double l, double r) {
return AstLAnd.and_op(l, r);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstBinOp.java
|
package water.rapids.ast.prims.operators;
import water.Futures;
import water.H2O;
import water.MRTask;
import water.MemoryManager;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.rapids.*;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValNum;
import water.rapids.vals.ValRow;
import water.util.ArrayUtils;
import java.util.*;
/**
* Binary operator.
* Subclasses auto-widen between scalars and Frames, and have exactly two arguments
*/
abstract public class AstBinOp extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"leftArg", "rightArg"};
}
@Override
public int nargs() {
return 1 + 2;
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val left = stk.track(asts[1].exec(env));
Val rite = stk.track(asts[2].exec(env));
return prim_apply(left, rite);
}
public Val prim_apply(Val left, Val rite) {
switch (left.type()) {
case Val.NUM:
final double dlf = left.getNum();
switch (rite.type()) {
case Val.NUM:
return new ValNum(op(dlf, rite.getNum()));
case Val.NUMS:
return new ValNum(op(dlf, rite.getNums()[0]));
case Val.FRM:
return scalar_op_frame(dlf, rite.getFrame());
case Val.ROW:
double[] lft = new double[rite.getRow().length];
Arrays.fill(lft, dlf);
return row_op_row(lft, rite.getRow(), ((ValRow) rite).getNames());
case Val.STR:
throw H2O.unimpl();
case Val.STRS:
throw H2O.unimpl();
default:
throw H2O.unimpl();
}
case Val.NUMS:
final double ddlf = left.getNums()[0];
switch (rite.type()) {
case Val.NUM:
return new ValNum(op(ddlf, rite.getNum()));
case Val.NUMS:
return new ValNum(op(ddlf, rite.getNums()[0]));
case Val.FRM:
return scalar_op_frame(ddlf, rite.getFrame());
case Val.ROW:
double[] lft = new double[rite.getRow().length];
Arrays.fill(lft, ddlf);
return row_op_row(lft, rite.getRow(), ((ValRow) rite).getNames());
case Val.STR:
throw H2O.unimpl();
case Val.STRS:
throw H2O.unimpl();
default:
throw H2O.unimpl();
}
case Val.FRM:
Frame flf = left.getFrame();
switch (rite.type()) {
case Val.NUM:
return frame_op_scalar(flf, rite.getNum());
case Val.NUMS:
return frame_op_scalar(flf, rite.getNums()[0]);
case Val.STR:
return frame_op_scalar(flf, rite.getStr());
case Val.STRS:
return frame_op_scalar(flf, rite.getStrs()[0]);
case Val.FRM:
return frame_op_frame(flf, rite.getFrame());
default:
throw H2O.unimpl();
}
case Val.STR:
String slf = left.getStr();
switch (rite.type()) {
case Val.NUM:
throw H2O.unimpl();
case Val.NUMS:
throw H2O.unimpl();
case Val.STR:
throw H2O.unimpl();
case Val.STRS:
throw H2O.unimpl();
case Val.FRM:
return scalar_op_frame(slf, rite.getFrame());
default:
throw H2O.unimpl();
}
case Val.STRS:
String sslf = left.getStrs()[0];
switch (rite.type()) {
case Val.NUM:
throw H2O.unimpl();
case Val.NUMS:
throw H2O.unimpl();
case Val.STR:
throw H2O.unimpl();
case Val.STRS:
throw H2O.unimpl();
case Val.FRM:
return scalar_op_frame(sslf, rite.getFrame());
default:
throw H2O.unimpl();
}
case Val.ROW:
double dslf[] = left.getRow();
switch (rite.type()) {
case Val.NUM:
double[] right = new double[dslf.length];
Arrays.fill(right, rite.getNum());
return row_op_row(dslf, right, ((ValRow) left).getNames());
case Val.ROW:
return row_op_row(dslf, rite.getRow(), ((ValRow) rite).getNames());
case Val.FRM:
return row_op_row(dslf, rite.getRow(), rite.getFrame().names());
default:
throw H2O.unimpl();
}
default:
throw H2O.unimpl();
}
}
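// Editor's note: a hedged sketch of the widening rules above, not part of the original
// source. For a subclass such as AstPlus (op = l + r):
//   prim_apply(ValNum(2), ValNum(3))           -> ValNum(5)
//   prim_apply(ValNum(2), single-column frame) -> frame with 2 added to every cell
//   prim_apply(row {1, 2}, ValNum(10))         -> row {11, 12} (the scalar is widened per element)
// Combinations with no sensible meaning (e.g. string op number) fall through to H2O.unimpl().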
/**
* Override to express a basic math primitive
*/
public abstract double op(double l, double r);
public double str_op(BufferedString l, BufferedString r) {
throw H2O.unimpl("Binary operation '" + str() + "' is not supported on String columns.");
}
/**
* Auto-widen the scalar to every element of the frame
*/
private ValFrame scalar_op_frame(final double d, Frame fr) {
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
for (int c = 0; c < chks.length; c++) {
Chunk chk = chks[c];
NewChunk cres = cress[c];
for (int i = 0; i < chk._len; i++)
cres.addNum(op(d, chk.atd(i)));
}
}
}.doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame(fr._names, null);
return cleanCategorical(fr, res); // Cleanup categorical misuse
}
/**
* Auto-widen the scalar to every element of the frame
*/
public ValFrame frame_op_scalar(Frame fr, final double d) {
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
for (int c = 0; c < chks.length; c++) {
Chunk chk = chks[c];
NewChunk cres = cress[c];
for (int i = 0; i < chk._len; i++)
cres.addNum(op(chk.atd(i), d));
}
}
}.doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame(fr._names, null);
return cleanCategorical(fr, res); // Cleanup categorical misuse
}
// Ops do not make sense on categoricals, except EQ/NE; flip such ops to NAs
private ValFrame cleanCategorical(Frame oldfr, Frame newfr) {
final boolean categoricalOK = categoricalOK();
final Vec oldvecs[] = oldfr.vecs();
final Vec newvecs[] = newfr.vecs();
Futures fs = new Futures();
for (int i = 0; i < oldvecs.length; i++)
if ((oldvecs[i].isCategorical() && !categoricalOK)) { // categorical are OK (op is EQ/NE)
Vec cv = newvecs[i].makeCon(Double.NaN);
newfr.replace(i, cv).remove(fs);
}
fs.blockForPending();
return new ValFrame(newfr);
}
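// Editor's note (illustrative, not part of the original source): for a frame with a
// categorical column, (+ fr 1) ends up here with categoricalOK() == false, so the
// result column is replaced by all NAs; EQ/NE override categoricalOK() to true and
// keep their categorical comparisons intact.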
/**
* Auto-widen the scalar to every element of the frame
*/
private ValFrame frame_op_scalar(Frame fr, final String str) {
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
BufferedString vstr = new BufferedString();
for (int c = 0; c < chks.length; c++) {
Chunk chk = chks[c];
NewChunk cres = cress[c];
Vec vec = chk.vec();
// String Vectors: apply str_op as BufferedStrings to all elements
if (vec.isString()) {
final BufferedString conStr = new BufferedString(str);
for (int i = 0; i < chk._len; i++)
cres.addNum(str_op(chk.atStr(vstr, i), conStr));
} else if (vec.isCategorical()) {
// categorical Vectors: convert string to domain value; apply op (not
// str_op). Not sure what the "right" behavior here is, can
// easily argue that should instead apply str_op to the categorical
// string domain value - except that this whole operation only
// makes sense for EQ/NE, and is much faster when just comparing
// doubles vs comparing strings. Note that if the string is not
// part of the categorical domain, the find op returns -1 which is never
// equal to any categorical dense integer (which are always 0+).
final double d = (double) ArrayUtils.find(vec.domain(), str);
for (int i = 0; i < chk._len; i++)
cres.addNum(op(chk.atd(i), d));
} else { // mixing string and numeric
final double d = op(1, 2); // constant result: a string never matches a numeric column (0 for ==, 1 for !=)
for (int i = 0; i < chk._len; i++)
cres.addNum(d);
}
}
}
}.doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame(fr._names, null);
return new ValFrame(res);
}
/**
* Auto-widen the scalar to every element of the frame
*/
private ValFrame scalar_op_frame(final String str, Frame fr) {
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
BufferedString vstr = new BufferedString();
for (int c = 0; c < chks.length; c++) {
Chunk chk = chks[c];
NewChunk cres = cress[c];
Vec vec = chk.vec();
// String Vectors: apply str_op as BufferedStrings to all elements
if (vec.isString()) {
final BufferedString conStr = new BufferedString(str);
for (int i = 0; i < chk._len; i++)
cres.addNum(str_op(conStr, chk.atStr(vstr, i)));
} else if (vec.isCategorical()) {
// categorical Vectors: convert string to domain value; apply op (not
// str_op). Not sure what the "right" behavior here is, can
// easily argue that should instead apply str_op to the categorical
// string domain value - except that this whole operation only
// makes sense for EQ/NE, and is much faster when just comparing
// doubles vs comparing strings.
final double d = (double) ArrayUtils.find(vec.domain(), str);
for (int i = 0; i < chk._len; i++)
cres.addNum(op(d, chk.atd(i)));
} else { // mixing string and numeric
final double d = op(1, 2); // constant result: a string never matches a numeric column (0 for ==, 1 for !=)
for (int i = 0; i < chk._len; i++)
cres.addNum(d);
}
}
}
}.doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame(fr._names, null);
return new ValFrame(res);
}
/**
* Auto-widen: If one frame has only 1 column, auto-widen that 1 column to
* the rest. Otherwise the frames must have the same column count, and
* auto-widen element-by-element. Short-cut if one frame has zero
* columns.
*/
private ValFrame frame_op_frame(Frame lf, Frame rt) {
if (lf.numRows() != rt.numRows()) {
// special case for broadcasting a single row of data across a frame
if (lf.numRows() == 1 || rt.numRows() == 1) {
if (lf.numCols() != rt.numCols())
throw new IllegalArgumentException("Frames must have same columns, found " + lf.numCols() + " columns and " + rt.numCols() + " columns.");
return frame_op_row(lf, rt);
} else
throw new IllegalArgumentException("Frames must have same rows, found " + lf.numRows() + " rows and " + rt.numRows() + " rows.");
}
if (lf.numCols() == 0) return new ValFrame(lf);
if (rt.numCols() == 0) return new ValFrame(rt);
if (lf.numCols() == 1 && rt.numCols() > 1) return vec_op_frame(lf.vecs()[0], rt);
if (rt.numCols() == 1 && lf.numCols() > 1) return frame_op_vec(lf, rt.vecs()[0]);
if (lf.numCols() != rt.numCols())
throw new IllegalArgumentException("Frames must have same columns, found " + lf.numCols() + " columns and " + rt.numCols() + " columns.");
final int[][] alignedCategoricals = new int[lf.numCols()][];
final boolean[] categorical = new boolean[lf.numCols()];
final boolean[] rtDomainNotBigger = new boolean[lf.numCols()];
for (int c = 0; c < lf.numCols(); c++) {
// Precomputed here; read later while iterating over rows
categorical[c] = categoricalOK() && lf.vec(c).isCategorical() && rt.vec(c).isCategorical();
if (categorical[c]) {
// Precomputed here; read later while iterating over rows
rtDomainNotBigger[c] = lf.vec(c).domain().length >= rt.vec(c).domain().length;
if (rtDomainNotBigger[c]) {
alignedCategoricals[c] = alignCategoricals(lf.vec(c).domain(), rt.vec(c).domain());
} else {
alignedCategoricals[c] = alignCategoricals(rt.vec(c).domain(), lf.vec(c).domain());
}
}
}
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
BufferedString lfstr = new BufferedString();
BufferedString rtstr = new BufferedString();
assert (cress.length << 1) == chks.length;
for (int c = 0; c < cress.length; c++) {
Chunk clf = chks[c];
Chunk crt = chks[c + cress.length];
NewChunk cres = cress[c];
if (clf.vec().isString())
for (int i = 0; i < clf._len; i++)
cres.addNum(str_op(clf.atStr(lfstr, i), crt.atStr(rtstr, i)));
else if (categorical[c]) {
// Remap the shorter domain's level indices into the longer domain's index space before comparing
if (rtDomainNotBigger[c]) {
for (int i = 0; i < clf._len; i++) {
double crtAtdValue = crt.atd(i);
if (crt.isNA(i)) {
cres.addNum(op(clf.atd(i), crtAtdValue));
} else {
cres.addNum(op(clf.atd(i), alignedCategoricals[c][(int) crtAtdValue]));
}
}
} else {
for (int i = 0; i < clf._len; i++) {
double clfAtdValue = clf.atd(i);
if (clf.isNA(i)) {
cres.addNum(op(clfAtdValue, crt.atd(i)));
} else {
cres.addNum(op(alignedCategoricals[c][(int) clfAtdValue], crt.atd(i)));
}
}
}
} else {
for (int i = 0; i < clf._len; i++)
cres.addNum(op(clf.atd(i), crt.atd(i)));
}
}
}
}.doAll(lf.numCols(), Vec.T_NUM, new Frame(lf).add(rt)).outputFrame(lf._names, null);
return cleanCategorical(lf, res); // Cleanup categorical misuse
}
/**
* Produces a mapping array with indexes of the smaller pointing to the larger domain.
*
* @param longerDomain Domain to originally map from
* @param shorterDomain Domain to originally map to
* @return Cross-domain mapping as an array of primitive integers
*/
private int[] alignCategoricals(String[] longerDomain, String[] shorterDomain) {
String[] sortedLongerDomain = Arrays.copyOf(longerDomain, longerDomain.length);
// Sort to make sure binary search is possible
Arrays.sort(sortedLongerDomain);
int[] transformedIndices = MemoryManager.malloc4(shorterDomain.length);
for (int i = 0; i < shorterDomain.length; i++) {
transformedIndices[i] = Arrays.binarySearch(sortedLongerDomain, shorterDomain[i]);
}
return transformedIndices;
}
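// Editor's note: a worked example of the mapping above, not part of the original
// source. With longerDomain = {"a","b","c"} and shorterDomain = {"b","c"}, the sorted
// copy is {"a","b","c"} and the result is {1, 2}: shorter index 0 ("b") maps to 1 and
// shorter index 1 ("c") maps to 2. A level missing from the longer domain yields the
// negative binarySearch result, which never equals a valid (non-negative) level index.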
private ValFrame frame_op_row(Frame lf, Frame row) {
final double[] rawRow = new double[row.numCols()];
for (int i = 0; i < rawRow.length; ++i)
rawRow[i] = row.vec(i).isNumeric() || row.vec(i).isTime() ? row.vec(i).at(0) : Double.NaN; // is numberlike, if not then NaN
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
for (int c = 0; c < cress.length; c++) {
Chunk clf = chks[c];
NewChunk cres = cress[c];
for (int r = 0; r < clf._len; ++r) {
if (clf.vec().isString())
cres.addNum(Double.NaN); // TODO: improve
else
cres.addNum(op(clf.atd(r), rawRow[c]));
}
}
}
}.doAll(lf.numCols(), Vec.T_NUM, lf).outputFrame(lf._names, null);
return cleanCategorical(lf, res);
}
private ValRow row_op_row(double[] lf, double[] rt, String[] names) {
double[] res = new double[lf.length];
for (int i = 0; i < lf.length; i++)
res[i] = op(lf[i], rt[i]);
return new ValRow(res, names);
}
private ValFrame vec_op_frame(Vec vec, Frame fr) {
// Already checked for same rows, non-zero frame
Frame rt = new Frame(fr);
rt.add("", vec);
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
assert cress.length == chks.length - 1;
Chunk clf = chks[cress.length];
for (int c = 0; c < cress.length; c++) {
Chunk crt = chks[c];
NewChunk cres = cress[c];
for (int i = 0; i < clf._len; i++)
cres.addNum(op(clf.atd(i), crt.atd(i)));
}
}
}.doAll(fr.numCols(), Vec.T_NUM, rt).outputFrame(fr._names, null);
return cleanCategorical(fr, res); // Cleanup categorical misuse
}
private ValFrame frame_op_vec(Frame fr, Vec vec) {
// Already checked for same rows, non-zero frame
Frame lf = new Frame(fr);
lf.add("", vec);
Frame res = new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
assert cress.length == chks.length - 1;
Chunk crt = chks[cress.length];
for (int c = 0; c < cress.length; c++) {
Chunk clf = chks[c];
NewChunk cres = cress[c];
for (int i = 0; i < clf._len; i++)
cres.addNum(op(clf.atd(i), crt.atd(i)));
}
}
}.doAll(fr.numCols(), Vec.T_NUM, lf).outputFrame(fr._names, null);
return cleanCategorical(fr, res); // Cleanup categorical misuse
}
/**
* Does it make sense to run this operation on a categorical variable ?
*
* @return True if the operation may be applied on a categorical variable, otherwise false.
*/
public boolean categoricalOK() {
return false;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstDiv.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstDiv extends AstBinOp {
public String str() {
return "/";
}
public double op(double l, double r) {
return l / r;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstEq.java
|
package water.rapids.ast.prims.operators;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.rapids.vals.ValFrame;
import water.util.MathUtils;
import water.util.StringUtils;
/**
*/
public class AstEq extends AstBinOp {
public String str() {
return "==";
}
@Override
public double op(double l, double r) {
return MathUtils.equalsWithinOneSmallUlp(l, r) ? 1 : 0;
}
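// Editor's note (illustrative assumption, not part of the original source): the
// one-ulp tolerance lets results of inexact arithmetic compare equal, e.g.
//   op(0.1 + 0.2, 0.3)   // 0.30000000000000004 vs 0.3 differ by one ulp -> 1
// whereas a plain == on the same doubles would be false.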
@Override
public ValFrame frame_op_scalar(Frame fr, final double d) {
return new ValFrame(new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
for (int c = 0; c < chks.length; c++) {
Chunk chk = chks[c];
NewChunk cres = cress[c];
BufferedString bStr = new BufferedString();
if (chk.vec().isString())
for (int i = 0; i < chk._len; i++)
cres.addNum(str_op(chk.atStr(bStr, i), Double.isNaN(d) ? null : new BufferedString(String.valueOf(d))));
else if (!chk.vec().isNumeric()) cres.addZeros(chk._len);
else
for (int i = 0; i < chk._len; i++)
cres.addNum(op(chk.atd(i), d));
}
}
}.doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame());
}
@Override
public boolean categoricalOK() {
return true;
} // EQ is meaningful on categorical (enum) columns
public double str_op(BufferedString l, BufferedString r) {
if (StringUtils.isNullOrEmpty(l))
return StringUtils.isNullOrEmpty(r) ? 1 : 0;
else
return l.equals(r) ? 1 : 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstGe.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstGe extends AstBinOp {
public String str() {
return ">=";
}
public double op(double l, double r) {
return l >= r ? 1 : 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstGt.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstGt extends AstBinOp {
public String str() {
return ">";
}
public double op(double l, double r) {
return l > r ? 1 : 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstIfElse.java
|
package water.rapids.ast.prims.operators;
import water.H2O;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.*;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValNum;
import water.rapids.vals.ValRow;
import water.util.ArrayUtils;
import water.util.VecUtils;
import java.util.Arrays;
/**
* If-Else -- ternary conditional operator, equivalent of "?:" in C++ and Java.
* <p/>
* "NaNs poison". If the test is a NaN, evaluate neither side and return a NaN
* <p/>
* "Frames poison". If the test is a Frame, both sides are evaluated and selected between according to the test.
* The result is a Frame. All Frames must be compatible, and scalars and 1-column Frames are widened to match the
* widest frame. NaN test values produce NaN results.
* <p/>
* If the test is a scalar, then only the returned side is evaluated. If both sides are scalars or frames, then the
* evaluated result is returned. The unevaluated side is not checked for being a compatible frame. It is an error
* if one side is typed as a scalar and the other as a Frame.
*/
public class AstIfElse extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"test", "true", "false"};
}
/* (ifelse test true false) */
@Override
public int nargs() {
return 1 + 3;
}
public String str() {
return "ifelse";
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val val = stk.track(asts[1].exec(env));
if (val.isNum()) { // Scalar test, scalar result
double d = val.getNum();
if (Double.isNaN(d)) return new ValNum(Double.NaN);
Val res = stk.track(asts[d == 0 ? 3 : 2].exec(env)); // exec only 1 of false and true
return res.isFrame() ? new ValNum(res.getFrame().vec(0).at(0)) : res;
}
// Frame test. Frame result.
if (val.type() == Val.ROW)
return row_ifelse((ValRow) val, asts[2].exec(env), asts[3].exec(env));
Frame tst = val.getFrame();
Frame fr = new Frame(tst);
// If all zero's, return false and never execute true.
Val tval = null;
for (Vec vec : tst.vecs())
if (vec.min() != 0 || vec.max() != 0) {
tval = exec_check(env, stk, tst, asts[2]);
break;
} else
tval = Unevaluated.INSTANCE;
// If all nonzero's (or NA's), then never execute false.
Val fval = null;
for (Vec vec : tst.vecs())
if (vec.nzCnt() + vec.naCnt() < vec.length()) {
fval = exec_check(env, stk, tst, asts[3]);
break;
} else
fval = Unevaluated.INSTANCE;
// If one of the ASTs was not evaluated use the other one as a placeholder (only the type information of the placeholder will be used)
if (tval == Unevaluated.INSTANCE && fval == Unevaluated.INSTANCE) { // neither of them are defined, result will NAs
tval = null;
fval = null;
} else if (tval == Unevaluated.INSTANCE) { // true-AST is unevaluated, use false-value as a placeholder with correct type
tval = fval;
} else if (fval == Unevaluated.INSTANCE) { // false-AST is unevaluated, use true-value as a placeholder with correct type
fval = tval;
}
final boolean has_tfr = tval != null && tval.isFrame();
final String ts = (tval != null && tval.isStr()) ? tval.getStr() : null;
final double td = (tval != null && tval.isNum()) ? tval.getNum() : Double.NaN;
final int[] tsIntMap = new int[tst.numCols()];
final boolean has_ffr = fval != null && fval.isFrame();
final String fs = (fval != null && fval.isStr()) ? fval.getStr() : null;
final double fd = (fval != null && fval.isNum()) ? fval.getNum() : Double.NaN;
final int[] fsIntMap = new int[tst.numCols()];
if (has_tfr)
fr.add(tval.getFrame());
if (has_ffr)
fr.add(fval.getFrame());
String[][] domains = null;
final int[][] maps = new int[tst.numCols()][];
if (fs != null || ts != null) { // time to build domains...
domains = new String[tst.numCols()][];
if (fs != null && ts != null) {
for (int i = 0; i < tst.numCols(); ++i) {
domains[i] = new String[]{fs, ts}; // false => 0; truth => 1
fsIntMap[i] = 0;
tsIntMap[i] = 1;
}
} else if (ts != null) {
for (int i = 0; i < tst.numCols(); ++i) {
if (has_ffr) {
Vec v = fr.vec(i + tst.numCols() + (has_tfr ? tst.numCols() : 0));
if (!v.isCategorical())
throw H2O.unimpl("Column is not categorical.");
String[] dom = Arrays.copyOf(v.domain(), v.domain().length + 1);
dom[dom.length - 1] = ts;
Arrays.sort(dom);
maps[i] = computeMap(v.domain(), dom);
tsIntMap[i] = ArrayUtils.find(dom, ts);
domains[i] = dom;
} else throw H2O.unimpl();
}
} else { // fs!=null
for (int i = 0; i < tst.numCols(); ++i) {
if (has_tfr) {
Vec v = fr.vec(i + tst.numCols() + (has_ffr ? tst.numCols() : 0));
if (!v.isCategorical())
throw H2O.unimpl("Column is not categorical.");
String[] dom = Arrays.copyOf(v.domain(), v.domain().length + 1);
dom[dom.length - 1] = fs;
Arrays.sort(dom);
maps[i] = computeMap(v.domain(), dom);
fsIntMap[i] = ArrayUtils.find(dom, fs);
domains[i] = dom;
} else throw H2O.unimpl();
}
}
}
// Now pick from left-or-right in the new frame
Frame res = new MRTask() {
@Override
public void map(Chunk chks[], NewChunk nchks[]) {
assert nchks.length + (has_tfr ? nchks.length : 0) + (has_ffr ? nchks.length : 0) == chks.length;
for (int i = 0; i < nchks.length; i++) {
Chunk ctst = chks[i];
NewChunk res = nchks[i];
for (int row = 0; row < ctst._len; row++) {
double d;
if (ctst.isNA(row)) d = Double.NaN;
else if (ctst.atd(row) == 0) d = has_ffr
? domainMap(chks[i + nchks.length + (has_tfr ? nchks.length : 0)].atd(row), maps[i])
: fs != null ? fsIntMap[i] : fd;
else d = has_tfr
? domainMap(chks[i + nchks.length].atd(row), maps[i])
: ts != null ? tsIntMap[i] : td;
res.addNum(d);
}
}
}
}.doAll(tst.numCols(), Vec.T_NUM, fr).outputFrame(null, domains);
// flatten domains since they may be larger than needed
if (domains != null) {
for (int i = 0; i < res.numCols(); ++i) {
if (res.vec(i).domain() != null) {
final long[] dom = new VecUtils.CollectDomainFast((int) res.vec(i).max()).doAll(res.vec(i)).domain();
String[] newDomain = new String[dom.length];
for (int l = 0; l < dom.length; ++l)
newDomain[l] = res.vec(i).domain()[(int) dom[l]];
new MRTask() {
@Override
public void map(Chunk c) {
for (int i = 0; i < c._len; ++i) {
if (!c.isNA(i))
c.set(i, ArrayUtils.find(dom, c.at8(i)));
}
}
}.doAll(res.vec(i));
res.vec(i).setDomain(newDomain); // needs a DKVput?
}
}
}
return new ValFrame(res);
}
private static double domainMap(double d, int[] maps) {
if (maps != null && d == (int) d && (0 <= d && d < maps.length)) return maps[(int) d];
return d;
}
private static int[] computeMap(String[] from, String[] to) {
int[] map = new int[from.length];
for (int i = 0; i < from.length; ++i)
map[i] = ArrayUtils.find(to, from[i]);
return map;
}
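// Editor's note: a small example of computeMap (illustrative, not part of the
// original source): from = {"no","yes"} and to = {"maybe","no","yes"} give
// map = {1, 2}; an old level index i is rewritten to map[i] in the merged, sorted domain.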
Val exec_check(Env env, Env.StackHelp stk, Frame tst, AstRoot ast) {
Val val = ast.exec(env);
if (val.isFrame()) {
Frame fr = stk.track(val).getFrame();
if (tst.numCols() != fr.numCols() || tst.numRows() != fr.numRows())
throw new IllegalArgumentException("ifelse test frame and other frames must match dimensions, found " + tst + " and " + fr);
}
return val;
}
ValRow row_ifelse(ValRow tst, Val yes, Val no) {
double[] test = tst.getRow();
double[] True;
double[] False;
if (!(yes.isRow() || no.isRow())) throw H2O.unimpl();
switch (yes.type()) {
case Val.NUM:
True = new double[]{yes.getNum()};
break;
case Val.ROW:
True = yes.getRow();
break;
default:
throw H2O.unimpl("row ifelse unimpl: " + yes.getClass());
}
switch (no.type()) {
case Val.NUM:
False = new double[]{no.getNum()};
break;
case Val.ROW:
False = no.getRow();
break;
default:
throw H2O.unimpl("row ifelse unimplL " + no.getClass());
}
double[] ds = new double[test.length];
String[] ns = new String[test.length];
for (int i = 0; i < test.length; ++i) {
ns[i] = "C" + (i + 1);
if (Double.isNaN(test[i])) ds[i] = Double.NaN;
else ds[i] = test[i] == 0 ? False[i] : True[i];
}
return new ValRow(ds, ns);
}
private static class Unevaluated extends Val {
static Unevaluated INSTANCE = new Unevaluated();
@Override
public int type() { return -1; }
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstIntDiv.java
|
package water.rapids.ast.prims.operators;
/**
* Integer division
*/
public class AstIntDiv extends AstBinOp {
public String str() {
return "intDiv";
}
public double op(double l, double r) {
return (((int) r) == 0) ? Double.NaN : (int) l / (int) r;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstIntDivR.java
|
package water.rapids.ast.prims.operators;
/**
* Language R intdiv op
*/
public class AstIntDivR extends AstBinOp {
public String str() {
return "%/%";
}
public double op(double l, double r) {
return (int) (l / r);
}
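// Editor's note: an illustrative comparison with AstIntDiv, not part of the original
// source. The two primitives truncate at different points:
//   intDiv: (int) 1.5 / (int) 0.5  ->  1 / 0      ->  NaN  (operands truncated first)
//   %/%   : (int) (1.5 / 0.5)      ->  (int) 3.0  ->  3    (quotient truncated)
// Also note that (int) truncates toward zero, so for negative quotients this differs
// from R's floor-based %/%: (int) (-7.0 / 2) == -3, while R returns -4.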
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstLAnd.java
|
package water.rapids.ast.prims.operators;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
/**
* Logical-AND. If the first arg is false, do not execute the 2nd arg.
*/
public class AstLAnd extends AstBinOp {
public String str() {
return "&&";
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val left = stk.track(asts[1].exec(env));
// If the left is zero, just return the left
if (left.isNum()) {
double d = left.getNum();
if (d == 0) return left;
}
Val rite = stk.track(asts[2].exec(env));
return prim_apply(left, rite);
}
// 0 trumps NA, and NA trumps 1
public double op(double l, double r) {
return and_op(l, r);
}
public static double and_op(double l, double r) {
return (l == 0 || r == 0) ? 0 : (Double.isNaN(l) || Double.isNaN(r) ? Double.NaN : 1);
}
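// Editor's note: the three-valued truth table implemented above (illustrative, not
// part of the original source); 0 trumps NA, NA trumps 1:
//   and_op(0, NaN) == 0     and_op(0, 1) == 0
//   and_op(1, NaN) == NaN   and_op(1, 1) == 1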
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstLOr.java
|
package water.rapids.ast.prims.operators;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstRoot;
/**
* Logical-OR. If the first arg is true, do not execute the 2nd arg.
*/
public class AstLOr extends AstBinOp {
public String str() {
return "||";
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val left = stk.track(asts[1].exec(env));
// If the left is 1, just return the left
if (left.isNum()) {
double d = left.getNum();
if (d == 1) return left;
}
Val rite = stk.track(asts[2].exec(env));
return prim_apply(left, rite);
}
// 1 trumps NA, and NA trumps 0.
public double op(double l, double r) {
return or_op(l, r);
}
public static double or_op(double l, double r) {
return (l == 1 || r == 1) ? 1 : (Double.isNaN(l) || Double.isNaN(r) ? Double.NaN : 0);
}
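// Editor's note: the dual of AstLAnd.and_op (illustrative, not part of the original
// source); 1 trumps NA, NA trumps 0:
//   or_op(1, NaN) == 1      or_op(0, NaN) == NaN     or_op(0, 0) == 0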
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstLe.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstLe extends AstBinOp {
public String str() {
return "<=";
}
public double op(double l, double r) {
return l <= r ? 1 : 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstLt.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstLt extends AstBinOp {
public String str() {
return "<";
}
public double op(double l, double r) {
return l < r ? 1 : 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstMod.java
|
package water.rapids.ast.prims.operators;
/**
* @see AstModR
*/
public class AstMod extends AstBinOp {
public String str() {
return "%";
}
public double op(double l, double r) {
return l % r;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstModR.java
|
package water.rapids.ast.prims.operators;
/**
* Language R mod operator
*/
public class AstModR extends AstBinOp {
public String str() {
return "%%";
}
public double op(double l, double r) {
return l % r;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstMul.java
|
package water.rapids.ast.prims.operators;
/**
* Multiplication
*/
public class AstMul extends AstBinOp {
public String str() {
return "*";
}
public double op(double l, double r) {
return l * r;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstNe.java
|
package water.rapids.ast.prims.operators;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.rapids.vals.ValFrame;
import water.util.MathUtils;
import water.util.StringUtils;
/**
*/
public class AstNe extends AstBinOp {
public String str() {
return "!=";
}
public double op(double l, double r) {
return MathUtils.equalsWithinOneSmallUlp(l, r) ? 0 : 1;
}
@Override
public ValFrame frame_op_scalar(Frame fr, final double d) {
return new ValFrame(new MRTask() {
@Override
public void map(Chunk[] chks, NewChunk[] cress) {
for (int c = 0; c < chks.length; c++) {
Chunk chk = chks[c];
NewChunk cres = cress[c];
BufferedString bStr = new BufferedString();
if (chk.vec().isString())
for (int i = 0; i < chk._len; i++)
cres.addNum(str_op(chk.atStr(bStr, i), Double.isNaN(d) ? null : new BufferedString(String.valueOf(d))));
else if (!chk.vec().isNumeric()) cres.addZeros(chk._len);
else
for (int i = 0; i < chk._len; i++)
cres.addNum(op(chk.atd(i), d));
}
}
}.doAll(fr.numCols(), Vec.T_NUM, fr).outputFrame());
}
@Override
public boolean categoricalOK() {
return true;
} // NE is meaningful on categorical (enum) columns
public double str_op(BufferedString l, BufferedString r) {
if (StringUtils.isNullOrEmpty(l))
return StringUtils.isNullOrEmpty(r) ? 0 : 1;
else
return l.equals(r) ? 0 : 1;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstOr.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstOr extends AstBinOp {
public String str() {
return "|";
}
public double op(double l, double r) {
return AstLOr.or_op(l, r);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstPlus.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstPlus extends AstBinOp {
public String str() {
return "+";
}
public double op(double l, double r) {
return l + r;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstPow.java
|
package water.rapids.ast.prims.operators;
/**
*/
public class AstPow extends AstBinOp {
public String str() {
return "^";
}
public double op(double l, double r) {
return Math.pow(l, r);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/operators/AstSub.java
|
package water.rapids.ast.prims.operators;
/**
* Subtraction
*/
public class AstSub extends AstBinOp {
public String str() {
return "-";
}
public double op(double l, double r) {
return l - r;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstAll.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import static hex.genmodel.utils.ArrayUtils.isBoolColumn;
/**
* Bulk AND operation on a scalar or numeric column; NAs count as true. Returns 0 or 1.
*/
public class AstAll extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public String str() {
return "all";
}
@Override
public int nargs() {
return 1 + 1;
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val val = stk.track(asts[1].exec(env));
if (val.isNum()) return new ValNum(val.getNum() == 0 ? 0 : 1);
for (Vec vec : val.getFrame().vecs()) {
String[] domainV = vec.domain();
if (domainV != null && !isBoolColumn(domainV)) // domain has levels other than true/false
return new ValNum(0); // not a boolean column
// If "true" is level 0, the true values are the zeros plus the NAs (NAs count as
// true); otherwise they are the non-zeros plus the NAs.
long trueCount = ((domainV != null) && domainV[0].equalsIgnoreCase("true"))
?(vec.length()-vec.nzCnt()):vec.nzCnt()+vec.naCnt();
if (trueCount < vec.length())
return new ValNum(0); // Some zeros in there somewhere
}
return new ValNum(1);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstAny.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import static hex.genmodel.utils.ArrayUtils.isBoolColumn;
/**
* Bulk OR operation on boolean column; NAs count as true. Returns 0 or 1.
*/
public class AstAny extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (any x)
@Override
public String str() {
return "any";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val val = stk.track(asts[1].exec(env));
if (val.isNum()) return new ValNum(val.getNum() == 0 ? 0 : 1);
for (Vec vec : val.getFrame().vecs()) {
String[] domainV = vec.domain();
if (domainV != null && !isBoolColumn(domainV)) // domain has levels other than true/false
return new ValNum(0); // not a boolean column
// If "true" is level 0, the true values are the zeros plus the NAs (NAs count as
// true); otherwise they are the non-zeros plus the NAs.
long trueCount = ((domainV != null) && domainV[0].equalsIgnoreCase("true"))
?(vec.length()-vec.nzCnt()):vec.nzCnt()+vec.naCnt();
if (trueCount > 0)
return new ValNum(1); // Some nonzeros in there somewhere
}
return new ValNum(0);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstAnyNa.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
* Does the frame contain any NAs? Returns 0 or 1.
*/
public class AstAnyNa extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (any.na x)
@Override
public String str() {
return "any.na";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
for (Vec vec : fr.vecs()) if (vec.naCnt() > 0) return new ValNum(1); // any.na counts NAs, not non-zeros
return new ValNum(0);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstCumMax.java
|
package water.rapids.ast.prims.reducers;
public class AstCumMax extends AstCumu {
@Override
public int nargs() { return 1 + 2; }
@Override
public String str() {
return "cummax";
}
@Override
public double op(double l, double r) {
return Math.max(l, r);
}
@Override
public double init() {
return -Double.MAX_VALUE;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstCumMin.java
|
package water.rapids.ast.prims.reducers;
/**
*/
public class AstCumMin extends AstCumu {
@Override
public int nargs() { return 1 + 2; }
@Override
public String str() {
return "cummin";
}
@Override
public double op(double l, double r) {
return Math.min(l, r);
}
@Override
public double init() {
return Double.MAX_VALUE;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstCumProd.java
|
package water.rapids.ast.prims.reducers;
/**
*/
public class AstCumProd extends AstCumu {
@Override
public int nargs() { return 1 + 2; } // (cumprod x)
@Override
public String str() {
return "cumprod";
}
@Override
public double op(double l, double r) {
return l * r;
}
@Override
public double init() {
return 1;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstCumSum.java
|
package water.rapids.ast.prims.reducers;
/**
*/
public class AstCumSum extends AstCumu {
@Override
public int nargs() { return 1 + 2; } // (cumsum x)
@Override
public String str() {
return "cumsum";
}
@Override
public double op(double l, double r) {
return l + r;
}
@Override
public double init() {
return 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstCumu.java
|
package water.rapids.ast.prims.reducers;
import water.H2O;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.util.ArrayUtils;
import java.util.Arrays;
/**
*/
public abstract class AstCumu extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary","axis"};
}
@Override
public int nargs() {
return 1 + 1;
} // (cumu x); concrete subclasses override nargs() to 1 + 2 for (op x axis)
@Override
public String str() {
throw H2O.unimpl();
}
public abstract double op(double l, double r);
public abstract double init();
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame f = stk.track(asts[1].exec(env)).getFrame();
AstRoot axisAR = asts[2];
for (Vec v:f.vecs()) {
if(v.isCategorical() || v.isString() || v.isUUID()) throw new IllegalArgumentException(
"Cumulative functions not applicable to enum, string, or UUID values");
}
double axis = axisAR.exec(env).getNum();
if (axis != 1.0 && axis != 0.0) throw new IllegalArgumentException("Axis must be 0 or 1");
if (f.numCols() == 1) {
if (axis == 0.0) {
AstCumu.CumuTask t = new AstCumu.CumuTask(f.anyVec().nChunks(), init());
t.doAll(new byte[]{Vec.T_NUM}, f.anyVec());
final double[] chkCumu = t._chkCumu;
Vec cumuVec = t.outputFrame().anyVec();
new MRTask() {
@Override
public void map(Chunk c) {
if (c.cidx() != 0) {
double d = chkCumu[c.cidx() - 1];
for (int i = 0; i < c._len; ++i)
c.set(i, op(c.atd(i), d));
}
}
}.doAll(cumuVec);
Key<Frame> k = Key.make();
return new ValFrame(new Frame(k, null, new Vec[]{cumuVec}));
} else {
return new ValFrame(new Frame(f));
}
}
else {
if (axis == 0.0) { // down the column implementation
AstCumu.CumuTaskWholeFrame t = new AstCumu.CumuTaskWholeFrame(f.anyVec().nChunks(), init(), f.numCols());
Frame fr2 = t.doAll(f.numCols(), Vec.T_NUM, f).outputFrame(null, f.names(), null);
final double[][] chkCumu = t._chkCumu;
new MRTask() {
@Override
public void map(Chunk cs[]) {
if (cs[0].cidx() != 0) {
for (int i = 0; i < cs.length; i++) {
double d = chkCumu[i][cs[i].cidx() - 1];
for (int j = 0; j < cs[i]._len; ++j)
cs[i].set(j, op(cs[i].atd(j), d));
}
}
}
}.doAll(fr2);
return new ValFrame(new Frame(fr2));
} else {
AstCumu.CumuTaskAxis1 t = new AstCumu.CumuTaskAxis1(init());
Frame fr2 = t.doAll(f.numCols(), Vec.T_NUM, f).outputFrame(null, f.names(), null);
return new ValFrame(new Frame(fr2));
}
}
}
protected class CumuTaskAxis1 extends MRTask<AstCumu.CumuTaskAxis1> {
// apply function along the rows
final double _init;
CumuTaskAxis1(double init) {
_init = init;
}
@Override
public void map(Chunk cs[], NewChunk nc[]) {
for (int i = 0; i < cs[0].len(); i++) {
for (int j = 0; j < cs.length; j++) {
double preVal = j == 0 ? _init : nc[j-1].atd(i);
nc[j].addNum(op(preVal,cs[j].atd(i)));
}
}
}
}
protected class CumuTaskWholeFrame extends MRTask<AstCumu.CumuTaskWholeFrame> {
final int _nchks; // IN
final double _init; // IN
final int _ncols; // IN
double[][] _chkCumu; // OUT, accumulation over each chunk
CumuTaskWholeFrame(int nchks, double init, int ncols) {
_nchks = nchks;
_init = init;
_ncols = ncols;
}
@Override
public void setupLocal() {
_chkCumu = new double[_ncols][_nchks];
}
@Override
public void map(Chunk cs[], NewChunk nc[]) {
double acc[] = new double[cs.length];
Arrays.fill(acc,_init);
for (int i = 0; i < cs.length; i++) {
for (int j = 0; j < cs[i]._len; ++j)
nc[i].addNum(acc[i] = op(acc[i], cs[i].atd(j)));
_chkCumu[i][cs[i].cidx()] = acc[i];
}
}
@Override
public void reduce(AstCumu.CumuTaskWholeFrame t) {
if (_chkCumu != t._chkCumu) ArrayUtils.add(_chkCumu, t._chkCumu);
}
@Override
public void postGlobal() {
for (int i = 1; i < _chkCumu.length; i++) {
for (int j = 1; j < _chkCumu[i].length; ++j) {
_chkCumu[i][j] = op(_chkCumu[i][j], _chkCumu[i][j - 1]);
}
}
}
}
protected class CumuTask extends MRTask<AstCumu.CumuTask> {
final int _nchks; // IN
final double _init; // IN
double[] _chkCumu; // OUT, accumulation over each chunk
CumuTask(int nchks, double init) {
_nchks = nchks;
_init = init;
}
@Override
public void setupLocal() {
_chkCumu = new double[_nchks];
}
@Override
public void map(Chunk c, NewChunk nc) {
double acc = _init;
for (int i = 0; i < c._len; ++i)
nc.addNum(acc = op(acc, c.atd(i)));
_chkCumu[c.cidx()] = acc;
}
@Override
public void reduce(AstCumu.CumuTask t) {
if (_chkCumu != t._chkCumu) ArrayUtils.add(_chkCumu, t._chkCumu);
}
@Override
public void postGlobal() {
for (int i = 1; i < _chkCumu.length; ++i) _chkCumu[i] = op(_chkCumu[i], _chkCumu[i - 1]);
}
}
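// Editor's note: a worked example of the two-pass scheme above (illustrative, not
// part of the original source). For cumsum over a vec split into chunks [1,2] and [3,4]:
//   pass 1 (CumuTask.map): chunk 0 -> [1,3], chunk 1 -> [3,7]; _chkCumu = {3, 7}
//   postGlobal: prefix-combine the per-chunk totals -> _chkCumu = {3, 10}
//   pass 2 (the MRTask in apply): chunk 1 is offset by _chkCumu[0] = 3 -> [6, 10]
// yielding the full cumulative sum [1, 3, 6, 10].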
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstMad.java
|
package water.rapids.ast.prims.reducers;
import hex.quantile.QuantileModel;
import water.DKV;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
* Median absolute deviation
*/
public class AstMad extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "combineMethod", "const"};
}
@Override
public int nargs() {
return 1 + 3;
} //(mad fr combine_method const)
@Override
public String str() {
return "h2o.mad";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
Vec[] vecs = fr.vecs();
if (vecs.length == 0 || vecs[0].naCnt() > 0) return new ValNum(Double.NaN);
if (vecs.length > 1) throw new IllegalArgumentException("MAD expects a single numeric column");
QuantileModel.CombineMethod cm = QuantileModel.CombineMethod.valueOf(asts[2].exec(env).getStr().toUpperCase());
double constant = asts[3].exec(env).getNum();
return new ValNum(mad(fr, cm, constant));
}
public static double mad(Frame f, QuantileModel.CombineMethod cm, double constant) {
// need Frames everywhere because of QuantileModel demanding a Frame...
Key tk = null;
if (f._key == null) {
DKV.put(tk = Key.make(), f = new Frame(tk, f.names(), f.vecs()));
}
final double median = AstMedian.median(f, cm);
Frame abs_dev = new MRTask() {
@Override
public void map(Chunk c, NewChunk nc) {
for (int i = 0; i < c._len; ++i)
nc.addNum(Math.abs(c.at8(i) - median));
}
}.doAll(1, Vec.T_NUM, f).outputFrame();
if (abs_dev._key == null) {
DKV.put(tk = Key.make(), abs_dev = new Frame(tk, abs_dev.names(), abs_dev.vecs()));
}
double mad = AstMedian.median(abs_dev, cm);
DKV.remove(f._key); // drop the key->frame mapping, keep the vecs
DKV.remove(abs_dev._key);
return constant * mad;
}
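// Editor's note: a worked example of the computation above (illustrative, not part
// of the original source). For values {1,2,3,4,5} with the conventional constant
// 1.4826: median = 3, absolute deviations = {2,1,0,1,2}, their median = 1, so
// mad = 1.4826 * 1 = 1.4826.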
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstMax.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
/**
*/
public class AstMax extends AstRollupOp {
public String str() {
return "max";
}
public double op(double l, double r) {
return Math.max(l, r);
}
public double rup(Vec vec) {
return vec.max();
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstMaxNa.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
/**
*/
public class AstMaxNa extends AstNaRollupOp {
public String str() {
return "maxNA";
}
public double op(double l, double r) {
return Math.max(l, r);
}
public double rup(Vec vec) {
return vec.max();
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstMean.java
|
package water.rapids.ast.prims.reducers;
import water.Key;
import water.MRTask;
import water.fvec.*;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValRow;
public class AstMean extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"frame", "na_rm", "axis"};
}
@Override
public String str() {
return "mean";
}
@Override
public int nargs() {
return -1; // variable arity: (mean frame na_rm [axis])
}
@Override
public String example() {
return "(mean frame na_rm axis)";
}
@Override
public String description() {
return "Compute the mean values within the provided frame. If axis = 0, then the mean is computed " +
"column-wise, and the result is a frame of shape [1 x ncols], where ncols is the number of columns in " +
"the original frame. If axis = 1, then the mean is computed row-wise, and the result is a frame of shape " +
"[nrows x 1], where nrows is the number of rows in the original frame. Flag na_rm controls treatment of " +
"the NA values: if it is 1, then NAs are ignored; if it is 0, then presence of NAs renders the result " +
"in that column (row) also NA.\n" +
"Mean of a double / integer / binary column is a double value. Mean of a categorical / string / uuid " +
"column is NA. Mean of a time column is time. Mean of a column with 0 rows is NaN.\n" +
"When computing row-wise means, we try not to mix columns of different types. In particular, if there " +
"are any numeric columns, then all time columns are omitted from computation. However when computing " +
"mean over multiple time columns, then the Time result is returned. Lastly, binary columns are treated " +
"as NAs always.";
}
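// Editor's note: illustrative Rapids invocations (not part of the original source);
// "fr" is a placeholder frame name:
//   (mean fr 1 0)   // column-wise means, NAs ignored -> 1 x ncols frame
//   (mean fr 0 1)   // row-wise means, NAs poison     -> nrows x 1 frame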
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot[] asts) {
Val val1 = asts[1].exec(env);
if (val1 instanceof ValFrame) {
Frame fr = stk.track(val1).getFrame();
boolean na_rm = asts[2].exec(env).getNum() == 1;
boolean axis = asts.length == 4 && (asts[3].exec(env).getNum() == 1);
return axis? rowwiseMean(fr, na_rm) : colwiseMean(fr, na_rm);
}
else if (val1 instanceof ValRow) {
// This may be called from AstApply when doing per-row computations.
double[] row = val1.getRow();
boolean na_rm = asts[2].exec(env).getNum() == 1;
double d = 0;
int n = 0;
for (double r: row) {
if (Double.isNaN(r)) {
if (!na_rm)
return new ValRow(new double[]{Double.NaN}, null);
} else {
d += r;
n++;
}
}
return new ValRow(new double[]{d / n}, null);
} else
throw new IllegalArgumentException("Incorrect argument to (mean): expected a frame or a row, received " + val1.getClass());
}
/**
* Compute Frame means by rows, and return a frame consisting of a single Vec of means in each row.
*/
private ValFrame rowwiseMean(Frame fr, final boolean na_rm) {
String[] newnames = {"mean"};
Key<Frame> newkey = Key.make();
// Determine how many columns of different types we have
int n_numeric = 0, n_time = 0;
for (Vec vec : fr.vecs()) {
if (vec.isNumeric()) n_numeric++;
if (vec.isTime()) n_time++;
}
// Compute the type of the resulting column: if all columns are TIME then the result is also time; otherwise
// if at least one column is numeric then the result is also numeric.
byte resType = n_numeric > 0? Vec.T_NUM : Vec.T_TIME;
// Construct the frame over which the mean should be computed
Frame compFrame = new Frame();
for (int i = 0; i < fr.numCols(); i++) {
Vec vec = fr.vec(i);
if (n_numeric > 0? vec.isNumeric() : vec.isTime())
compFrame.add(fr.name(i), vec);
}
Vec anyvec = compFrame.anyVec();
// Take into account certain corner cases
if (anyvec == null) {
Frame res = new Frame(newkey);
anyvec = fr.anyVec();
if (anyvec != null) {
// All columns in the original frame are non-numeric -> return a vec of NAs
res.add("mean", anyvec.makeCon(Double.NaN));
} // else the original frame is empty, in which case we return an empty frame too
return new ValFrame(res);
}
if (!na_rm && n_numeric < fr.numCols() && n_time < fr.numCols()) {
// If some of the columns are non-numeric and na_rm==false, then the result is a vec of NAs
Frame res = new Frame(newkey, newnames, new Vec[]{anyvec.makeCon(Double.NaN)});
return new ValFrame(res);
}
// Compute the mean over all rows
final int numCols = compFrame.numCols();
Frame res = new MRTask() {
@Override
public void map(Chunk[] cs, NewChunk nc) {
for (int i = 0; i < cs[0]._len; i++) {
double d = 0;
int numNaColumns = 0;
for (int j = 0; j < numCols; j++) {
double val = cs[j].atd(i);
if (Double.isNaN(val))
numNaColumns++;
else
d += val;
}
if (na_rm? numNaColumns < numCols : numNaColumns == 0)
nc.addNum(d / (numCols - numNaColumns));
else
nc.addNum(Double.NaN);
}
}
}.doAll(1, resType, compFrame)
.outputFrame(newkey, newnames, null);
// Return the result
return new ValFrame(res);
}
/**
* Compute column-wise means (i.e. means of each column), and return a frame having a single row.
*/
private ValFrame colwiseMean(Frame fr, final boolean na_rm) {
Frame res = new Frame();
Vec vec1 = Vec.makeCon(null, 0);
assert vec1.length() == 1;
for (int i = 0; i < fr.numCols(); i++) {
Vec v = fr.vec(i);
boolean valid = (v.isNumeric() || v.isTime() || v.isBinary()) && v.length() > 0 && (na_rm || v.naCnt() == 0);
Vec newvec = vec1.makeCon(valid? v.mean() : Double.NaN, v.isTime()? Vec.T_TIME : Vec.T_NUM);
res.add(fr.name(i), newvec);
}
vec1.remove();
return new ValFrame(res);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstMedian.java
|
package water.rapids.ast.prims.reducers;
import hex.quantile.Quantile;
import hex.quantile.QuantileModel;
import water.DKV;
import water.Key;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNums;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
*/
public class AstMedian extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "method"};
}
@Override
public String str() {
return "median";
}
@Override
public int nargs() {
return 1 + 2;
} // (median fr method)
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
boolean narm = asts[2].exec(env).getNum() == 1;
double[] ds = new double[fr.numCols()];
Vec[] vecs = fr.vecs();
for (int i = 0; i < fr.numCols(); i++)
ds[i] = (!vecs[i].isNumeric() || vecs[i].length() == 0 || (!narm && vecs[i].naCnt() > 0)) ? Double.NaN : median(vecs[i], QuantileModel.CombineMethod.INTERPOLATE);
return new ValNums(ds);
}
public static double median(Frame fr, QuantileModel.CombineMethod combine_method) {
// Frame needs a Key for Quantile, might not have one from rapids
Key tk = null;
if (fr._key == null) {
DKV.put(tk = Key.make(), fr = new Frame(tk, fr.names(), fr.vecs()));
}
// Quantiles to get the median
QuantileModel.QuantileParameters parms = new QuantileModel.QuantileParameters();
parms._probs = new double[]{0.5};
parms._train = fr._key;
parms._combine_method = combine_method;
QuantileModel q = new Quantile(parms).trainModel().get();
double median = q._output._quantiles[0][0];
q.delete();
if (tk != null) {
DKV.remove(tk);
}
return median;
}
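// Editor's note (illustrative, not part of the original source): the median is
// obtained as the 0.5 quantile, so the combine method only matters for even row
// counts; e.g. for {1,2,3,4}, INTERPOLATE averages the two middle values to 2.5.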
static double median(Vec v, QuantileModel.CombineMethod combine_method) {
return median(new Frame(v), combine_method);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstMin.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
/**
*/
public class AstMin extends AstRollupOp {
public String str() {
return "min";
}
public double op(double l, double r) {
return Math.min(l, r);
}
public double rup(Vec vec) {
return vec.min();
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstMinNa.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
/**
* Subclasses take a Frame and produces a scalar. NAs are dropped
*/
//abstract class ASTNARedOp extends AstReducerOp {
// @Override ValNum apply( Env env, Env.StackHelp stk, AstRoot asts[] ) {
// Frame fr = stk.track(asts[1].exec(env)).getFrame();
// return new ValNum(new NaRmRedOp().doAll(fr)._d);
// }
//}
public class AstMinNa extends AstNaRollupOp {
public String str() {
return "minNA";
}
public double op(double l, double r) {
return Math.min(l, r);
}
public double rup(Vec vec) {
return vec.min();
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstNaCnt.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.vals.ValNums;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
*/
public class AstNaCnt extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public String str() {
return "naCnt";
}
@Override
public int nargs() {
return 1 + 1;
} // (naCnt fr)
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
double ds[] = new double[fr.numCols()];
for (int i = 0; i < fr.numCols(); ++i)
ds[i] = fr.vec(i).naCnt();
return new ValNums(ds);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstNaRollupOp.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstRoot;
/**
* Optimization for the RollupStats: use them directly
*/
public abstract class AstNaRollupOp extends AstRollupOp {
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
Vec[] vecs = fr.vecs();
if (vecs.length == 0) return new ValNum(Double.NaN);
double d = rup(vecs[0]);
for (int i = 1; i < vecs.length; i++)
d = op(d, rup(vecs[i]));
return new ValNum(d);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstProd.java
|
package water.rapids.ast.prims.reducers;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
*/
public class AstProd extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (prod x)
@Override
public String str() {
return "prod";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
for (Vec v : fr.vecs())
if (v.isCategorical() || v.isUUID() || v.isString())
throw new IllegalArgumentException("`" + str() + "`" + " only defined on a data frame with all numeric variables");
double prod = new AstProd.RedProd().doAll(fr)._d;
return new ValNum(prod);
}
private static class RedProd extends MRTask<AstProd.RedProd> {
double _d;
@Override
public void map(Chunk chks[]) {
int rows = chks[0]._len;
for (Chunk C : chks) {
double prod = 1.;
for (int r = 0; r < rows; r++)
prod *= C.atd(r);
_d = prod;
if (Double.isNaN(prod)) break;
}
}
@Override
public void reduce(AstProd.RedProd s) {
_d *= s._d;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstProdNa.java
|
package water.rapids.ast.prims.reducers;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
*/
public class AstProdNa extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (prod x)
@Override
public String str() {
return "prod.na";
}
@Override
public ValNum apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
for (Vec v : fr.vecs())
if (v.isCategorical() || v.isUUID() || v.isString())
throw new IllegalArgumentException("`" + str() + "`" + " only defined on a data frame with all numeric variables");
double prod = new AstProdNa.RedProd().doAll(fr)._d;
return new ValNum(prod);
}
private static class RedProd extends MRTask<AstProdNa.RedProd> {
double _d;
@Override
    public void map(Chunk chks[]) {
      int rows = chks[0]._len;
      double d = 1.; // product over all columns of this chunk set
      for (Chunk C : chks) {
        double prod = 1.;
        for (int r = 0; r < rows; r++) {
          if (C.isNA(r)) continue;
          prod *= C.atd(r);
        }
        d *= prod; // accumulate across columns instead of overwriting
        if (Double.isNaN(d)) break;
      }
      _d = d;
    }
@Override
public void reduce(AstProdNa.RedProd s) {
      _d *= s._d; // combine partial products by multiplication
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstReducerOp.java
|
package water.rapids.ast.prims.reducers;
import water.MRTask;
import water.fvec.Chunk;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValNum;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
 * Subclasses take a Frame and produce a scalar. NAs -> NAs.
*/
public abstract class AstReducerOp extends AstPrimitive {
@Override
public int nargs() {
return -1;
}
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
// NOTE: no *initial* value needed for the reduction. Instead, the
// reduction op is used between pairs of actual values, and never against
// the empty list. NaN is returned if there are *no* values in the
// reduction.
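    // Worked example (illustrative): for "(sum 1 2 fr)" with a one-column frame fr
    // holding {3, 4}, the loop yields d = 1, then d = op(1, 2) = 3, and finally
    // d = op(3, 7) = 10, where 7 is the frame's column sum computed by RedOp.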
double d = Double.NaN;
for (int i = 1; i < asts.length; i++) {
Val val = asts[i].exec(env);
double d2 = val.isFrame() ? new AstReducerOp.RedOp().doAll(stk.track(val).getFrame())._d : val.getNum();
if (i == 1) d = d2;
else d = op(d, d2);
}
return new ValNum(d);
}
/**
* Override to express a basic math primitive
*/
public abstract double op(double l, double r);
class RedOp extends MRTask<AstReducerOp.RedOp> {
double _d;
@Override
public void map(Chunk chks[]) {
int rows = chks[0]._len;
for (Chunk C : chks) {
if (!C.vec().isNumeric()) throw new IllegalArgumentException("Numeric columns only");
double sum = _d;
for (int r = 0; r < rows; r++)
sum = op(sum, C.atd(r));
_d = sum;
if (Double.isNaN(sum)) break; // Shortcut if the reduction is already NaN
}
}
@Override
public void reduce(AstReducerOp.RedOp s) {
_d = op(_d, s._d);
}
}
// class NaRmRedOp extends MRTask<NaRmRedOp> {
// double _d;
// @Override public void map( Chunk chks[] ) {
// int rows = chks[0]._len;
// for( Chunk C : chks ) {
// if( !C.vec().isNumeric() ) throw new IllegalArgumentException("Numeric columns only");
// double sum = _d;
// for( int r = 0; r < rows; r++ ) {
// double d = C.atd(r);
// if( !Double.isNaN(d) )
// sum = op(sum, d);
// }
// _d = sum;
// if( Double.isNaN(sum) ) break; // Shortcut if the reduction is already NaN
// }
// }
// @Override public void reduce( NaRmRedOp s ) { _d = op(_d, s._d); }
// }
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstRollupOp.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValNum;
import water.rapids.vals.ValRow;
import water.rapids.ast.AstRoot;
/**
* Optimization for the RollupStats: use them directly
*/
public abstract class AstRollupOp extends AstReducerOp {
@Override
public String[] args() {
return new String[]{"ary"};
}
public abstract double rup(Vec vec);
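  // For example (from the concrete subclasses in this package): the "sum" rollup
  // implements rup(vec) as vec.mean() * vec.length(), so no pass over the data is needed.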
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val arg1 = asts[1].exec(env);
if (arg1.isRow()) { // Row-wise operation
double[] ds = arg1.getRow();
double d = ds[0];
for (int i = 1; i < ds.length; i++)
d = op(d, ds[i]);
return new ValRow(new double[]{d}, null);
}
// Normal column-wise operation
Frame fr = stk.track(arg1).getFrame();
Vec[] vecs = fr.vecs();
if (vecs.length == 0 || vecs[0].naCnt() > 0) return new ValNum(Double.NaN);
double d = rup(vecs[0]);
for (int i = 1; i < vecs.length; i++) {
if (vecs[i].naCnt() > 0) return new ValNum(Double.NaN);
d = op(d, rup(vecs[i]));
}
return new ValNum(d);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstSdev.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.vals.ValNums;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
* TODO: allow for multiple columns, package result into Frame
*/
public class AstSdev extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "na_rm"};
}
@Override
public int nargs() {
return 1 + 2;
}
@Override
public String str() {
return "sd";
}
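  // Illustrative usage sketch (not part of the original source), with "fr" a hypothetical
  // frame key:
  //   water.rapids.Rapids.exec("(sd fr 1)")  // per-column sigma with na_rm=1; NaN for non-numeric columns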
@Override
public ValNums apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
boolean narm = asts[2].exec(env).getNum() == 1;
double[] ds = new double[fr.numCols()];
Vec[] vecs = fr.vecs();
for (int i = 0; i < fr.numCols(); i++)
ds[i] = (!vecs[i].isNumeric() || vecs[i].length() == 0 || (!narm && vecs[i].naCnt() > 0)) ? Double.NaN : vecs[i].sigma();
return new ValNums(ds);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstSum.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
/**
 * Sum of all elements in a frame, computed from rollup stats; any NA yields NaN.
 */
public class AstSum extends AstRollupOp {
public String str() {
return "sum";
}
public double op(double l, double r) {
return l + r;
}
public double rup(Vec vec) {
return vec.mean() * vec.length();
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstSumAxis.java
|
package water.rapids.ast.prims.reducers;
import water.Key;
import water.MRTask;
import water.fvec.*;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValRow;
public class AstSumAxis extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"frame", "na_rm", "axis"};
}
@Override
public String str() {
return "sumaxis";
}
@Override
public int nargs() {
return -1; // 1 + 3;
}
@Override
public String example() {
return "(sumaxis frame na_rm axis)";
}
@Override
public String description() {
return "Compute the sum values within the provided frame. If axis = 0, then the sum is computed " +
"column-wise, and the result is a frame of shape [1 x ncols], where ncols is the number of columns in " +
"the original frame. If axis = 1, then the sum is computed row-wise, and the result is a frame of shape " +
"[nrows x 1], where nrows is the number of rows in the original frame. Flag na_rm controls treatment of " +
"the NA values: if it is 1, then NAs are ignored; if it is 0, then presence of NAs renders the result " +
"in that column (row) also NA.\n" +
"sum of a double / integer / binary column is a double value. sum of a categorical / string / uuid " +
"column is NA. sum of a time column is time. sum of a column with 0 rows is NaN.\n" +
"When computing row-wise sums, we try not to mix columns of different types. In particular, if there " +
"are any numeric columns, then all time columns are omitted from computation. However when computing " +
"sum over multiple time columns, then the Time result is returned. Lastly, binary columns are treated " +
"as NAs always.";
}
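  // Illustrative usage sketch (not part of the original source), with "fr" a hypothetical
  // frame key:
  //   water.rapids.Rapids.exec("(sumaxis fr 1 0)")  // column-wise sums, NAs ignored (na_rm=1, axis=0)
  //   water.rapids.Rapids.exec("(sumaxis fr 0 1)")  // row-wise sums, any NA makes that row NaN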
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot[] asts) {
Val val1 = asts[1].exec(env);
if (val1 instanceof ValFrame) {
Frame fr = stk.track(val1).getFrame();
boolean na_rm = asts[2].exec(env).getNum() == 1;
boolean axis = asts.length == 4 && (asts[3].exec(env).getNum() == 1);
return axis? rowwiseSum(fr, na_rm) : colwisesum(fr, na_rm);
}
else if (val1 instanceof ValRow) {
// This may be called from AstApply when doing per-row computations.
double[] row = val1.getRow();
boolean na_rm = asts[2].exec(env).getNum() == 1;
double d = 0;
int n = 0;
for (double r: row) {
if (Double.isNaN(r)) {
if (!na_rm)
return new ValRow(new double[]{Double.NaN}, null);
} else {
d += r;
n++;
}
}
return new ValRow(new double[]{d}, null);
} else
throw new IllegalArgumentException("Incorrect argument to (sum): expected a frame or a row, received " + val1.getClass());
}
/**
* Compute Frame sum for each row. This returns a frame consisting of a single Vec of sums in each row.
*/
private ValFrame rowwiseSum(Frame fr, final boolean na_rm) {
String[] newnames = {"sum"};
Key<Frame> newkey = Key.make();
// Determine how many columns of different types we have
int n_numeric = 0, n_time = 0;
for (Vec vec : fr.vecs()) {
if (vec.isNumeric()) n_numeric++;
if (vec.isTime()) n_time++;
}
// Compute the type of the resulting column: if all columns are TIME then the result is also time; otherwise
// if at least one column is numeric then the result is also numeric.
byte resType = n_numeric > 0? Vec.T_NUM : Vec.T_TIME;
// Construct the frame over which the sum should be computed
Frame compFrame = new Frame();
for (int i = 0; i < fr.numCols(); i++) {
Vec vec = fr.vec(i);
if (n_numeric > 0? vec.isNumeric() : vec.isTime())
compFrame.add(fr.name(i), vec);
}
Vec anyvec = compFrame.anyVec();
//Certain corner cases
if (anyvec == null) {
Frame res = new Frame(newkey);
anyvec = fr.anyVec();
if (anyvec != null) {
// All columns in the original frame are non-numeric? Return a vec of NAs
res.add("sum", anyvec.makeCon(Double.NaN));
} // else the original frame is empty, in which case we return an empty frame too
return new ValFrame(res);
}
if (!na_rm && n_numeric < fr.numCols() && n_time < fr.numCols()) {
// If some of the columns are non-numeric and na_rm==false, then the result is a vec of NAs
Frame res = new Frame(newkey, newnames, new Vec[]{anyvec.makeCon(Double.NaN)});
return new ValFrame(res);
}
// Compute the sum over all rows
final int numCols = compFrame.numCols();
Frame res = new MRTask() {
@Override
public void map(Chunk[] cs, NewChunk nc) {
for (int i = 0; i < cs[0]._len; i++) {
double d = 0;
int numNaColumns = 0;
for (int j = 0; j < numCols; j++) {
double val = cs[j].atd(i);
if (Double.isNaN(val))
numNaColumns++;
else
d += val;
}
if (na_rm? numNaColumns < numCols : numNaColumns == 0)
nc.addNum(d);
else
nc.addNum(Double.NaN);
}
}
}.doAll(1, resType, compFrame)
.outputFrame(newkey, newnames, null);
// Return the result
return new ValFrame(res);
}
/**
* Compute column-wise sums and return a frame having a single row.
*/
private ValFrame colwisesum(Frame fr, final boolean na_rm) {
Frame res = new Frame();
Vec vec1 = Vec.makeCon(null, 0);
assert vec1.length() == 1;
for (int i = 0; i < fr.numCols(); i++) {
Vec v = fr.vec(i);
boolean valid = (v.isNumeric() || v.isTime() || v.isBinary()) && v.length() > 0 && (na_rm || v.naCnt() == 0);
Vec newvec = vec1.makeCon(valid? v.mean() * (v.length() - v.naCnt()) : Double.NaN, v.isTime()? Vec.T_TIME : Vec.T_NUM);
res.add(fr.name(i), newvec);
}
vec1.remove();
return new ValFrame(res);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstSumNa.java
|
package water.rapids.ast.prims.reducers;
import water.fvec.Vec;
/**
 * Sum of all elements in a frame, skipping NAs (computed from rollup stats).
 */
public class AstSumNa extends AstNaRollupOp {
public String str() {
return "sumNA";
}
public double op(double l, double r) {
return l + r;
}
public double rup(Vec vec) {
return vec.mean() * (vec.length() - vec.naCnt());
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/reducers/AstTopN.java
|
package water.rapids.ast.prims.reducers;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import java.util.PriorityQueue;
import static java.lang.StrictMath.min;
public class AstTopN extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"frame", "col", "nPercent", "grabTopN"};
}
@Override
public String str() {
return "topn";
}
@Override
public int nargs() {
return 1 + 4;
} // function name plus 4 arguments.
@Override
public String example() {
return "(topn frame col nPercent getBottomN)";
}
@Override
public String description() {
return "Return the top N percent rows for a numerical column as a frame with two columns. The first column " +
"will contain the original row indices of the chosen values. The second column contains the top N row" +
"values. If grabTopN is -1, we will return the bottom N percent. If grabTopN is 1, we will return" +
"the top N percent of rows";
}
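  // Illustrative usage sketch (not part of the original source), with "fr" a hypothetical
  // frame key:
  //   water.rapids.Rapids.exec("(topn fr 2 10 1)")   // top 10% of column 2, with original row indices
  //   water.rapids.Rapids.exec("(topn fr 2 10 -1)")  // bottom 10% of column 2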
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot[] asts) { // implementation with PriorityQueue
Frame frOriginal = stk.track(asts[1].exec(env)).getFrame(); // get the 2nd argument and convert it to a Frame
int colIndex = (int) stk.track(asts[2].exec(env)).getNum(); // column index of interest
double nPercent = stk.track(asts[3].exec(env)).getNum(); // top or bottom percentage of row to return
    int grabTopN = (int) stk.track(asts[4].exec(env)).getNum(); // 1 to return the top N percent, -1 for the bottom
long numRows = Math.round(nPercent * 0.01 * frOriginal.numRows()); // number of rows to return
String[] finalColumnNames = {"Original_Row_Indices", frOriginal.name(colIndex)}; // set output frame names
GrabTopNPQ grabTask = new GrabTopNPQ(finalColumnNames, numRows, grabTopN, frOriginal.vec(colIndex).isInt());
grabTask.doAll(frOriginal.vec(colIndex));
return new ValFrame(grabTask._sortedOut);
}
public class GrabTopNPQ<E extends Comparable<E>> extends MRTask<GrabTopNPQ<E>> {
final String[] _columnName; // name of column that we are grabbing top N for
PriorityQueue _sortQueue;
long[] _rowIndices; // store original row indices of values that are grabbed
    long[] _lValues; // store the grabbed long values
    double[] _dValues; // store the grabbed double values
Frame _sortedOut; // store the final result of sorting
final int _rowSize; // number of top or bottom rows to keep
final int _flipSign; // 1 for top values, -1 for bottom values
boolean _csLong = false; // chunk of interest is long
private GrabTopNPQ(String[] columnName, long rowSize, int flipSign, boolean isLong) {
_columnName = columnName;
_rowSize = (int) rowSize;
_flipSign = flipSign;
_csLong = isLong;
}
@Override
public void map(Chunk cs) {
_sortQueue = new PriorityQueue<RowValue<E>>(); // instantiate a priority queue
long startRow = cs.start(); // absolute row offset
for (int rowIndex = 0; rowIndex < cs._len; rowIndex++) { // stuff our chunks into priorityQueue
long absRowIndex = rowIndex + startRow;
        if (!cs.isNA(rowIndex)) { // skip NA values
addOneValue(cs, rowIndex, absRowIndex, _sortQueue);
}
}
// copy the PQ into the corresponding arrays
if (_csLong) {
_lValues = new long[_sortQueue.size()];
copyPQ2ArryL(_sortQueue, _lValues);
} else {
_dValues = new double[_sortQueue.size()];
copyPQ2ArryD(_sortQueue, _dValues);
}
}
public void copyPQ2ArryL(PriorityQueue sortQueue, long[] values) {
//copy values on PQ into arrays in sorted order
int qSize = sortQueue.size();
_rowIndices = new long[qSize];
for (int index = qSize - 1; index >= 0; index--) {
RowValue tempPairs = (RowValue) sortQueue.poll();
_rowIndices[index] = tempPairs.getRow();
values[index] = (long) tempPairs.getValue();
}
}
public <T> void copyPQ2ArryD(PriorityQueue sortQueue, double[] values) {
//copy values on PQ into arrays in sorted order
int qSize = sortQueue.size();
_rowIndices = new long[qSize];
for (int index = qSize - 1; index >= 0; index--) {
RowValue tempPairs = (RowValue) sortQueue.poll();
_rowIndices[index] = tempPairs.getRow();
values[index] = (double) tempPairs.getValue();
}
}
@Override
public void reduce(GrabTopNPQ<E> other) {
// do a combine here of two arrays. Note the value always store values that are increasing
if (_csLong)
mergeArraysL(other._rowIndices, other._lValues);
else
mergeArraysD(other._rowIndices, other._dValues);
}
public void mergeArraysL(long[] otherRow, long[] otherValue) {
// grab bottom and grab top are slightly different
int finalArraySize = min(this._rowSize, this._lValues.length + otherValue.length);
long[] newRow = new long[finalArraySize];
long[] newValues = new long[finalArraySize]; // desired values are at start of array
int thisRowIndex = 0;
int otherRowIndex = 0;
for (int index = 0; index < finalArraySize; index++) {
if ((thisRowIndex < this._lValues.length) && (otherRowIndex < otherValue.length)) {
if ((((Long) this._lValues[thisRowIndex]).compareTo(otherValue[otherRowIndex]) * this._flipSign >= 0)) {
newRow[index] = this._rowIndices[thisRowIndex];
newValues[index] = this._lValues[thisRowIndex++];
} else {
newRow[index] = otherRow[otherRowIndex];
newValues[index] = otherValue[otherRowIndex++];
}
} else { // one of the array is done!
if (thisRowIndex < this._lValues.length) { // otherArray is done
newRow[index] = this._rowIndices[thisRowIndex];
newValues[index] = this._lValues[thisRowIndex++];
} else { // thisArray is done. Use the other one
newRow[index] = otherRow[otherRowIndex];
newValues[index] = otherValue[otherRowIndex++];
}
}
}
this._rowIndices = newRow;
this._lValues = newValues;
}
public void mergeArraysD(long[] otherRow, double[] otherValue) {
// grab bottom and grab top are slightly different
int finalArraySize = min(this._rowSize, this._rowIndices.length + otherRow.length);
long[] newRow = new long[finalArraySize];
double[] newValues = new double[finalArraySize]; // desired values are at start of array
int thisRowIndex = 0;
int otherRowIndex = 0;
for (int index = 0; index < finalArraySize; index++) {
if ((thisRowIndex < this._dValues.length) && (otherRowIndex < otherValue.length)) {
if (((Double) this._dValues[thisRowIndex]).compareTo(otherValue[otherRowIndex]) * this._flipSign >= 0) {
newRow[index] = this._rowIndices[thisRowIndex];
newValues[index] = this._dValues[thisRowIndex++];
} else {
newRow[index] = otherRow[otherRowIndex];
newValues[index] = otherValue[otherRowIndex++];
}
} else { // one of the array is done!
if (thisRowIndex < this._dValues.length) { // otherArray is done
newRow[index] = this._rowIndices[thisRowIndex];
newValues[index] = this._dValues[thisRowIndex++];
} else { // thisArray is done. Use the other one
newRow[index] = otherRow[otherRowIndex];
newValues[index] = otherValue[otherRowIndex++];
}
}
}
this._rowIndices = newRow;
this._dValues = newValues;
}
@Override
public void postGlobal() { // copy the sorted heap into a vector and make a frame out of it.
Vec[] xvecs = new Vec[2]; // final output frame will have two chunks, original row index, top/bottom values
long actualRowOutput = this._rowIndices.length; // due to NAs, may not have enough rows to return
for (int index = 0; index < xvecs.length; index++)
xvecs[index] = Vec.makeZero(actualRowOutput);
for (int index = 0; index < actualRowOutput; index++) {
xvecs[0].set(index, this._rowIndices[index]);
xvecs[1].set(index, _csLong ? this._lValues[index] : this._dValues[index]);
}
_sortedOut = new Frame(_columnName, xvecs);
}
/*
This function will add one value to the sorted priority queue.
*/
public void addOneValue(Chunk cs, int rowIndex, long absRowIndex, PriorityQueue sortHeap) {
RowValue currPair = null;
if (_csLong) { // long chunk
long a = cs.at8(rowIndex);
currPair = new RowValue(absRowIndex, a, _flipSign);
} else { // other numeric chunk
double a = cs.atd(rowIndex);
currPair = new RowValue(absRowIndex, a, _flipSign);
}
sortHeap.offer(currPair); // add pair to PriorityQueue
if (sortHeap.size() > _rowSize) {
sortHeap.poll(); // remove head if exceeds queue size
}
}
}
/*
Small class to implement priority entry is a key/value pair of original row index and the
corresponding value. Implemented the compareTo function and comparison is performed on
the value.
*/
public class RowValue<E extends Comparable<E>> implements Comparable<RowValue<E>> {
private long _rowIndex;
private E _value;
boolean _increasing; // true if grabbing for top N, false for bottom N
int _flipSign; // 1 to grab top and -1 to grab bottom
public RowValue(long rowIndex, E value, int flipSign) {
this._rowIndex = rowIndex;
this._value = value;
this._flipSign = flipSign;
}
public E getValue() {
return this._value;
}
public long getRow() {
return this._rowIndex;
}
@Override
public int compareTo(RowValue<E> other) {
return (this.getValue().compareTo(other.getValue()) * this._flipSign);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/repeaters/AstRepLen.java
|
package water.rapids.ast.prims.repeaters;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
 * Repeat the values of a frame (or a scalar) until the requested length is reached,
 * similar to R's rep_len.
 */
public class AstRepLen extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "length"};
}
@Override
public int nargs() {
return 1 + 2;
} // (rep_len x length)
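  // Illustrative usage sketch (not part of the original source), with "fr" a hypothetical
  // single-column frame key:
  //   water.rapids.Rapids.exec("(rep_len fr 10)")  // recycle fr's values into a 10-row column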
@Override
public String str() {
return "rep_len";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Val v = asts[1].exec(env);
long length = (long) asts[2].exec(env).getNum();
Frame ff;
if (v instanceof ValFrame) ff = stk.track(v).getFrame();
else return new ValFrame(new Frame(Vec.makeCon(v.getNum(), length)));
final Frame fr = ff;
if (fr.numCols() == 1) {
Vec vec = Vec.makeRepSeq(length, fr.numRows());
new MRTask() {
@Override
public void map(Chunk c) {
for (int i = 0; i < c._len; ++i)
c.set(i, fr.anyVec().at((long) c.atd(i)));
}
}.doAll(vec);
vec.setDomain(fr.anyVec().domain());
return new ValFrame(new Frame(vec));
} else {
Frame f = new Frame();
for (int i = 0; i < length; ++i)
f.add(Frame.defaultColName(f.numCols()), fr.vec(i % fr.numCols()));
return new ValFrame(f);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/repeaters/AstSeq.java
|
package water.rapids.ast.prims.repeaters;
import water.Futures;
import water.fvec.AppendableVec;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
* Same logic as R's generic seq method
*/
public class AstSeq extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"from", "to", "by"};
}
/* (seq from to by) */
@Override
public int nargs() {
return 1 + 3;
}
@Override
public String str() {
return "seq";
}
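  // Illustrative usage sketch (not part of the original source):
  //   water.rapids.Rapids.exec("(seq 1 10 2)")  // -> single-column frame {1, 3, 5, 7, 9}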
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
double from = asts[1].exec(env).getNum();
double to = asts[2].exec(env).getNum();
double by = asts[3].exec(env).getNum();
double delta = to - from;
if (delta == 0 && to == 0)
throw new IllegalArgumentException("Expected `to` and `from` to have nonzero difference.");
else {
double n = delta / by;
if (n < 0) throw new IllegalArgumentException("wrong sign in 'by' argument");
else if (n > Double.MAX_VALUE) throw new IllegalArgumentException("'by' argument is much too small");
Futures fs = new Futures();
AppendableVec av = new AppendableVec(Vec.newKey(), Vec.T_NUM);
NewChunk nc = new NewChunk(av, 0);
int len = (int) n + 1;
for (int r = 0; r < len; r++) nc.addNum(from + r * by);
// May need to adjust values = by > 0 ? min(values, to) : max(values, to)
nc.close(0, fs);
Vec vec = av.layout_and_close(fs);
fs.blockForPending();
return new ValFrame(new Frame(vec));
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/repeaters/AstSeqLen.java
|
package water.rapids.ast.prims.repeaters;
import water.fvec.*;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
* Simple sequence of length n
*/
public class AstSeqLen extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"length"};
}
/* (seq_len n) */
@Override
public int nargs() {
return 1 + 1;
}
@Override
public String str() {
return "seq_len";
}
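  // Illustrative usage sketch (not part of the original source):
  //   water.rapids.Rapids.exec("(seq_len 5)")  // -> single-column frame {1, 2, 3, 4, 5}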
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
int len = (int) Math.ceil(asts[1].exec(env).getNum());
if (len <= 0)
throw new IllegalArgumentException("Error in seq_len(" + len + "): argument must be coercible to positive integer");
return new ValFrame(new Frame(Vec.makeSeq(len, true)));
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/search/AstMatch.java
|
package water.rapids.ast.prims.search;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.rapids.Env;
import water.rapids.ast.params.*;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.util.IcedDouble;
import water.util.IcedHashMap;
import water.util.MathUtils;
import java.util.Arrays;
import java.util.Map;
/**
 * Makes a vector in which, for each value, the index of that value in the table is returned
 * if the value matches; the nomatch value is returned otherwise.
 *
 * Modeled on R's base::match function.
 */
public class AstMatch extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "table", "nomatch", "start_index"};
}
@Override
public int nargs() {
return 1 + 4;
} // (match fr table nomatch start_index)
@Override
public String str() {
return "match";
}
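  // Illustrative usage sketch (not part of the original source), with "fr" a hypothetical
  // single-column categorical/string frame key:
  //   water.rapids.Rapids.exec("(match fr [\"cat\" \"dog\"] nan 1)")
  // returns, per row, the 1-based index of the matched table entry, or NaN when unmatched.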
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
if(fr.anyVec() == null){
throw new IllegalArgumentException("Expected frame with one vector. Got empty frame.");
}
final MRTask<?> matchTask;
    // nomatch can be NaN or a number; NaN is the default
double noMatch;
if (asts[3] instanceof AstNum) {
noMatch = asts[3].exec(env).getNum();
} else if (asts[3] instanceof AstId && ((asts[3]).str().equals("NA") || (asts[3]).str().equals("nan"))){
noMatch = Double.NaN;
} else {
throw new IllegalArgumentException("Expected number or 'NA' or 'nan'. Got: " + asts[3]);
}
// start index is 1 by default
int startIndex;
if (asts[4] instanceof AstNum) {
startIndex = (int) asts[4].exec(env).getNum();
if (startIndex < 0) {
throw new IllegalArgumentException("Expected number >= 0. Got: " + asts[4].getClass());
}
} else if(asts[4] instanceof AstId) {
startIndex = 1;
} else {
throw new IllegalArgumentException("Expected number. Got: " + asts[4].getClass());
}
if (asts[2] instanceof AstNumList) {
if(fr.anyVec().isString()){
throw new IllegalArgumentException("Input vector is string and has string domain. Got numeric match values.");
}
matchTask = new NumMatchTask(((AstNumList) asts[2]).expand(), noMatch, startIndex);
} else if (asts[2] instanceof AstNum) {
if(fr.anyVec().isString()){
throw new IllegalArgumentException("Input vector is string and has string domain. Got numeric match values.");
}
matchTask = new NumMatchTask(new double[]{asts[2].exec(env).getNum()}, noMatch, startIndex);
} else if (asts[2] instanceof AstStrList) {
if(fr.anyVec().isNumeric()){
throw new IllegalArgumentException("Input vector is numeric and has no domain.");
}
String[] values = ((AstStrList) asts[2])._strs;
matchTask = fr.anyVec().isString() ? new StrMatchTask(values, noMatch, startIndex) :
new CatMatchTask(values, noMatch, startIndex);
} else if (asts[2] instanceof AstStr) {
String[] values = new String[]{asts[2].exec(env).getStr()};
if(fr.anyVec().isNumeric()){
throw new IllegalArgumentException("Input vector is numeric and has no domain.");
}
matchTask = fr.anyVec().isString() ? new StrMatchTask(values, noMatch, startIndex) :
new CatMatchTask(values, noMatch, startIndex);
} else
throw new IllegalArgumentException("Expected numbers/strings. Got: " + asts[2].getClass());
Frame result = matchTask.doAll(Vec.T_NUM, fr.anyVec()).outputFrame();
return new ValFrame(result);
}
  private static class StrMatchTask extends MRTask<StrMatchTask> {
String[] _sortedValues;
double _noMatch;
int _startIndex;
IcedHashMap<String, Integer> _mapping;
IcedHashMap<String, Integer> _matchesIndexes;
StrMatchTask(String[] values, double noMatch, int indexes) {
_mapping = new IcedHashMap<>();
for (int i = 0; i < values.length; i++) {
String value = values[i];
if (!_mapping.containsKey(value)) _mapping.put(values[i], i);
}
_sortedValues = values.clone();
Arrays.sort(_sortedValues);
_noMatch = noMatch;
_startIndex = indexes;
_matchesIndexes = new IcedHashMap<>();
}
@Override
public void map(Chunk c, NewChunk nc) {
BufferedString bs = new BufferedString();
int rows = c._len;
for (int r = 0; r < rows; r++) {
double x = c.isNA(r) ? _noMatch : in(_matchesIndexes, _sortedValues, _mapping, c.atStr(bs, r).toString(), _noMatch, _startIndex);
nc.addNum(x);
}
}
}
private static class CatMatchTask extends MRTask<CatMatchTask> {
String[] _sortedValues;
int[] _firstMatchRow;
double _noMatch;
int _startIndex;
IcedHashMap<String, Integer> _mapping;
IcedHashMap<String, Integer> _matchesIndexes;
CatMatchTask(String[] values, double noMatch, int startIndex) {
_mapping = new IcedHashMap<>();
for (int i = 0; i < values.length; i++) {
String value = values[i];
if (!_mapping.containsKey(value)) _mapping.put(values[i], i);
}
_sortedValues = values.clone();
Arrays.sort(_sortedValues);
_noMatch = noMatch;
_startIndex = startIndex;
_firstMatchRow = new int[values.length];
_matchesIndexes = new IcedHashMap<>();
}
@Override
public void map(Chunk c, NewChunk nc) {
String[] domain = c.vec().domain();
int rows = c._len;
for (int r = 0; r < rows; r++) {
double x = c.isNA(r) ? _noMatch : in(_matchesIndexes, _sortedValues, _mapping, domain[(int) c.at8(r)], _noMatch, _startIndex);
nc.addNum(x);
}
}
}
  private static class NumMatchTask extends MRTask<NumMatchTask> {
double[] _sortedValues;
double _noMatch;
int _startIndex;
IcedHashMap<IcedDouble, Integer> _mapping;
IcedHashMap<IcedDouble, Integer> _matchesIndexes;
NumMatchTask(double[] values, double noMatch, int startIndex) {
_mapping = new IcedHashMap<>();
for (int i = 0; i < values.length; i++) {
double value = values[i];
if (!_mapping.containsKey(new IcedDouble(value))) _mapping.put(new IcedDouble(values[i]), i);
}
_sortedValues = values.clone();
Arrays.sort(_sortedValues);
_noMatch = noMatch;
_startIndex = startIndex;
_matchesIndexes = new IcedHashMap<>();
}
@Override
public void map(Chunk c, NewChunk nc) {
int rows = c._len;
for (int r = 0; r < rows; r++) {
double x = c.isNA(r) ? _noMatch : in(_matchesIndexes, _sortedValues, _mapping, c.atd(r), _noMatch, _startIndex);
nc.addNum(x);
}
}
}
private static double in(Map<String, Integer> matchesIndexes, String[] sortedMatches, IcedHashMap<String, Integer> mapping, String s, double noMatch, int startIndex) {
Integer mapResult = matchesIndexes.get(s);
int match;
if (mapResult == null){
match = Arrays.binarySearch(sortedMatches, s);
matchesIndexes.put(s, match);
} else {
match = mapResult;
}
return match >= 0 ? applyStartIndex(mapping.get(s), startIndex) : noMatch;
}
private static double in(Map<IcedDouble, Integer> matchesIndexes, double[] sortedMatches, IcedHashMap<IcedDouble, Integer> mapping, double d, double noMatch, int startIndex) {
IcedDouble id = new IcedDouble(d);
Integer mapResult = matchesIndexes.get(id);
int match;
if (mapResult == null){
match = binarySearchDoublesUlp(sortedMatches, 0, sortedMatches.length, d);
matchesIndexes.put(id, match);
} else {
match = mapResult;
}
return match >= 0 ? applyStartIndex(mapping.get(id).doubleValue(), startIndex) : noMatch;
}
private static double applyStartIndex(double value, int startIndex) {
assert startIndex >= 0;
return value + startIndex;
}
private static int binarySearchDoublesUlp(double[] a, int from, int to, double key) {
int lo = from;
int hi = to - 1;
while (lo <= hi) {
int mid = (lo + hi) >>> 1;
double midVal = a[mid];
if (MathUtils.equalsWithinOneSmallUlp(midVal, key)) return mid;
if (midVal < key) lo = mid + 1;
else if (midVal > key) hi = mid - 1;
else {
long midBits = Double.doubleToLongBits(midVal);
long keyBits = Double.doubleToLongBits(key);
if (midBits == keyBits) return mid;
else if (midBits < keyBits) lo = mid + 1;
else hi = mid - 1;
}
}
return -(lo + 1); // key not found.
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/search/AstWhich.java
|
package water.rapids.ast.prims.search;
import water.Futures;
import water.MRTask;
import water.fvec.*;
import water.rapids.Env;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
/**
* Indices of which entries are not equal to 0
*/
public class AstWhich extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary"};
}
@Override
public int nargs() {
return 1 + 1;
} // (which col)
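  // Illustrative usage sketch (not part of the original source), with "fr" a hypothetical
  // single integer-column frame key:
  //   water.rapids.Rapids.exec("(which fr)")  // -> row indices of the non-zero entries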
@Override
public String str() {
return "which";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame f = stk.track(asts[1].exec(env)).getFrame();
// The 1-row version
if (f.numRows() == 1 && f.numCols() > 1) {
AppendableVec v = new AppendableVec(Vec.VectorGroup.VG_LEN1.addVec(), Vec.T_NUM);
NewChunk chunk = new NewChunk(v, 0);
for (int i = 0; i < f.numCols(); i++)
if (f.vecs()[i].at8(0) != 0)
chunk.addNum(i);
Futures fs = chunk.close(0, new Futures());
Vec vec = v.layout_and_close(fs);
fs.blockForPending();
return new ValFrame(new Frame(vec));
}
// The 1-column version
Vec vec = f.anyVec();
if (f.numCols() > 1 || !vec.isInt())
throw new IllegalArgumentException("which requires a single integer column");
Frame f2 = new MRTask() {
@Override
public void map(Chunk c, NewChunk nc) {
long start = c.start();
for (int i = 0; i < c._len; ++i)
if (c.at8(i) != 0) nc.addNum(start + i);
}
}.doAll(new byte[]{Vec.T_NUM}, vec).outputFrame();
return new ValFrame(f2);
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/search/AstWhichFunc.java
|
package water.rapids.ast.prims.search;
import water.H2O;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.rapids.Env;
import water.rapids.Val;
import water.rapids.ast.AstBuiltin;
import water.fvec.Vec;
import water.rapids.ast.AstRoot;
import water.rapids.vals.ValFrame;
import water.rapids.vals.ValRow;
public abstract class AstWhichFunc extends AstBuiltin<AstWhichFunc> {
@Override
public String[] args() {
return new String[]{"frame", "na_rm", "axis"};
}
@Override
public int nargs() {
return 1 + 1;
}
@Override
public String str() {
throw H2O.unimpl();
}
public abstract double op(Vec l); //Operation to perform in colWiseWhichVal() -> Vec.max() or Vec.min().
public abstract String searchVal(); //String indicating what we are searching for across rows in rowWiseWhichVal() -> max or min.
public abstract double init();
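  // The concrete subclasses in this package (AstWhichMax / AstWhichMin) implement op()
  // as Vec.max() / Vec.min() and searchVal() as "max" / "min" respectively.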
@Override
public Val apply(Env env, Env.StackHelp stk, AstRoot[] asts) {
Val val1 = asts[1].exec(env);
if (val1 instanceof ValFrame) {
Frame fr = stk.track(val1).getFrame();
boolean na_rm = asts[2].exec(env).getNum() == 1;
boolean axis = asts.length == 4 && (asts[3].exec(env).getNum() == 1);
return axis ? rowwiseWhichVal(fr, na_rm) : colwiseWhichVal(fr, na_rm);
}
else if (val1 instanceof ValRow) {
// This may be called from AstApply when doing per-row computations.
double[] row = val1.getRow();
boolean na_rm = asts[2].exec(env).getNum() == 1;
      // Initialize so the first non-NA value always wins: -Inf when searching for the max,
      // +Inf when searching for the min.
      double val = "min".equals(searchVal()) ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
      double valIndex = 0;
      if ("max".equals(searchVal())) { // Looking for the max?
for (int i = 0; i < row.length; i++) {
if (Double.isNaN(row[i])) {
if (!na_rm)
return new ValRow(new double[]{Double.NaN}, null);
} else {
if (row[i] > val) {
val = row[i];
valIndex = i;
}
}
}
      } else if ("min".equals(searchVal())) { // Looking for the min?
for (int i = 0; i < row.length; i++) {
if (Double.isNaN(row[i])) {
if (!na_rm)
return new ValRow(new double[]{Double.NaN}, null);
} else {
if (row[i] < val) {
val = row[i];
valIndex = i;
}
}
}
}
else{
throw new IllegalArgumentException("Incorrect argument: expected to search for max() or min(), received " + searchVal());
}
return new ValRow(new double[]{valIndex}, null);
} else
throw new IllegalArgumentException("Incorrect argument: expected a frame or a row, received " + val1.getClass());
}
/**
* Compute row-wise, and return a frame consisting of a single Vec of value indexes in each row.
*/
private ValFrame rowwiseWhichVal(Frame fr, final boolean na_rm) {
String[] newnames = {"which." + searchVal()};
Key<Frame> newkey = Key.make();
// Determine how many columns of different types we have
int n_numeric = 0, n_time = 0;
for (Vec vec : fr.vecs()) {
if (vec.isNumeric()) n_numeric++;
if (vec.isTime()) n_time++;
}
// Compute the type of the resulting column: if all columns are TIME then the result is also time; otherwise
// if at least one column is numeric then the result is also numeric.
byte resType = n_numeric > 0 ? Vec.T_NUM : Vec.T_TIME;
// Construct the frame over which the val index should be computed
Frame compFrame = new Frame();
for (int i = 0; i < fr.numCols(); i++) {
Vec vec = fr.vec(i);
if (n_numeric > 0? vec.isNumeric() : vec.isTime())
compFrame.add(fr.name(i), vec);
}
Vec anyvec = compFrame.anyVec();
// Take into account certain corner cases
if (anyvec == null) {
Frame res = new Frame(newkey);
anyvec = fr.anyVec();
if (anyvec != null) {
// All columns in the original frame are non-numeric -> return a vec of NAs
res.add("which." + searchVal(), anyvec.makeCon(Double.NaN));
} // else the original frame is empty, in which case we return an empty frame too
return new ValFrame(res);
}
if (!na_rm && n_numeric < fr.numCols() && n_time < fr.numCols()) {
// If some of the columns are non-numeric and na_rm==false, then the result is a vec of NAs
Frame res = new Frame(newkey, newnames, new Vec[]{anyvec.makeCon(Double.NaN)});
return new ValFrame(res);
}
// Compute over all rows
final int numCols = compFrame.numCols();
Frame res = new MRTask() {
@Override
public void map(Chunk[] cs, NewChunk nc) {
for (int i = 0; i < cs[0]._len; i++) {
int numNaColumns = 0;
            // Initialize so the first non-NA value always wins: -Inf for max, +Inf for min.
            double value = "min".equals(searchVal()) ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
            int valueIndex = 0;
            if ("max".equals(searchVal())) { // Looking for the max?
for (int j = 0; j < numCols; j++) {
double val = cs[j].atd(i);
if (Double.isNaN(val)) {
numNaColumns++;
} else if (val > value) { //Return the first occurrence of the val
value = val;
valueIndex = j;
}
}
          } else if ("min".equals(searchVal())) { // Looking for the min?
for (int j = 0; j < numCols; j++) {
double val = cs[j].atd(i);
if (Double.isNaN(val)) {
numNaColumns++;
}
else if(val < value) { //Return the first occurrence of the min index
value = val;
valueIndex = j;
}
}
}else{
throw new IllegalArgumentException("Incorrect argument: expected to search for max() or min(), received " + searchVal());
}
if (na_rm ? numNaColumns < numCols : numNaColumns == 0)
nc.addNum(valueIndex);
else
nc.addNum(Double.NaN);
}
}
}.doAll(1, resType, compFrame)
.outputFrame(newkey, newnames, null);
// Return the result
return new ValFrame(res);
}
/**
* Compute column-wise (i.e.value index of each column), and return a frame having a single row.
*/
private ValFrame colwiseWhichVal(Frame fr, final boolean na_rm) {
Frame res = new Frame();
Vec vec1 = Vec.makeCon(null, 0);
assert vec1.length() == 1;
for (int i = 0; i < fr.numCols(); i++) {
Vec v = fr.vec(i);
double searchValue = op(v);
boolean valid = (v.isNumeric() || v.isTime() || v.isBinary()) && v.length() > 0 && (na_rm || v.naCnt() == 0);
FindIndexCol findIndexCol = new FindIndexCol(searchValue).doAll(new byte[]{Vec.T_NUM}, v);
Vec newvec = vec1.makeCon(valid ? findIndexCol._valIndex : Double.NaN, v.isTime()? Vec.T_TIME : Vec.T_NUM);
res.add(fr.name(i), newvec);
}
vec1.remove();
return new ValFrame(res);
}
private static class FindIndexCol extends MRTask<AstWhichFunc.FindIndexCol>{
double _val;
double _valIndex;
FindIndexCol(double val) {
_val = val;
_valIndex = Double.POSITIVE_INFINITY;
}
@Override
public void map(Chunk c, NewChunk nc) {
long start = c.start();
for (int i = 0; i < c._len; ++i) {
if (c.atd(i) == _val) {
_valIndex = start + i;
break;
}
}
}
@Override
public void reduce(AstWhichFunc.FindIndexCol mic) {
_valIndex = Math.min(_valIndex, mic._valIndex); //Return the first occurrence of the val index
}
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/search/AstWhichMax.java
|
package water.rapids.ast.prims.search;
import water.fvec.Vec;
public class AstWhichMax extends AstWhichFunc {
@Override
public int nargs() { return 1 + 3; } // "frame", "na_rm", "axis"
@Override
public String str() {
return "which.max";
}
@Override
public double op(Vec l) {
return l.max();
}
@Override
public String searchVal(){
return "max";
}
@Override
public double init() {
return 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/search/AstWhichMin.java
|
package water.rapids.ast.prims.search;
import water.fvec.Vec;
public class AstWhichMin extends AstWhichFunc {
@Override
public int nargs() { return 1 + 3; } // "frame", "na_rm", "axis"
@Override
public String str() {
return "which.min";
}
@Override
public double op(Vec l) {
return l.min();
}
@Override
public String searchVal(){
return "min";
}
@Override
public double init() {
return 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims
|
java-sources/ai/h2o/h2o-core/3.46.0.7/water/rapids/ast/prims/string/AstCountMatches.java
|
package water.rapids.ast.prims.string;
import org.apache.commons.lang.StringUtils;
import water.MRTask;
import water.fvec.*;
import water.parser.BufferedString;
import water.rapids.Env;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstStrList;
/**
* Accepts a frame with a single string column, and a substring to look for in the target.
* Returns a new integer column containing the countMatches result for each string in the
* target column.
* <p/>
* countMatches - Counts how many times the substring appears in the larger string.
* If either the target string or substring are empty (""), 0 is returned.
*/
public class AstCountMatches extends AstPrimitive {
@Override
public String[] args() {
return new String[]{"ary", "pattern"};
}
@Override
public int nargs() {
return 1 + 2;
} // (countmatches x pattern)
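  // Illustrative usage sketch (not part of the original source), with "fr" a hypothetical
  // string-column frame key:
  //   water.rapids.Rapids.exec("(countmatches fr \"ell\")")  // occurrences of "ell" per string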
@Override
public String str() {
return "countmatches";
}
@Override
public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
Frame fr = stk.track(asts[1].exec(env)).getFrame();
final String[] pattern = asts[2] instanceof AstStrList
? ((AstStrList) asts[2])._strs
: new String[]{asts[2].exec(env).getStr()};
// Type check
for (Vec v : fr.vecs())
if (!(v.isCategorical() || v.isString()))
throw new IllegalArgumentException("countmatches() requires a string or categorical column. "
+ "Received " + fr.anyVec().get_type_str()
+ ". Please convert column to a string or categorical first.");
// Transform each vec
Vec nvs[] = new Vec[fr.numCols()];
int i = 0;
for (Vec v : fr.vecs()) {
if (v.isCategorical())
nvs[i] = countMatchesCategoricalCol(v, pattern);
else
nvs[i] = countMatchesStringCol(v, pattern);
i++;
}
return new ValFrame(new Frame(nvs));
}
private Vec countMatchesCategoricalCol(Vec vec, String[] pattern) {
final int[] matchCounts = countDomainMatches(vec.domain(), pattern);
return new MRTask() {
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
Chunk c = cs[0];
for (int i = 0; i < c._len; ++i) {
if (!c.isNA(i)) {
int idx = (int) c.at8(i);
ncs[0].addNum(matchCounts[idx]);
} else ncs[0].addNA();
}
}
}.doAll(1, Vec.T_NUM, new Frame(vec)).outputFrame().anyVec();
}
int[] countDomainMatches(String[] domain, String[] pattern) {
int[] res = new int[domain.length];
for (int i = 0; i < domain.length; i++)
for (String aPattern : pattern)
res[i] += StringUtils.countMatches(domain[i], aPattern);
return res;
}
private Vec countMatchesStringCol(Vec vec, String[] pat) {
final String[] pattern = pat;
return new MRTask() {
@Override
public void map(Chunk chk, NewChunk newChk) {
if (chk instanceof C0DChunk) // all NAs
for (int i = 0; i < chk.len(); i++)
newChk.addNA();
else {
BufferedString tmpStr = new BufferedString();
for (int i = 0; i < chk._len; ++i) {
if (chk.isNA(i)) newChk.addNA();
else {
int cnt = 0;
for (String aPattern : pattern)
cnt += StringUtils.countMatches(chk.atStr(tmpStr, i).toString(), aPattern);
newChk.addNum(cnt, 0);
}
}
}
}
}.doAll(Vec.T_NUM, new Frame(vec)).outputFrame().anyVec();
}
}
|